carolanderson committed
Commit a8a9ff0
Parent: bd2ec7e

add basic chat app

Files changed (3)
  1. .gitignore +1 -0
  2. app.py +107 -0
  3. requirements.txt +3 -0
.gitignore ADDED
@@ -0,0 +1 @@
+ .ipynb_checkpoints
app.py ADDED
@@ -0,0 +1,107 @@
+ import os
+
+ from langchain.chains import LLMChain
+ from langchain.chat_models import ChatOpenAI
+ from langchain.prompts.chat import (
+     ChatPromptTemplate,
+     MessagesPlaceholder,
+     SystemMessagePromptTemplate,
+     HumanMessagePromptTemplate,
+ )
+ from langchain.memory import ConversationBufferWindowMemory
+ from langchain.schema import AIMessage, HumanMessage
+ import streamlit as st
+
+
+ @st.cache_resource
+ def set_api_key(api_key):
+     os.environ["OPENAI_API_KEY"] = api_key
+
+
+ @st.cache_resource
+ def get_chain(model_name, temperature):
+     llm = ChatOpenAI(model_name=model_name, temperature=temperature)
+     memory = ConversationBufferWindowMemory(k=3, memory_key="chat_history", return_messages=True)
+     prompt = ChatPromptTemplate(
+         messages=[
+             SystemMessagePromptTemplate.from_template(
+                 "You are a nice chatbot having a conversation with a human."
+             ),
+             MessagesPlaceholder(variable_name="chat_history"),
+             HumanMessagePromptTemplate.from_template("{input}")
+         ]
+     )
+     conversation = LLMChain(
+         llm=llm,
+         prompt=prompt,
+         verbose=True,
+         memory=memory
+     )
+     return conversation
+
+
+ def display_messages(chain):
+     """
+     Show the messages in the conversation buffer.
+     """
+     for message in chain.memory.buffer:
+         if isinstance(message, AIMessage):
+             role = "assistant"
+         elif isinstance(message, HumanMessage):
+             role = "user"
+         with st.chat_message(role):
+             st.write(message.content)
+
+
+ if __name__ == "__main__":
+     st.header("Basic chatbot")
+     with st.expander("How conversation history works"):
+         st.write("To keep input lengths down and costs reasonable,"
+                  " this bot only 'remembers' the past three turns of conversation.")
+         st.write("Each combination of model type and temperature has its own unique chat history.")
+         st.write("To clear the current model's memory and start fresh, click 'Clear history'.")
+     API_KEY = st.sidebar.text_input(
+         'API Key',
+         type='password',
+         help="Enter your OpenAI API key to use this app",
+         value=None)
+
+     model_name = st.sidebar.selectbox(
+         label="Choose a model",
+         options=["gpt-3.5-turbo", "gpt-4"],
+         help="Which LLM to use",
+     )
+
+     temperature = st.sidebar.slider(
+         label="Temperature",
+         min_value=0.0,
+         max_value=1.0,
+         step=0.1,
+         value=0.9,
+         help="Set the decoding temperature. Lower temperatures give more predictable outputs."
+     )
+
+     if API_KEY is not None:
+         set_api_key(API_KEY)
+         chain = get_chain(model_name, temperature)
+         if st.button("Clear history"):
+             chain.memory.clear()
+         display_messages(chain)
+         text = st.chat_input()
+         if text:
+             if text.lower() == "clear":
+                 chain.memory.clear()
+             else:
+                 result = chain.predict(input=text)
+                 display_messages(chain)
requirements.txt ADDED
@@ -0,0 +1,3 @@
+ langchain
+ openai
+ streamlit
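
For reference, a rough sketch of how the k=3 window configured in get_chain() behaves, using the same ConversationBufferWindowMemory API that app.py imports; the loop and example strings below are illustrative only, not part of the app.

    from langchain.memory import ConversationBufferWindowMemory

    # Same settings as in get_chain(): remember only the last k=3 user/assistant exchanges.
    memory = ConversationBufferWindowMemory(k=3, memory_key="chat_history", return_messages=True)

    # Simulate five exchanges; only the last three survive in the window.
    for i in range(5):
        memory.save_context({"input": f"user message {i}"}, {"output": f"bot reply {i}"})

    history = memory.load_memory_variables({})["chat_history"]
    print(len(history))  # 6 messages: the 3 most recent (human, AI) pairs

Because get_chain() is wrapped in @st.cache_resource and keyed on its arguments, each (model_name, temperature) pair gets its own cached chain and therefore its own memory, which is what the in-app expander describes. With the requirements installed, the app is started with streamlit run app.py.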