chriscelaya commited on
Commit
b1f6b96
·
1 Parent(s): d0527d7

initial commit

Browse files
Files changed (1) hide show
  1. app.py +181 -0
app.py ADDED
@@ -0,0 +1,181 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ from typing import Optional, Tuple
3
+
4
+ import gradio as gr
5
+ from langchain.llms import OpenAIChat
6
+ from langchain import PromptTemplate
7
+ from langchain.chains import ConversationChain
8
+ from langchain.chains.conversation.memory import ConversationBufferMemory
9
+ from threading import Lock
10
+
11
+
12
def load_chain(model_name: str = "gpt-3.5-turbo-0301", temperature: float = 0.8):
    """Build a ConversationChain backed by an OpenAI chat model.

    Args:
        model_name: OpenAI chat model to use. Defaults to the pinned
            gpt-3.5-turbo-0301 snapshot the app was written against.
        temperature: Sampling temperature passed to the model.

    Returns:
        A ConversationChain with User/Assistant-prefixed buffer memory,
        ready to be primed with an initializing prompt.
    """
    # System message sent ahead of the conversation on every request.
    prefix_messages = [
        {
            "role": "system",
            "content": "You are a helpful assistant who is very good at problem solving and thinks step by step. You are about to receive a complex set of instructions to follow for the remainder of the conversation. Good luck!"
        }
    ]

    llm = OpenAIChat(
        model_name=model_name,
        temperature=temperature,
        prefix_messages=prefix_messages,
    )

    # The quintuple-quote fences mark the user's text as literal input so the
    # model distinguishes it from the surrounding conversation scaffold.
    prompt = PromptTemplate(
        input_variables=['history', 'input'],
        output_parser=None,
        template='Current conversation:\n{history}\n\nUser: """""\n{input}"""""\n\nAssistant: ',
        template_format='f-string'
    )

    chain = ConversationChain(
        llm=llm,
        prompt=prompt,
        # Prefixes must match the template's "User"/"Assistant" labels so the
        # buffered history renders consistently.
        memory=ConversationBufferMemory(human_prefix="User", ai_prefix="Assistant")
    )

    return chain
36
+
37
+
38
def load_prompt(prompt_selection: str):
    """Load the selected initializing prompt and prime a fresh chain with it.

    Args:
        prompt_selection: Prompt identifier relative to the ``prompts``
            directory, e.g. ``"work/proposal-gen"``.

    Returns:
        A ConversationChain whose memory already contains the initializing
        prompt and the model's first response.

    Raises:
        FileNotFoundError: If no ``prompt.txt`` exists for the selection.
    """
    path = f"prompts/{prompt_selection}/prompt.txt"

    # Explicit encoding so prompts containing non-ASCII text decode the same
    # way on every platform (default encoding is locale-dependent).
    with open(path, "r", encoding="utf-8") as f:
        init_prompt = f.read()
        print(f"Loading {path.split('/')[-2]} from: {path}...")  # e.g. Loading proposal-gen from: prompts/work/proposal-gen/prompt.txt

    chain = load_chain()
    # Run the prompt through the chain once so both the prompt and the
    # model's acknowledgement are stored in conversation memory.
    chain.predict(input=init_prompt)
    print(f"Done! Loaded {len(chain.memory.buffer)} characters.")
    return chain
50
+
51
def fetch_prompts():
    """Recursively scan the ``prompts`` directory for available prompts.

    A prompt is any directory containing a ``prompt.txt`` file. Identifiers
    are the directory paths relative to ``prompts`` with forward slashes
    (e.g. ``"work/proposal-gen"``), used to populate the Gradio dropdown.

    Returns:
        Sorted list of prompt identifiers; empty if no prompts exist.
    """
    available_prompts = []
    for root, dirs, files in os.walk("prompts"):
        if "prompt.txt" in files:
            # Normalize separators first so this also works on Windows, then
            # strip only the leading "prompts/" component (count=1 guards
            # against a nested directory that happens to be named "prompts").
            rel = root.replace(os.sep, "/").replace("prompts/", "", 1)
            available_prompts.append(rel)
    available_prompts.sort()

    return available_prompts
63
+
64
def set_openai_api_key(api_key: str):
    """Store the OpenAI API key and build an initialized conversation chain.

    Args:
        api_key: The key pasted by the user; exported as ``OPENAI_API_KEY``
            so the langchain/openai clients pick it up.

    Returns:
        A ConversationChain primed with the currently selected prompt, or
        ``None`` when ``api_key`` is empty.
    """
    # Guard clause: no key means no chain (callers check for None).
    if not api_key:
        return None

    os.environ["OPENAI_API_KEY"] = api_key
    print("API key set.")
    # NOTE(review): reads the module-level `selected_prompt` Gradio component
    # defined in the Blocks layout below; this works because the function is
    # only invoked at event time, after the layout exists — confirm on upgrade.
    chain = load_prompt(selected_prompt.value)
    return chain
76
+
77
class ChatWrapper:
    """Thread-safe callable that runs a single chat turn through the chain.

    A lock serializes calls so concurrent Gradio events cannot interleave
    runs against the shared conversation memory.
    """

    def __init__(self):
        self.lock = Lock()

    def __call__(
        self, api_key: str, inp: str, history: Optional[Tuple[str, str]], chain: Optional["ConversationChain"]
    ):
        """Execute the chat functionality.

        Args:
            api_key: OpenAI API key supplied by the user.
            inp: The user's message for this turn.
            history: Accumulated (user, assistant) pairs, or None on first use.
            chain: Loaded ConversationChain; None when no API key was set.

        Returns:
            ``(history, history)`` — the same list twice, matching the
            chatbot display output and the state output.
        """
        # `with` releases the lock on every exit path (early return, raise),
        # replacing the original manual acquire / try / finally dance. The
        # no-op `except Exception: raise` from the original is dropped.
        with self.lock:
            history = history or []
            # If chain is None, that is because no API key was provided.
            if chain is None:
                history.append((inp, "Please paste your OpenAI key to use"))
                return history, history

            # Point the openai client at this session's key before running.
            import openai
            openai.api_key = api_key

            # Run chain and append input.
            output = chain.run(input=inp)
            history.append((inp, output))
        return history, history
105
+
106
# Single shared wrapper: its internal lock serializes chat turns across events.
chat = ChatWrapper()

block = gr.Blocks(css=".gradio-container {background-color: lightgray}")

with block:
    with gr.Row():
        gr.Markdown("<h2><center>PromptLib</center></h2>")

    with gr.Tab('Prompt'):

        # Dropdown of prompt identifiers discovered under prompts/ at startup.
        selected_prompt = gr.Dropdown(
            choices=fetch_prompts(),
            type="value",
            value="work/proposal-gen",
            label="Base prompt",
            interactive=True
        )

        # Re-primes a fresh chain with the currently selected prompt.
        reload_prompt= gr.Button(
            value="Reload",
            variant="secondary"
        )

    with gr.Tab('API Key'):
        openai_api_key_textbox = gr.Textbox(
            placeholder="Paste your OpenAI API key (sk-...)",
            show_label=False,
            lines=1,
            type="password",
        )

    chatbot = gr.Chatbot()

    with gr.Row():
        message = gr.Textbox(
            label="Message",
            placeholder="What's the answer to life, the universe, and everything?",
            lines=1,
        )
        # NOTE(review): .style(full_width=False) is the gradio 3.x styling API;
        # removed in gradio 4 — confirm the pinned gradio version on upgrade.
        submit = gr.Button(value="Send", variant="secondary").style(full_width=False)

    # Clickable example messages that fill the message textbox.
    gr.Examples(
        examples=[
            "What can you do? What command(s) are available?",
            "Please suggest some sample commands.",
        ],
        inputs=message,
    )

    gr.HTML(
        "<center>Josh Pazmino | <a href='https://github.com/jmpaz'>GitHub</a> • <a href='https://twitter.com/fjpaz_'>Twitter</a> • <a href='https://linkedin.com/in/fjpazmino'>LinkedIn</a></center>"
    )

    # state holds the (user, assistant) history list; agent_state holds the
    # ConversationChain returned by set_openai_api_key / load_prompt.
    state = gr.State()
    agent_state = gr.State()

    # Both the Send button and pressing Enter in the textbox run a chat turn.
    submit.click(chat, inputs=[openai_api_key_textbox, message, state, agent_state], outputs=[chatbot, state])
    message.submit(chat, inputs=[openai_api_key_textbox, message, state, agent_state], outputs=[chatbot, state])


    # Entering a key builds a new chain primed with the selected prompt.
    openai_api_key_textbox.change(
        set_openai_api_key,
        inputs=[openai_api_key_textbox],
        outputs=[agent_state],
    )

    # Switching prompts (or clicking Reload) replaces the chain, which also
    # discards the previous conversation memory.
    selected_prompt.change(
        load_prompt,
        inputs=[selected_prompt],
        outputs=[agent_state]
    )

    reload_prompt.click(load_prompt, inputs=[selected_prompt], outputs=[agent_state])


block.launch(debug=True)