ShawnAI committed on
Commit
a320d56
1 Parent(s): f6c1b0a

Create app.py

Files changed (1)
  1. app.py +183 -0
app.py ADDED
@@ -0,0 +1,183 @@
+ import gradio as gr
+ import random
+ import time
+
+ from langchain.chat_models import ChatOpenAI
+ from langchain.embeddings import HuggingFaceEmbeddings
+ from langchain.vectorstores import Pinecone
+ from langchain.chains.retrieval_qa.base import RetrievalQA
+ from langchain.chains.question_answering import load_qa_chain
+ import pinecone
+
+ import os
+ os.environ["TOKENIZERS_PARALLELISM"] = "false"
+
+ OPENAI_KEY = ""
+ OPENAI_TEMP = 0
+ PINECONE_KEY = os.environ["PINECONE_KEY"]
+ PINECONE_ENV = "asia-northeast1-gcp"
+ PINECONE_INDEX = "3gpp"
+
+ # Return the top-k text chunks from the vector store
+ VECTOR_SEARCH_TOP_K = 10
+
+ # LLM input history length
+ LLM_HISTORY_LEN = 3
+
+
+ BUTTON_MIN_WIDTH = 150
+
+ MODEL_STATUS = "Waiting for API Key to Initialize."
+
+ MODEL_LOADED = "Model Loaded"
+
+ MODEL_WARNING = "Please paste your OpenAI API Key from openai.com to initialize this application!"
+
+
+ webui_title = """
+ # 3GPP OpenAI Chatbot for Hackathon Demo
+
+ """
+
+ init_message = """Welcome to the 3GPP Chatbot.
+ This demo toolkit is built on OpenAI with LangChain and Pinecone.
+ Please enter your question and click 'Submit'.
+ """
+
+
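+ # Initialize the retrieval stack: HuggingFace sentence embeddings, the existing
+ # Pinecone index, a ChatOpenAI model, and a "stuff" question-answering chain.
+ # The vector store (db) and the chain are kept as module-level globals so the
+ # chat callback can reuse them; on failure the exception is printed and the
+ # API key textbox is cleared.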
+ def init_model(openai_key):
+     try:
+         embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-mpnet-base-v2")
+
+         pinecone.init(api_key=PINECONE_KEY,
+                       environment=PINECONE_ENV)
+
+         llm = ChatOpenAI(temperature=OPENAI_TEMP,
+                          openai_api_key=openai_key)
+
+         global db
+         db = Pinecone.from_existing_index(index_name=PINECONE_INDEX,
+                                           embedding=embeddings)
+         global chain
+         chain = load_qa_chain(llm, chain_type="stuff")
+
+         global MODEL_STATUS
+         MODEL_STATUS = MODEL_LOADED
+
+         return openai_key, ""
+     except Exception as e:
+         print(e)
+         return "", ""
+
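+ # Flatten the Gradio chat history (a list of [human, ai] pairs) into a plain
+ # "Human: ... / AI: ..." transcript string that is passed as chat_history to the chain.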
+ def get_chat_history(inputs) -> str:
+     res = []
+     for human, ai in inputs:
+         res.append(f"Human: {human}\nAI: {ai}")
+     return "\n".join(res)
+
+ css = """.bigbox {
+     min-height:200px;
+ }"""
+
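+ # Build the Gradio UI: an API key textbox, a "Chatbot" tab with the chat window,
+ # question/reference inputs and Clear/Submit buttons, and a "Details" tab with a
+ # top_k slider plus a panel showing the retrieved source documents.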
+ with gr.Blocks(css=css) as demo:
+
+     gr.Markdown(webui_title)
+     gr.Markdown(init_message)
+
+     if OPENAI_KEY and OPENAI_KEY.startswith("sk-") and len(OPENAI_KEY) > 50:
+         api_textbox_ph = "API Key Found in Environment Variable: sk-..." + OPENAI_KEY[-4:]
+         api_textbox_edit = False
+         init_model(OPENAI_KEY)
+     else:
+         api_textbox_ph = "Paste Your OpenAI API Key (sk-...) and Hit ENTER"
+         api_textbox_edit = True
+
+     api_textbox = gr.Textbox(placeholder=api_textbox_ph,
+                              interactive=api_textbox_edit,
+                              show_label=False, lines=1, type='password')
+
+
+     with gr.Tab("Chatbot"):
+         with gr.Row():
+             with gr.Column(scale=10):
+                 chatbot = gr.Chatbot(elem_classes="bigbox")
+             '''
+             with gr.Column(scale=1, min_width=BUTTON_MIN_WIDTH):
+                 temp = gr.Slider(0,
+                                  2,
+                                  value=OPENAI_TEMP,
+                                  step=0.1,
+                                  label="temperature",
+                                  interactive=True)
+                 init = gr.Button("Init")
+             '''
+         with gr.Row():
+             with gr.Column(scale=10):
+                 query = gr.Textbox(label="Question:",
+                                    lines=2)
+                 ref = gr.Textbox(label="Reference (optional):")
+             with gr.Column(scale=1, min_width=BUTTON_MIN_WIDTH):
+                 clear = gr.Button("Clear")
+                 submit = gr.Button("Submit", variant="primary")
+
+
+     with gr.Tab("Details"):
+         top_k = gr.Slider(1,
+                           20,
+                           value=VECTOR_SEARCH_TOP_K,
+                           step=1,
+                           label="Vector similarity top_k",
+                           interactive=True)
+         detail_panel = gr.Chatbot(label="Related Docs")
+
+
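+     # Chat submit step 1: append the new question to the history with an empty
+     # bot slot and clear the input textbox.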
+     def user(user_message, history):
+         return "", history + [[user_message, None]]
+
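+     # Chat submit step 2: retrieve the top_k most similar chunks from Pinecone
+     # (using the optional reference text as the query when provided), run the
+     # "stuff" QA chain over them, and return the answer plus the retrieved
+     # sources for the "Details" tab.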
+     def bot(box_message, ref_message, top_k):
+         if MODEL_STATUS != MODEL_LOADED:
+             box_message[-1][1] = MODEL_WARNING
+             return box_message, "", ""
+
+         # bot_message = random.choice(["Yes", "No"])
+         # Index 0 is the user question, index 1 is the bot response
+         question = box_message[-1][0]
+         history = box_message[:-1]
+
+         if not ref_message:
+             ref_message = question
+             details = f"Q: {question}"
+         else:
+             details = f"Q: {question}\nR: {ref_message}"
+
+         #print(question, ref_message)
+         #print(history)
+         #print(get_chat_history(history))
+
+         docsearch = db.as_retriever(search_kwargs={"k": top_k})
+         docs = docsearch.get_relevant_documents(ref_message)
+         all_output = chain({"input_documents": docs,
+                             "question": question,
+                             "chat_history": get_chat_history(history)})
+         bot_message = all_output['output_text']
+         #print(docs)
+
+         source = "".join([f"""<details> <summary>{doc.metadata["source"]}</summary>
+ {doc.page_content}
+
+ </details>""" for doc in docs])
+
+         #print(source)
+
+         box_message[-1][1] = bot_message
+         return box_message, "", [[details, source]]
+
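+     # Wire up events: Submit first records the question (user), then generates the
+     # answer (bot); pressing ENTER in the API key box initializes the model; Clear
+     # resets the question, reference, and chat history.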
+     submit.click(user, [query, chatbot], [query, chatbot], queue=False).then(
+         bot, [chatbot, ref, top_k], [chatbot, ref, detail_panel]
+     )
+     api_textbox.submit(init_model, api_textbox, [api_textbox, chatbot])
+     clear.click(lambda: (None, None, None), None, [query, ref, chatbot], queue=False)
+
+ if __name__ == "__main__":
+     demo.launch(share=False, inbrowser=True)
+