Spaces: arslan-ahmed committed • Commit 918a154
Parent(s): bec46de

added user modes

Files changed:
- app.py +21 -20
- ttyd_consts.py +44 -4
app.py
CHANGED
@@ -27,18 +27,13 @@ from ttyd_consts import *
 
 ###############################################################################################
 
-# You want to hardcode Documents or take it from UI?
-UiAddData = True
 
-
-
+# selct the mode from ttyd_consts.py
+mode = mode_general
 
-
-url_list = ['https://www.nustianusa.org', 'https://www.nustian.ca']
+if mode.name!='general':
 # local vector store as opposed to gradio state vector store
-vsDict_hard = localData_vecStore(os.getenv("OPENAI_API_KEY"), url_list=url_list)
-md_title = md_title_nustian
-
+    vsDict_hard = localData_vecStore(os.getenv("OPENAI_API_KEY"), inputDir=mode.inputDir, file_list=mode.file_list, url_list=mode.url_list)
 
 ###############################################################################################
 
@@ -66,20 +61,23 @@ def initializeChatbot(temp, k, modelName, stdlQs, api_key_st, vsDict_st, progres
     qa_chain_st = updateQaChain(temp, k, modelName, stdlQs, api_key_st, vsDict_st)
     progress(0.5, waitText_initialize)
     #generate welcome message
-
+    if mode.welcomeMsg:
+        welMsg = mode.welcomeMsg
+    else:
+        welMsg = qa_chain_st({'question': initialize_prompt, 'chat_history':[]})['answer']
 
     # exSamples = generateExamples(api_key_st, vsDict_st)
     # exSamples_vis = True if exSamples[0] else False
 
     return qa_chain_st, btn.update(interactive=True), initChatbot_btn.update('Chatbot ready. Now visit the chatbot Tab.', interactive=False)\
-        , aKey_tb.update(), gr.Tabs.update(selected='cb'), chatbot.update(value=[('',
+        , aKey_tb.update(), gr.Tabs.update(selected='cb'), chatbot.update(value=[('', welMsg)])
 
 
 def setApiKey(api_key):
     if api_key==os.getenv("TEMP_PWD") and os.getenv("OPENAI_API_KEY") is not None:
         api_key=os.getenv("OPENAI_API_KEY")
     try:
-
+        api_key='Null' if api_key is None or api_key=='' else api_key
         openai.Model.list(api_key=api_key) # test the API key
         api_key_st = api_key
 
@@ -108,7 +106,7 @@ def uiData_vecStore(userFiles, userUrls, api_key_st, vsDict_st={}, progress=gr.P
     docs = split_docs(documents)
     # Embeddings
     try:
-
+        api_key_st='Null' if api_key_st is None or api_key_st=='' else api_key_st
        openai.Model.list(api_key=api_key_st) # test the API key
        embeddings = OpenAIEmbeddings(openai_api_key=api_key_st)
     except Exception as e:
@@ -126,7 +124,7 @@ def uiData_vecStore(userFiles, userUrls, api_key_st, vsDict_st={}, progress=gr.P
 # just update the QA Chain, no updates to any UI
 def updateQaChain(temp, k, modelName, stdlQs, api_key_st, vsDict_st):
     # if we are not adding data from ui, then use vsDict_hard as vectorstore
-    if vsDict_st=={} and
+    if vsDict_st=={} and mode.name!='general': vsDict_st=vsDict_hard
     modelName = modelName.split('(')[0].strip() # so we can provide any info in brackets
     # check if the input model is chat model or legacy model
     try:
@@ -183,7 +181,7 @@ with gr.Blocks(theme=gr.themes.Default(primary_hue='orange', secondary_hue='gray
 
 
     # Setup the Gradio Layout
-    gr.Markdown(
+    gr.Markdown(mode.title)
     with gr.Tabs() as tabs:
         with gr.Tab('Initialization', id='init'):
             with gr.Row():
@@ -192,13 +190,13 @@ with gr.Blocks(theme=gr.themes.Default(primary_hue='orange', secondary_hue='gray
                     , info='You can find OpenAI API key at https://platform.openai.com/account/api-keys'\
                     , placeholder='Enter your API key here and hit enter to begin chatting')
                 aKey_btn = gr.Button("Submit API Key")
-            with gr.Row(visible=
+            with gr.Row(visible=mode.uiAddDataVis):
                 upload_fb = gr.Files(scale=5, label="Upload (multiple) Files - pdf/txt/docx supported", file_types=['.doc', '.docx', 'text', '.pdf', '.csv'])
                 urls_tb = gr.Textbox(scale=5, label="Enter URLs starting with https (comma separated)"\
                     , info=url_tb_info\
                     , placeholder=url_tb_ph)
                 data_ingest_btn = gr.Button("Load Data")
-            status_tb = gr.TextArea(label='Status bar', show_label=False, visible=
+            status_tb = gr.TextArea(label='Status bar', show_label=False, visible=mode.uiAddDataVis)
             initChatbot_btn = gr.Button("Initialize Chatbot", variant="primary")
 
         with gr.Tab('Chatbot', id='cb'):
@@ -215,7 +213,7 @@ with gr.Blocks(theme=gr.themes.Default(primary_hue='orange', secondary_hue='gray
             with gr.Row():
                 with gr.Column():
                     temp_sld = gr.Slider(minimum=0, maximum=1, step=0.1, value=0.7, label="Temperature", info='Sampling temperature to use when calling LLM. Defaults to 0.7')
-                    k_sld = gr.Slider(minimum=1, maximum=10, step=1, value=
+                    k_sld = gr.Slider(minimum=1, maximum=10, step=1, value=mode.k, label="K", info='Number of relavant documents to return from Vector Store. Defaults to 4')
                     model_dd = gr.Dropdown(label='Model Name'\
                         , choices=model_dd_choices\
                         , value=model_dd_choices[0], allow_custom_value=True\
@@ -240,9 +238,12 @@ with gr.Blocks(theme=gr.themes.Default(primary_hue='orange', secondary_hue='gray
     k_sld.release(**advSet_args)
     model_dd.change(**advSet_args)
     stdlQs_rb.change(**advSet_args)
-
+
     # Initialize button
-
+    initCb_args = {'fn':initializeChatbot, 'inputs':[temp_sld, k_sld, model_dd, stdlQs_rb, api_key_state, chromaVS_state], 'outputs':[qa_state, btn, initChatbot_btn, aKey_tb, tabs, chatbot]}
+    if mode.loadUi=='chatbot':
+        demo.load(**initCb_args) # load Chatbot UI directly on startup
+    initChatbot_btn.click(**initCb_args)
 
     # Chatbot submit button
     chat_btn_args = {'fn':respond, 'inputs':[msg, chatbot, qa_state], 'outputs':[msg, chatbot, srcDocs, btn]}
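Note: the mode.loadUi branch in the last hunk wires one callback dict to both demo.load() and the Initialize button, so a mode such as 'arslan' can have the chatbot ready as soon as the page opens. A minimal self-contained sketch of that pattern (not part of this commit; assumes Gradio 3.x as used by the Space, with a stand-in initializer instead of initializeChatbot):

    import gradio as gr

    def init_chatbot():
        # stand-in for initializeChatbot() in app.py
        return "Chatbot ready. Now visit the chatbot Tab."

    with gr.Blocks() as demo:
        status = gr.Textbox(label="Status")
        init_btn = gr.Button("Initialize Chatbot")
        init_args = {'fn': init_chatbot, 'inputs': [], 'outputs': [status]}
        load_ui = 'chatbot'          # stand-in for mode.loadUi
        if load_ui == 'chatbot':
            demo.load(**init_args)   # run the initializer as soon as the page loads
        init_btn.click(**init_args)  # the button still works in every mode

    # demo.launch()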
ttyd_consts.py
CHANGED
@@ -2,9 +2,9 @@ exp_query = 'Generate top 5 questions that I can ask about this data. Questions
 
 waitText_initialize = 'Preparing the documents, please wait...'
 
-initialize_prompt = 'Write a short welcome message to the user. Describe the documents with a
-
-
+initialize_prompt = 'Write a short welcome message to the user. Describe the documents with a comprehensive overview including short summary.\
+If these documents are about a person, mention his name instead of using pronouns. After describing the overview, you should mention top 3 example questions that the user can ask about this data.\
+\n\nYour response should be short and precise. Format of your response should be Summary:\n{Description and Summary} \n\n Example Questions:\n{Example Questions}'
 
 nustian_exps = ['Tell me about NUSTIAN',
                 'Who is the NUSTIAN regional lead for Silicon Valley?',
@@ -47,4 +47,44 @@ md_title_nustian = """
 Step 2) Click Initialize Chatbot to start sending messages.<br>
 
 You may also play around with Advanced Settings, like changing the model name and parameters.
-"""
+"""
+
+md_title_arslan = """
+## Talk to Arslan<br>
+Welcome to Arslan Ahmed's Chatbot!<br>
+This is LLM-based question-answer application built using Retrieval Augmented Generation (RAG) approach with Langchain, implementing Generative AI technology.\
+He has developed this application to help people get quick answers on frequently asked questions and topics, rather than waiting for his personal reply.\
+Currently, this chatbot is trained on Arslan's resume and LinkedIn profile, with plans to incorporate additional data in the future.<br><br>
+By default, this chatbot is powered by OpenAI's Large Language Model gpt-3.5-turbo. For those interested to explore, there are options under Advanced Settings to change the model and its parameters.
+"""
+
+
+welcomeMsgArslan = """Summary: The document provides a comprehensive overview of Arslan Ahmed\'s professional background and expertise as a data scientist.\
+It highlights his experience in various industries and his proficiency in a wide range of data analysis tools and techniques.\
+The document also mentions his involvement in research projects, publications, and academic achievements.\
+\n\nExample Questions:
+1. What are some of the key projects that Arslan has worked on as a data scientist?
+2. What tools and technologies did Arslan Ahmed utilize in his data science work at IBM?
+3. Tell me about Arslan's educational background.
+"""
+
+
+class TtydMode():
+    def __init__(self, name='', title='', ui='initialize', type='', dir=None, files=[], urls=[], vis=False, welMsg='', def_k=4):
+        self.name = name
+        self.title = title
+        self.loadUi = ui
+        self.type = type
+        self.inputDir=dir
+        self.file_list=files
+        self.url_list=urls
+        self.uiAddDataVis = vis
+        self.welcomeMsg = welMsg
+        self.k = def_k
+
+
+
+
+mode_general = TtydMode(name='general', title=md_title_general, vis=True)
+mode_nustian = TtydMode(name='nustian', title=md_title_nustian, urls=['https://nustianusa.org', 'https://nustian.ca'])
+mode_arslan = TtydMode(name='arslan', ui='chatbot', title=md_title_arslan, dir='./documents/', welMsg=welcomeMsgArslan, def_k=8)
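For reference, a small self-contained sketch of how the new TtydMode objects are meant to be consumed. The class is copied from the hunk above (with comments added describing how app.py uses each field per this commit); mode_myorg, its title, and its URL are hypothetical placeholders for a custom mode, not part of the commit:

    class TtydMode():
        def __init__(self, name='', title='', ui='initialize', type='', dir=None, files=[], urls=[], vis=False, welMsg='', def_k=4):
            self.name = name             # any name other than 'general' makes app.py build the hardcoded local vector store
            self.title = title           # markdown rendered via gr.Markdown(mode.title)
            self.loadUi = ui             # 'chatbot' makes app.py call demo.load() so the bot is ready on startup
            self.type = type
            self.inputDir = dir          # local documents folder passed to localData_vecStore()
            self.file_list = files
            self.url_list = urls         # URLs scraped into the vector store
            self.uiAddDataVis = vis      # shows/hides the upload/URL row and the status bar
            self.welcomeMsg = welMsg     # fixed welcome message; empty means app.py asks the QA chain for one
            self.k = def_k               # default value of the K slider (documents retrieved per query)

    # Hypothetical custom mode, by analogy with mode_nustian and mode_arslan:
    mode_myorg = TtydMode(name='myorg', title='## Talk to MyOrg', urls=['https://example.org'], def_k=6)

    # app.py would then select it near the top of the file:
    mode = mode_myorg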