OuroborosM committed
Commit b85e015 · 2 Parent(s): ad9c00b 2549c67

Merge branch 'main' of https://huggingface.co/spaces/STLA-MECH/STLA-BABY

Files changed (2):
  1. app.py +52 -24
  2. requirements.txt +2 -1
app.py CHANGED
@@ -65,9 +65,12 @@ import langchain
  import asyncio
  from playwright.async_api import async_playwright

- os.environ['MPLCONFIGDIR'] = os.path.expanduser('/home/user/matplotlib_cache')

- async def start_playwright(Question: str):
+
+ from langchain.embeddings.huggingface import HuggingFaceEmbeddings
+
+
+ async def start_playwright(question: str):
  pw = await async_playwright().start()
  browser = await pw.chromium.launch(headless=True)
  # browser = await pw.webkit.launch(headless=True)
@@ -81,7 +84,7 @@ async def start_playwright(Question: str):
  # print("Test content: ", await page.locator("//div[@class='css-zt5igj e1nzilvr3']").inner_html())
  print("Test content: ", await page.locator("//div[@class='css-zt5igj e1nzilvr3']").inner_text())

- await page.locator("//textarea").fill(Question)
+ await page.locator("//textarea").fill("question")
  await page.wait_for_timeout(200)
  # print("Content of Web: ", await page.content())
  # await page.locator("//button[@class='css-1wi2cd3 e1d2x3se3']").click()
@@ -94,8 +97,8 @@ async def start_playwright(Question: str):
  output_history = "NOTHING"
  for i in range(40):
  output_text = await page.locator("//div[@aria-label='Chat message from assistant']").last.inner_text()
- # print("output_text... :", output_text)
- print("checking...")
+ print("output_text... :")
+
  if output_text == output_history and '▌' not in output_text:
  return output_text
  else:
@@ -489,6 +492,12 @@ chat = AzureChatOpenAI(
  deployment_name=os.environ["deployment_name"],
  temperature=0,
  )
+
+
+
+
+
+
  llm = chat

  llm_math = LLMMathChain.from_llm(llm)
@@ -507,7 +516,22 @@ tools2 = [DB_Search2(), duckduckgo_tool2, wikipedia_tool2, python_tool2, math_to

  # tools = load_tools(["Vector Database Search","Wikipedia Search","Python REPL","llm-math"], llm=llm)

- embeddings = OpenAIEmbeddings(deployment="model_embedding", chunk_size=15)
+ # Openai embedding
+ embeddings_openai = OpenAIEmbeddings(deployment="model_embedding", chunk_size=15)
+
+ # huggingface embedding model
+ embed_model_id = 'sentence-transformers/all-MiniLM-L6-v2'
+
+ # device = f'cuda:{cuda.current_device()}' if cuda.is_available() else 'cpu'
+ device = 'cpu'
+ # embed_model = HuggingFaceEmbeddings(
+ # model_name=embed_model_id,
+ # model_kwargs={'device': device},
+ # encode_kwargs={'device': device, 'batch_size': 32}
+ # )
+
+ embeddings = embeddings_openai
+

  # embeddings = OpenAIEmbeddings(deployment="model_embedding_2", chunk_size=15)

@@ -902,32 +926,31 @@ def chathmi4(message, history2):
  print("-" * 20)
  print("-" * 20)

-
- def chatremote(message, historyr):
+ def chatremote(message, history2):
  global last_request
  global Filename_Chatbot
- print("Input Message Remote:", message)
+ print("Input Message:", message)
  last_request = message
- historyr = historyr + [(message, None)]
- yield ["", historyr, gr.update(visible = False), gr.update(visible = True)]
+ history2 = history2 + [(message, None)]
+ yield ["", history2, gr.update(visible = False), gr.update(visible = True)]
  # yield ["", history2, "SUBMIT", "STOP"]
  try:
  # response = agent.run(message)
  response = asyncio.run(start_playwright(message))
  time.sleep(0.1)
- historyr = historyr + [(None, response)]
- yield ["", historyr, gr.update(visible = True), gr.update(visible = False)]
+ history2 = history2 + [(None, response)]
+ yield ["", history2, gr.update(visible = True), gr.update(visible = False)]
  # yield ["", history2, None, None]
- print ("response of chatbot with remote:", response)
+ print ("response of chatbot remote:", response)
  # real_content = response[-1:]
  # print("real_content", real_content)
  try:
  temp = response.split("(sandbox:/")[1] # (sandbox:/sample-20230805-0807.wav)
  file_name = temp.split(")")[0]
  print("file_name:", file_name)
- historyr = historyr + [(None, (file_name,))]
+ history2 = history2 + [(None, (file_name,))]
  Filename_Chatbot = file_name
- yield ["", historyr, "SUBMIT", "STOP"]
+ yield ["", history2, "SUBMIT", "STOP"]
  except:
  print("No need to add file in chatbot")

@@ -936,11 +959,10 @@ def chatremote(message, historyr):

  # history = history + [(message, None)]

- print("Historyr: ", historyr)
+ print("History2: ", history2)
  print("-" * 20)
  print("-" * 20)
-
-
+
  def fake(message, history4):
  pass

@@ -1120,7 +1142,7 @@ with gr.Blocks() as demo:

  with gr.Column() as main2:
  title = gr.Markdown("""# <center> STLA BABY - YOUR FRIENDLY GUIDE
- <center> v0.5: Powered by MECH Core Team"""),
+ <center> v0.6: Powered by MECH Core Team - GPT4 REMOTE MODE"""),
  chatbot = gr.Chatbot()
  with gr.Row():
  inputtext = gr.Textbox(
@@ -1183,9 +1205,15 @@
  # then(playsound, None, voice_output).\
  # then(HMI_Wait, None, [submit_button, stop_button])
  # inf4 = inputtext.submit(chathmi4, [inputtext, chatbot], [inputtext, chatbot, submit_button, stop_button])
- inf4 = inputtext.submit(chathmi4, [inputtext, chatbot], [inputtext, chatbot, submit_button, stop_button]).\
- success(playsound1, None, voice_output)#.\
+ ''' open ai '''
+ # inf4 = inputtext.submit(chathmi4, [inputtext, chatbot], [inputtext, chatbot, submit_button, stop_button]).\
+ # success(playsound1, None, voice_output)#.\
  # success(ClearAudio, None, voice_output)
+
+ ''' GPT4 Remote '''
+ inf4 = inputtext.submit(chatremote, [inputtext, chatbot], [inputtext, chatbot, submit_button, stop_button]).\
+ success(playsound1, None, voice_output)
+
  inf3 = submit_button.click(chathmi3, [inputtext, chatbot], [inputtext, chatbot]).\
  success(HMI_Runing, None, [submit_button, stop_button]).\
  success(playsound1, None, voice_output).\
@@ -1319,8 +1347,8 @@ if __name__ == '__main__':
  # QAQuery("what is COFOR ?")
  # CreatDb_P()
  # QAQuery_p("what is GST ?")
- Question = "what is PDP?"
- asyncio.run(start_playwright(Question))
+ question = "what is PDP?"
+ output = asyncio.run(start_playwright(question))
  if SysLock == "1":
  demo.queue().launch(auth=(username, password), server_name="0.0.0.0", server_port=7860)
  else:
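The reworked start_playwright() drives a remote chat page with Playwright and polls the last assistant message until the text stops changing and the streaming cursor (▌) disappears; note that the merged hunk fills the textarea with the literal string "question" rather than the question argument. Below is a minimal, self-contained sketch of that polling pattern. The selectors and the 40-iteration limit come from the diff; the target URL, the Enter-key submission (the button click is commented out in app.py), and the 500 ms poll interval are assumptions for illustration.

```python
# Minimal sketch of the polling pattern used by start_playwright().
# Assumptions: placeholder URL, Enter-key submission, 500 ms poll interval.
import asyncio
from playwright.async_api import async_playwright

async def ask_remote_chat(question: str, url: str = "https://example.com/chat") -> str:
    pw = await async_playwright().start()
    browser = await pw.chromium.launch(headless=True)
    page = await browser.new_page()
    await page.goto(url)

    # Fill the actual question (app.py currently fills the literal "question").
    await page.locator("//textarea").fill(question)
    await page.keyboard.press("Enter")  # assumed submit mechanism

    output_history = "NOTHING"
    answer = ""
    for _ in range(40):
        answer = await page.locator(
            "//div[@aria-label='Chat message from assistant']"
        ).last.inner_text()
        # Done once the text is stable and the streaming cursor is gone.
        if answer == output_history and '▌' not in answer:
            break
        output_history = answer
        await page.wait_for_timeout(500)  # poll interval (assumed)

    await browser.close()
    await pw.stop()
    return answer
```

Mirroring the new __main__ block, this could be exercised with print(asyncio.run(ask_remote_chat("what is PDP?"))).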
requirements.txt CHANGED
@@ -14,4 +14,5 @@ pygame
  unstructured
  openpyxl
  python-docx
- playwright
+ playwright
+ sentence_transformers
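requirements.txt now pulls in sentence_transformers, which backs the HuggingFace embedding path that app.py imports but leaves commented out (the code still assigns embeddings = embeddings_openai). Below is a minimal sketch of what enabling that path could look like, assuming the pinned LangChain release exposes HuggingFaceEmbeddings under langchain.embeddings.huggingface as imported in the diff; the embed_query call and dimension check are illustrative only.

```python
# Minimal sketch of the commented-out HuggingFace embedding path from app.py.
# Assumption: the pinned langchain version provides
# langchain.embeddings.huggingface.HuggingFaceEmbeddings (as imported in the diff).
from langchain.embeddings.huggingface import HuggingFaceEmbeddings

embed_model_id = 'sentence-transformers/all-MiniLM-L6-v2'
device = 'cpu'  # the CUDA device selection is commented out in app.py

embed_model = HuggingFaceEmbeddings(
    model_name=embed_model_id,
    model_kwargs={'device': device},
    encode_kwargs={'device': device, 'batch_size': 32},
)

# Swap this in for embeddings_openai to embed locally instead of via Azure OpenAI.
embeddings = embed_model
vector = embeddings.embed_query("what is PDP?")
print(len(vector))  # all-MiniLM-L6-v2 produces 384-dimensional vectors
```

Switching embeddings to this model would decouple the vector store from the Azure OpenAI embedding deployment, at the cost of downloading the model on first start of the Space.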