alemarino2025 committed
Commit 1c80c9f · verified · 1 Parent(s): e6bf77b

Update app.py

Files changed (1): app.py (+6, -5)
app.py CHANGED
@@ -181,9 +181,9 @@ conversation_history = []
 def predict(user_input, timeout_seconds=1800): # 30 minutes = 1800 seconds
 
 filter = "/content/drive/My Drive/Colab Notebooks/medical/Conoscenze-unito.pdf"
-relevant_document_chunks = vectorstore.similarity_search(user_input, k=10,filter={"source": filter})
+relevant_document_chunks = vectorstore.similarity_search(user_input, k=10)
 context_list = [d.page_content + "\ ###Page: " + str(d.metadata['page']) + "\n\n " for d in relevant_document_chunks]
-context_for_query = ".".join(context_list) + "this is all thhe context I have"
+context_for_query = ".".join(context_list) + "this is all the context I have"
 
 global conversation_history
 
@@ -232,10 +232,11 @@ def predict(user_input, timeout_seconds=1800): # 30 minutes = 1800 seconds
 # if user_input.lower() in ["quit", "exit"]:
 #     break
 
+save_feedback(user_input, context_for_query, prediction)
 
-prediction = bot_response.choices[0].message.content.strip()
+prediction = response.choices[0].message.content.strip()
 
-save_feedback(user_input, context_for_query, prediction)
+
 
 #except Exception as e:
 #     prediction = str(e)
@@ -244,7 +245,7 @@ def predict(user_input, timeout_seconds=1800): # 30 minutes = 1800 seconds
 # while writing to the log file, ensure that the commit scheduler is locked to avoid parallel
 # access
 
-return prediction
+return prediction, context_for_query
 
 # Set-up the Gradio UI
 # Add text box.
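For orientation, below is a minimal sketch of how the retrieval-and-response flow could read after this change. It is an assumption-heavy reconstruction, not the actual app.py: vectorstore, conversation_history, response, context_for_query, and save_feedback are names taken from the diff, while the client object, model name, and message layout stand in for code not shown here; the sketch also computes prediction before calling save_feedback so the logged value is defined.

# Hypothetical sketch of predict() after this commit (not the real app.py).
# Assumes a LangChain-style `vectorstore` and an OpenAI-style `client`.
conversation_history = []

def predict(user_input, timeout_seconds=1800):  # 30 minutes = 1800 seconds
    global conversation_history

    # The commit drops the {"source": ...} filter, so retrieval now searches
    # every indexed document rather than only Conoscenze-unito.pdf.
    relevant_document_chunks = vectorstore.similarity_search(user_input, k=10)

    # Each chunk keeps its page number so the answer can cite pages.
    context_list = [
        d.page_content + " ###Page: " + str(d.metadata["page"]) + "\n\n"
        for d in relevant_document_chunks
    ]
    context_for_query = ".".join(context_list) + "this is all the context I have"

    conversation_history.append({"role": "user", "content": user_input})

    # Hypothetical chat call; the diff only shows the `response` object being read.
    response = client.chat.completions.create(
        model="gpt-4o",  # assumption: the model name is not visible in this diff
        messages=[{"role": "system", "content": context_for_query}, *conversation_history],
        timeout=timeout_seconds,
    )

    prediction = response.choices[0].message.content.strip()
    save_feedback(user_input, context_for_query, prediction)  # log query, context, answer

    # The commit changes the return value to a tuple, so the Gradio UI
    # presumably needs a second output component to display the context.
    return prediction, context_for_query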