UnnamedUnknownx1234987789489 committed on
Commit
40b7f12
1 Parent(s): 3f69e55

Update functions.py

Browse files
Files changed (1) hide show
  1. functions.py +30 -18
functions.py CHANGED
@@ -91,38 +91,50 @@ def create_retriever_from_chroma(vectorstore_path="docs/chroma/", search_type='m
91
  return retriever
92
 
93
 
94
async def handle_userinput(user_question, custom_graph):
    """Run a user question through the RAG graph and render the exchange.

    Appends the question to the Streamlit chat history, awaits the custom
    graph asynchronously, lists the retrieved context documents in the
    sidebar, and writes the generated answer (if any) back into the chat.
    Any failure is surfaced to the user as a generic error message.
    """
    # Record and echo the user's message in the chat UI.
    st.session_state.messages.append({"role": "user", "content": user_question})
    st.chat_message("user").write(user_question)

    # A fresh thread id per invocation keeps graph state isolated.
    config = {"configurable": {"thread_id": str(uuid.uuid4())}}

    try:
        graph_input = {"question": user_question, "steps": []}
        state_dict = await custom_graph.ainvoke(graph_input, config)

        # Show the context documents the graph retrieved, if any.
        retrieved_docs = state_dict.get("documents", [])
        with st.sidebar:
            st.subheader("Dokumentai, kuriuos Birutė gavo kaip kontekstą")
            with st.spinner("Kraunama..."):
                for document in retrieved_docs:
                    st.write(f"Dokumentas: {document}")

        # Render the assistant's answer only when the graph produced one.
        response = state_dict.get("generation")
        if response:
            st.session_state.messages.append({"role": "assistant", "content": response})
            st.chat_message("assistant").write(response)
    except Exception:
        # Best-effort diagnosis shown to the user: the message presumes
        # either an oversized context or server overload — the actual
        # exception is not inspected here.
        st.chat_message("assistant").write("Klaida: Arba per didelis kontekstas suteiktas modeliui, arba užklausų serveryje yra per daug")
128
 
@@ -691,7 +703,7 @@ def generate(state,QA_chain):
691
  }
692
 
693
 
694
- async def grade_documents(state, retrieval_grader):
695
  question = state["question"]
696
  documents = state["documents"]
697
  steps = state["steps"]
@@ -703,7 +715,7 @@ async def grade_documents(state, retrieval_grader):
703
 
704
  for d in documents:
705
  # Call the grading function
706
- score = await retrieval_grader.ainvoke({"question": question, "documents": d})
707
  print(f"Grader output for document: {score}") # Detailed debugging output
708
 
709
  # Extract the grade
 
91
  return retriever
92
 
93
 
94
def handle_userinput(user_question, custom_graph):
    """Run a user question through the RAG graph and render the exchange.

    Appends the question to the Streamlit chat history, invokes the custom
    graph synchronously, lists the retrieved context documents in the
    sidebar, and writes the generated answer back into the chat. When the
    graph yields no generation, the user is told the question was rejected;
    any exception is reported with a generic error message.
    """
    # Record and echo the user's message in the chat UI.
    st.session_state.messages.append({"role": "user", "content": user_question})
    st.chat_message("user").write(user_question)

    # A fresh thread id per invocation keeps graph state isolated.
    config = {"configurable": {"thread_id": str(uuid.uuid4())}}

    try:
        # Invoke the custom graph with the input question.
        state_dict = custom_graph.invoke(
            {"question": user_question, "steps": []}, config
        )

        # Fix: use .get() with a default so a missing "documents" key no
        # longer raises KeyError and gets misreported by the except-branch
        # below as a context-size/server error (the previous async version
        # already did this).
        docs = state_dict.get("documents", [])
        with st.sidebar:
            st.subheader("Dokumentai, kuriuos Birutė gavo kaip kontekstą")
            with st.spinner("Processing"):
                for doc in docs:
                    # Typo fix in the label: "Documentas" -> "Dokumentas",
                    # matching the Lithuanian spelling used elsewhere here.
                    st.write(f"Dokumentas: {doc}")

        # Render the answer, or explain why none was produced.
        if state_dict.get("generation"):
            response = state_dict["generation"]
            st.session_state.messages.append({"role": "assistant", "content": response})
            st.chat_message("assistant").write(response)
        else:
            # No generation: the graph declined to answer.
            st.chat_message("assistant").write("Your question violates toxicity rules or contains sensitive information.")
    except Exception:
        # Broad catch keeps the UI alive; the message presumes either an
        # oversized context or server overload — the actual exception is
        # not inspected here.
        st.chat_message("assistant").write("Klaida: Arba per didelis kontekstas suteiktas modeliui, arba užklausų serveryje yra per daug")
138
 
139
 
140
 
 
703
  }
704
 
705
 
706
+ def grade_documents(state, retrieval_grader):
707
  question = state["question"]
708
  documents = state["documents"]
709
  steps = state["steps"]
 
715
 
716
  for d in documents:
717
  # Call the grading function
718
+ score = retrieval_grader.invoke({"question": question, "documents": d})
719
  print(f"Grader output for document: {score}") # Detailed debugging output
720
 
721
  # Extract the grade