Add specific reports selection
- app.py +2 -2
- climateqa/constants.py +2 -0
- climateqa/engine/chains/retrieve_documents.py +4 -0
- climateqa/engine/graph.py +1 -0
app.py
CHANGED

@@ -137,7 +137,7 @@ async def chat(query, history, audience, sources, reports, relevant_content_sour
     if reports is None or len(reports) == 0:
         reports = []

-    inputs = {"user_input": query,"audience": audience_prompt,"sources_input":sources, "relevant_content_sources" : relevant_content_sources, "search_only": search_only}
+    inputs = {"user_input": query,"audience": audience_prompt,"sources_input":sources, "relevant_content_sources" : relevant_content_sources, "search_only": search_only, "reports": reports}
     result = agent.astream_events(inputs,version = "v1")


@@ -182,7 +182,7 @@ async def chat(query, history, audience, sources, reports, relevant_content_sour
                 if not hasattr(history[-1], 'metadata') or history[-1].metadata["title"] != event_description: # if a new step begins
                     history.append(ChatMessage(role="assistant", content = "", metadata={'title' :event_description}))

-            elif event["name"] != "transform_query" and event["event"] == "on_chat_model_stream" and node in ["answer_rag", "answer_search","answer_chitchat"]:# if streaming answer
+            elif event["name"] != "transform_query" and event["event"] == "on_chat_model_stream" and node in ["answer_rag", "answer_rag_no_docs","answer_search","answer_chitchat"]:# if streaming answer
                 history, start_streaming, answer_message_content = stream_answer(history, event, start_streaming, answer_message_content)

             elif event["name"] in ["retrieve_graphs", "retrieve_graphs_ai"] and event["event"] == "on_chain_end":
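The chat handler now forwards the selected reports into the graph state, and streaming also covers the "answer_rag_no_docs" node. A minimal sketch of how the new key reaches the graph, using the same `astream_events` API as app.py (the `agent` object and the literal input values are assumptions for illustration, not part of this commit):

```python
# Sketch only: assumes `agent` is the compiled LangGraph app used in app.py.
inputs = {
    "user_input": "What does the IPCC say about sea level rise?",
    "audience": "expert",
    "sources_input": ["IPCC"],
    "relevant_content_sources": ["Figures (IPCC/IPBES)"],
    "search_only": False,
    "reports": ["IPCC AR6 WGI SPM"],  # new key: an empty list means "search all reports"
}

async def run(agent):
    # Same streaming call as app.py; events from nodes such as "answer_rag"
    # or "answer_rag_no_docs" are the ones rendered into the chat history.
    async for event in agent.astream_events(inputs, version="v1"):
        print(event["event"], event.get("name"))
```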
climateqa/constants.py
CHANGED

@@ -1,4 +1,6 @@
 POSSIBLE_REPORTS = [
+    "IPBES IABWFH SPM",
+    "IPBES CBL SPM",
     "IPCC AR6 WGI SPM",
     "IPCC AR6 WGI FR",
     "IPCC AR6 WGI TS",
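The two IPBES summaries are prepended to the list of selectable reports. As a hypothetical illustration (the component, variable name, and label are not taken from this commit), the constant can back a multiselect picker in the UI, with an empty selection matching the `reports = []` guard in app.py:

```python
import gradio as gr

from climateqa.constants import POSSIBLE_REPORTS

# Hypothetical report picker; leaving it empty falls through to the
# "if reports is None or len(reports) == 0: reports = []" guard in app.py.
dropdown_reports = gr.Dropdown(
    choices=POSSIBLE_REPORTS,
    multiselect=True,
    value=[],
    label="Restrict the search to specific reports",
)
```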
climateqa/engine/chains/retrieve_documents.py
CHANGED

@@ -219,6 +219,7 @@ async def retrieve_documents(state,config, vectorstore,reranker,llm,rerank_by_qu
         docs = state["documents"]
     else:
         docs = []
+
     # Get the related_content from the state
     if "related_content" in state and state["related_content"] is not None:
         related_content = state["related_content"]
@@ -227,6 +228,8 @@ async def retrieve_documents(state,config, vectorstore,reranker,llm,rerank_by_qu

     search_figures = "Figures (IPCC/IPBES)" in state["relevant_content_sources"]
     search_only = state["search_only"]
+
+    reports = state["reports"]

     # Get the current question
     current_question = state["remaining_questions"][0]
@@ -256,6 +259,7 @@ async def retrieve_documents(state,config, vectorstore,reranker,llm,rerank_by_qu
         k_images = k_images_by_question,
         threshold = 0.5,
         search_only = search_only,
+        reports = reports,
     )
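retrieve_documents reads the selection from the state and passes it on as a `reports=` keyword argument; the retrieval helper that receives it is outside this diff. A hypothetical sketch of what such a helper could do with the list (the metadata key "short_name" and the "$in" filter syntax are assumptions, not taken from this commit):

```python
from typing import List, Optional

def build_report_filter(reports: List[str]) -> Optional[dict]:
    """Hypothetical: turn selected report names into a vectorstore metadata filter."""
    # An empty list means "no restriction": search across every indexed report.
    if not reports:
        return None
    # Assumed metadata key and operator; the real filter format depends on
    # the vectorstore backend actually used by the project.
    return {"short_name": {"$in": reports}}
```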
climateqa/engine/graph.py
CHANGED

@@ -44,6 +44,7 @@ class GraphState(TypedDict):
     related_contents : Dict[str,Document]
     recommended_content : List[Document]
     search_only : bool = False
+    reports : List[str] = []

 def search(state): #TODO
     return state