Update main.py
main.py
CHANGED
@@ -595,13 +595,13 @@ async def followup_agent(query: FollowupQueryModel, background_tasks: Background
 
 
     def process_response():
-        yield "<followup-response>\n"
+        yield "<followup-response>\n\n"
         full_response = ""
         for content in chat_with_llama_stream(limited_conversation, model=query.model_id):
             full_response += content
             yield content
 
-        yield "</followup-response>\n"
+        yield "</followup-response>\n\n"
 
         logger.info(f"LLM RAW response for query: {query.query}: {full_response}")
         response_content, interact,tools = parse_followup_and_tools(full_response)
@@ -610,9 +610,9 @@ async def followup_agent(query: FollowupQueryModel, background_tasks: Background
             "clarification": interact
         }
 
-        yield "<followup-json>\n"
-        yield json.dumps(result) + "\n"
-        yield "</followup-json>\n"
+        yield "<followup-json>\n\n"
+        yield json.dumps(result) + "\n\n"
+        yield "</followup-json>\n\n"
 
         # Add the assistant's response to the conversation history
        conversations[query.conversation_id].append({"role": "assistant", "content": full_response})
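The only change in this commit is that each framing marker is now followed by a blank line ("\n\n" instead of "\n"), which likely makes the markers easier to pick out when a client buffers the stream. As a minimal, hypothetical sketch of how a consumer might split that framed output (the helper split_followup_stream and the simulated chunks below are illustrative and not part of main.py):

import json
import re

def split_followup_stream(raw: str):
    """Split one buffered stream into (answer_text, payload_dict) using the
    <followup-response> / <followup-json> markers yielded by the endpoint."""
    text_match = re.search(r"<followup-response>\n+(.*?)</followup-response>", raw, re.DOTALL)
    json_match = re.search(r"<followup-json>\n+(.*?)</followup-json>", raw, re.DOTALL)
    answer = text_match.group(1).strip() if text_match else raw.strip()
    payload = json.loads(json_match.group(1)) if json_match else None
    return answer, payload

if __name__ == "__main__":
    # Simulated chunks in the shape the endpoint now emits (note the blank
    # line after each marker introduced by the "\n\n" change).
    chunks = [
        "<followup-response>\n\n",
        "Could you share the error log?",
        "</followup-response>\n\n",
        "<followup-json>\n\n",
        json.dumps({"clarification": True}) + "\n\n",
        "</followup-json>\n\n",
    ]
    answer, payload = split_followup_stream("".join(chunks))
    print(answer)   # Could you share the error log?
    print(payload)  # {'clarification': True}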