pvanand committed
Commit 9720994
1 Parent(s): 4a4b7af

Update main.py

Files changed (1)
main.py +4 -4
main.py CHANGED
@@ -646,12 +646,12 @@ async def followup_agent_v4(query: FollowupQueryModel, background_tasks: Backgro
 
 
     async def process_response():
-        yield "<followup-response>+\n"
+        yield "<followup-response>"+"\n"
         full_response = ""
         for content in chat_with_llama_stream(limited_conversation, model=query.model_id):
             full_response += content
             yield content
-        yield "</followup-response>+\n"
+        yield "</followup-response>"+"\n"
         yield "--END_SECTION--\n"
 
         logger.info(f"LLM RAW response for query: {query.query}: {full_response}")
@@ -662,9 +662,9 @@ async def followup_agent_v4(query: FollowupQueryModel, background_tasks: Backgro
             "clarification": interact
         }
 
-        yield "<followup-json> + "\n"
+        yield "<followup-json>" + "\n"
         yield json.dumps(result) + "\n"
-        yield "</followup-json> +"\n"
+        yield "</followup-json>" +"\n"
         yield "--END_SECTION--\n"
         # Add the assistant's response to the conversation history
         conversations[query.conversation_id].append({"role": "assistant", "content": full_response})
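
Net effect of the change: the stray "+" and misplaced quotes that previously ended up inside (or broke) the framing strings are replaced by plain string concatenation, so the stream now emits clean <followup-response> and <followup-json> tags, each followed by a newline, with each block terminated by "--END_SECTION--". Below is a minimal, self-contained sketch of that framing and of how a consumer might split the stream on the delimiter; fake_llm_stream and the placeholder result dict are stand-ins for illustration, not code from this repository.

import json

def fake_llm_stream():
    # Stand-in for chat_with_llama_stream(...): yields token chunks.
    yield "Hello, "
    yield "world."

def process_response_sketch():
    # Mirrors the framing produced by the fixed yields above.
    yield "<followup-response>" + "\n"
    full_response = ""
    for content in fake_llm_stream():
        full_response += content
        yield content
    yield "</followup-response>" + "\n"
    yield "--END_SECTION--\n"

    # Placeholder; in main.py the result is built from the parsed LLM output.
    result = {"clarification": "none"}
    yield "<followup-json>" + "\n"
    yield json.dumps(result) + "\n"
    yield "</followup-json>" + "\n"
    yield "--END_SECTION--\n"

if __name__ == "__main__":
    raw = "".join(process_response_sketch())
    # A client can recover the two blocks by splitting on the delimiter.
    sections = [s for s in raw.split("--END_SECTION--\n") if s]
    print(sections[0])  # <followup-response> ... </followup-response>
    print(sections[1])  # <followup-json> ... </followup-json>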