Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -712,7 +712,7 @@ chain = ConversationalRetrievalChain.from_llm(
|
|
712 |
|
713 |
# let's invoke the chain
|
714 |
response = chain.invoke({"question":"what does Google stand for?"})
|
715 |
-
print(response)
|
716 |
|
717 |
|
718 |
chain.memory.load_memory_variables({})
|
@@ -809,7 +809,7 @@ def submit_message(prompt, prompt_template, temperature, max_tokens, context_len
|
|
809 |
|
810 |
|
811 |
#chat_messages = [(prompt_msg['content'], completion['content'])]
|
812 |
-
chat_messages = [(prompt, completion['content'])]
|
813 |
return '', chat_messages, state # total_tokens_used_msg,
|
814 |
|
815 |
|
|
|
712 |
|
713 |
# let's invoke the chain
|
714 |
response = chain.invoke({"question":"what does Google stand for?"})
|
715 |
+
print(response['answer'])
|
716 |
|
717 |
|
718 |
chain.memory.load_memory_variables({})
|
|
|
809 |
|
810 |
|
811 |
#chat_messages = [(prompt_msg['content'], completion['content'])]
|
812 |
+
chat_messages = [(prompt, completion['answer'])]
|
813 |
return '', chat_messages, state # total_tokens_used_msg,
|
814 |
|
815 |
|