Update app.py
app.py CHANGED

@@ -796,7 +796,7 @@ def submit_message(prompt, prompt_template, temperature, max_tokens, context_len
     completion = chain.invoke({"question":prompt})
     #chain = load_qa_chain(ChatOpenAI(temperature=temperature, max_tokens=max_tokens, model_name="gpt-3.5-turbo"), chain_type="stuff")
     #completion = chain.run(input_documents=docs, question=query)
-
+    print(completion)
 
 
     get_empty_state()
@@ -804,7 +804,8 @@ def submit_message(prompt, prompt_template, temperature, max_tokens, context_len
     #state.append(completion.copy())
 
     completion = { "content": completion }
-
+    print("completion2")
+    print(completion)
 
 
 
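For readers following the change: both hunks sit inside submit_message, where completion is produced by chain.invoke and then wrapped in a dict before being used as the chat response; the commit only adds print() calls to inspect both values. Below is a minimal sketch of that flow. Only chain.invoke({"question": prompt}), the {"content": ...} wrapping, and the added prints come from the diff; the imports, chain construction, and function signature are assumptions for illustration, not the Space's actual code.

# Minimal sketch, not the real app.py: the chain here is a hypothetical
# prompt | llm | parser pipeline; the actual Space builds its chain elsewhere.
from langchain_openai import ChatOpenAI
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser

def submit_message(prompt, temperature=0.7, max_tokens=256):
    # Assumed chain construction (not shown in the diff).
    llm = ChatOpenAI(temperature=temperature, max_tokens=max_tokens, model_name="gpt-3.5-turbo")
    chain = ChatPromptTemplate.from_template("{question}") | llm | StrOutputParser()

    completion = chain.invoke({"question": prompt})
    print(completion)            # debug print added at line 799 of the diff

    completion = {"content": completion}
    print("completion2")         # debug prints added at lines 807-808 of the diff
    print(completion)
    return completion

The prints serve only as quick debugging output in the Space's logs; they do not change what submit_message returns.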