Update app.py
app.py CHANGED
@@ -677,7 +677,7 @@ def answer_template(language="english"):
 
 
 
-
+answer_prompt = ChatPromptTemplate.from_template(answer_template())
 
 
 
@@ -686,7 +686,7 @@ chain = ConversationalRetrievalChain.from_llm(
     condense_question_prompt=PromptTemplate(
         input_variables=['chat_history', 'question'],
         template=standalone_question_template),
-    combine_docs_chain_kwargs={'prompt':
+    combine_docs_chain_kwargs={'prompt': answer_prompt},
     condense_question_llm=instantiate_LLM(
         LLM_provider="Google",api_key=google_api_key,temperature=0.3,
         model_name="gemini-pro"),
@@ -827,9 +827,9 @@ def submit_message(prompt, prompt_template, temperature, max_tokens, context_len
     if selected_document is not None:
         # Remove the "/home/user/app/" part from the document name
         modified_source = selected_document.metadata['source'].replace('/home/user/app/', '').replace('.pdf', '')
-        source_info = f"\n**
+        source_info = f"\n**Lead source:** {modified_source}, **Page:** {selected_document.metadata['page']}"
     else:
-        source_info = "
+        source_info = "Lead source: not determined"
 
     #chat_messages = [(prompt_msg['content'], completion['content'])]
     chat_messages = [(prompt, completion['content']['answer'] + source_info )]
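
For context, here is a minimal sketch of how the changed lines fit together once this commit is applied. It assumes the app's own helpers and globals (`answer_template()`, `standalone_question_template`, `instantiate_LLM()`, `google_api_key`, a `retriever` built from the vectorstore, and a main `answer_llm`); only the `answer_prompt` assignment and the `combine_docs_chain_kwargs` line are taken directly from the diff.

```python
from langchain.chains import ConversationalRetrievalChain
from langchain.prompts import ChatPromptTemplate, PromptTemplate

# New in this commit: build the answer prompt once from the app's template helper.
answer_prompt = ChatPromptTemplate.from_template(answer_template())

chain = ConversationalRetrievalChain.from_llm(
    llm=answer_llm,            # assumed: the main answer LLM, created elsewhere via instantiate_LLM(...)
    retriever=retriever,       # assumed: retriever built from the vectorstore earlier in app.py
    condense_question_prompt=PromptTemplate(
        input_variables=['chat_history', 'question'],
        template=standalone_question_template),
    # The fixed line: pass the answer prompt explicitly to the combine-docs chain.
    combine_docs_chain_kwargs={'prompt': answer_prompt},
    condense_question_llm=instantiate_LLM(
        LLM_provider="Google", api_key=google_api_key, temperature=0.3,
        model_name="gemini-pro"),
    return_source_documents=True,  # assumed: needed so source metadata is available downstream
)
```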
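And a hedged sketch of the source-attribution change in `submit_message()`. The diff does not show how `selected_document` is chosen, so taking the first returned source document is only an illustrative stand-in; the metadata formatting and the final chat tuple come from the new lines above.

```python
# Ask the chain a question; with return_source_documents=True the response also
# carries the retrieved Documents. Passing chat_history explicitly here stands in
# for whatever memory handling the real app uses; prompt is the user's message
# from the chat UI (assumed).
response = chain({"question": prompt, "chat_history": []})

# Illustrative stand-in: pick the first source document.
source_documents = response.get("source_documents") or []
selected_document = source_documents[0] if source_documents else None

if selected_document is not None:
    # Remove the Space's working directory and the ".pdf" extension from the path.
    modified_source = (selected_document.metadata['source']
                       .replace('/home/user/app/', '')
                       .replace('.pdf', ''))
    source_info = f"\n**Lead source:** {modified_source}, **Page:** {selected_document.metadata['page']}"
else:
    source_info = "Lead source: not determined"

# In app.py the chain output is wrapped as completion['content']; here the chain
# output is used directly. The chat entry shows the answer with its source appended.
chat_messages = [(prompt, response['answer'] + source_info)]
```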