TillLangbein committed on
Commit 6ae72b8 · 1 Parent(s): b585750

f-string bugfix
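Context for the fix (a minimal sketch, not part of the commit): before Python 3.12 (PEP 701), an f-string delimited with double quotes could not reuse double quotes inside its replacement fields, so the removed lines raised a SyntaxError on older interpreters. The commit switches the inner dictionary keys to single quotes. Illustrative example with a hypothetical metadata dict standing in for doc.metadata:

# Illustrative only; `metadata` is a stand-in for doc.metadata in app.py.
metadata = {"source": "DORA", "section": "Article 5"}

# Before the fix (SyntaxError on Python < 3.12, where an f-string
# cannot reuse its own quote character inside a replacement field):
#   f"***{metadata["source"]} section {metadata["section"]}***"

# After the fix: single quotes inside, double quotes outside.
label = f"***{metadata['source']} section {metadata['section']}***"
print(label)  # -> ***DORA section Article 5***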

Files changed (1)
  1. app.py +5 -5
app.py CHANGED
@@ -360,9 +360,9 @@ def generate_response(question: str, dora: bool, rts: bool, news: bool):
     state = app.invoke({"question": question, "selected_sources": selected_sources})
     return (
         state["generation"],
-        ('\n\n'.join([f"***{doc.metadata["source"]} section {doc.metadata['section']}***: {doc.page_content}" for doc in state["dora_docs"]])) if "dora_docs" in state and state["dora_docs"] else 'No documents available.',
-        ('\n\n'.join([f"***{doc.metadata["source"]}, section {doc.metadata['section']}***: {doc.page_content}" for doc in state["dora_rts_docs"]])) if "dora_rts_docs" in state and state["dora_rts_docs"] else 'No documents available.',
-        ('\n\n'.join([f"***{doc.metadata["source"]}***: {doc.page_content}" for doc in state["dora_news_docs"]])) if "dora_news_docs" in state and state["dora_news_docs"] else 'No documents available.',
+        ('\n\n'.join([f"***{doc.metadata['source']} section {doc.metadata['section']}***: {doc.page_content}" for doc in state["dora_docs"]])) if "dora_docs" in state and state["dora_docs"] else 'No documents available.',
+        ('\n\n'.join([f"***{doc.metadata['source']}, section {doc.metadata['section']}***: {doc.page_content}" for doc in state["dora_rts_docs"]])) if "dora_rts_docs" in state and state["dora_rts_docs"] else 'No documents available.',
+        ('\n\n'.join([f"***{doc.metadata['source']}***: {doc.page_content}" for doc in state["dora_news_docs"]])) if "dora_news_docs" in state and state["dora_news_docs"] else 'No documents available.',
     )
 
 def show_loading(prompt: str):
@@ -392,7 +392,7 @@ def run_gradio():
     # Adding a sliding navbar
     with gr.Column(scale=1, elem_id='navbar'):
         gr.Image(
-            '..\\deployment\\logo.png',
+            './logo.png',
             interactive=False,
             show_label=False,
             scale=1,
@@ -480,7 +480,7 @@ if __name__ == "__main__":
 
     dora_question_rewriter = IMPROVE_PROMPT | tool_llm | StrOutputParser()
     retrieval_grader = RELEVANCE_PROMPT | fast_llm.with_structured_output(GradeDocuments)
-    answer_chain = ANSWER_PROMPT | smart_llm | StrOutputParser() #former RAG chain
+    answer_chain = ANSWER_PROMPT | tool_llm | StrOutputParser() #former RAG chain
     hallucination_grader = HALLUCINATION_PROMPT | fast_llm.with_structured_output(GradeHallucinations)
     answer_grader = RESOLVER_PROMPT | fast_llm.with_structured_output(GradeAnswer)
     question_rewriter = REWRITER_PROMPT | rewrite_llm | StrOutputParser()