ryanrwatkins committed on
Commit c61a6f8 · verified · 1 Parent(s): f8a900d

Update app.py

Files changed (1)
  1. app.py +59 -1
app.py CHANGED
@@ -668,7 +668,65 @@ follow_up_question = "please give more details about it, including its use cases
 chain.invoke({"question":follow_up_question})['answer']
 
 
+from langchain.chains import ConversationalRetrievalChain
 
+def create_ConversationalRetrievalChain(
+    llm, condense_question_llm,
+    retriever,
+    chain_type='stuff',
+    language="english",
+    model_name='gpt-3.5-turbo'
+):
+    """Create a ConversationalRetrievalChain.
+    First, it passes the follow-up question along with the chat history to an LLM which rephrases
+    the question and generates a standalone query.
+    This query is then sent to the retriever, which fetches relevant documents (context)
+    and passes them along with the standalone question and chat history to an LLM to answer.
+    """
+
+    # 1. Define the standalone_question prompt.
+    # Pass the follow-up question along with the chat history to the `condense_question_llm`,
+    # which rephrases the question and generates a standalone question.
+
+    standalone_question_prompt = PromptTemplate(
+        input_variables=['chat_history', 'question'],
+        template="""Given the following conversation and a follow up question,
+rephrase the follow up question to be a standalone question, in its original language.\n\n
+Chat History:\n{chat_history}\n
+Follow Up Input: {question}\n
+Standalone question:""")
+
+    # 2. Define the answer_prompt
+    # Pass the standalone question + the chat history + the context (retrieved documents) to the LLM, which will answer
+
+    answer_prompt = ChatPromptTemplate.from_template(answer_template(language=language))
+
+    # 3. Add ConversationSummaryBufferMemory for gpt-3.5, and ConversationBufferMemory for the other models
+
+    memory = create_memory(model_name)
+
+    # 4. Create the ConversationalRetrievalChain
+
+    chain = ConversationalRetrievalChain.from_llm(
+        condense_question_prompt=standalone_question_prompt,
+        combine_docs_chain_kwargs={'prompt': answer_prompt},
+        condense_question_llm=condense_question_llm,
+
+        memory=memory,
+        retriever=retriever,
+        llm=llm,
+
+        chain_type=chain_type,
+        verbose=False,
+        return_source_documents=True
+    )
+
+    print("Conversational retriever chain created successfully!")
+
+    return chain, memory
+
+
+"""
 # 1. load memory using RunnableLambda. Retrieves the chat_history attribute using itemgetter.
 # `RunnablePassthrough.assign` adds the chat_history to the assign function
 
@@ -776,7 +834,7 @@ memory.save_context(
 
 
 
-
+"""
 questions = ["what does DTC stand for?",
              "please give more details about it, including its use cases and implementation.",
              "does it outperform other diffusion-based models? explain in details.",