Update functions.py
Browse files

functions.py CHANGED (+9 −9)
@@ -58,7 +58,7 @@ time_str = time.strftime("%d%m%Y-%H%M%S")
 HTML_WRAPPER = """<div style="overflow-x: auto; border: 1px solid #e6e9ef; border-radius: 0.25rem; padding: 1rem;
 margin-bottom: 2.5rem">{}</div> """
 
-memory = ConversationBufferMemory(memory_key='chat_history', return_messages=True)
+memory = ConversationBufferMemory(memory_key='chat_history', return_messages=True, output_key='answer')
 
 #Stuff Chain Type Prompt template
 
@@ -230,15 +230,15 @@ def embed_text(query,embedding_model,_docsearch):
         temperature=0
     )
 
-    chain = RetrievalQA.from_chain_type(llm=chat_llm, chain_type="stuff",
[NOTE(review): the content of the remaining removed lines (old 234–241) was lost in
extraction; judging by the commented-out copy added below, they were presumably the
continuation of this RetrievalQA.from_chain_type(...) call — confirm against the
repository history.]
+    # chain = RetrievalQA.from_chain_type(llm=chat_llm, chain_type="stuff",
+    #                                     retriever=_docsearch.as_retriever(),
+    #                                     return_source_documents=True)
+    chain = ConversationalRetrievalChain.from_llm(chat_llm,
+                                                  retriever= _docsearch.as_retriever(),
+                                                  qa_prompt = load_prompt(),
+                                                  memory = memory,
+                                                  return_source_documents=True)
 
     answer = chain({"query": query})