Spaces:
Sleeping
Sleeping
xuyingliKepler
committed on
Commit
·
bc6c9fb
1
Parent(s):
613ac12
Update app.py
Browse files
app.py
CHANGED
@@ -69,10 +69,10 @@ def smaller_chunks_strategy(docs):
|
|
69 |
retriever.vectorstore.add_documents(sub_docs)
|
70 |
retriever.docstore.mset(list(zip(doc_ids, docs)))
|
71 |
memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
|
72 |
-
|
73 |
-
|
74 |
-
|
75 |
-
|
76 |
|
77 |
|
78 |
def summary_strategy(docs):
|
@@ -101,10 +101,10 @@ def summary_strategy(docs):
|
|
101 |
summary_docs = [Document(page_content=s, metadata={id_key: doc_ids[i]}) for i, s in enumerate(summaries)]
|
102 |
retriever.vectorstore.add_documents(summary_docs)
|
103 |
retriever.docstore.mset(list(zip(doc_ids, docs)))
|
104 |
-
|
105 |
-
|
106 |
-
|
107 |
-
|
108 |
|
109 |
|
110 |
def hypothetical_questions_strategy(docs):
|
@@ -153,10 +153,10 @@ def hypothetical_questions_strategy(docs):
|
|
153 |
question_docs.extend([Document(page_content=s, metadata={id_key: doc_ids[i]}) for s in question_list])
|
154 |
retriever.vectorstore.add_documents(question_docs)
|
155 |
retriever.docstore.mset(list(zip(doc_ids, docs)))
|
156 |
-
|
157 |
-
|
158 |
-
|
159 |
-
|
160 |
|
161 |
|
162 |
|
|
|
69 |
retriever.vectorstore.add_documents(sub_docs)
|
70 |
retriever.docstore.mset(list(zip(doc_ids, docs)))
|
71 |
memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
|
72 |
+
qa = ConversationalRetrievalChain.from_llm(OpenAI(temperature=0), retriever, memory=memory)
|
73 |
+
st.info(prompt, icon="π§")
|
74 |
+
result = qa({"question": prompt})
|
75 |
+
st.success(result['answer'], icon="π€")
|
76 |
|
77 |
|
78 |
def summary_strategy(docs):
|
|
|
101 |
summary_docs = [Document(page_content=s, metadata={id_key: doc_ids[i]}) for i, s in enumerate(summaries)]
|
102 |
retriever.vectorstore.add_documents(summary_docs)
|
103 |
retriever.docstore.mset(list(zip(doc_ids, docs)))
|
104 |
+
qa = ConversationalRetrievalChain.from_llm(OpenAI(temperature=0), retriever, memory=ConversationBufferMemory(memory_key="chat_history", return_messages=True))
|
105 |
+
st.info(prompt, icon="π§")
|
106 |
+
result = qa({"question": prompt})
|
107 |
+
st.success(result['answer'], icon="π€")
|
108 |
|
109 |
|
110 |
def hypothetical_questions_strategy(docs):
|
|
|
153 |
question_docs.extend([Document(page_content=s, metadata={id_key: doc_ids[i]}) for s in question_list])
|
154 |
retriever.vectorstore.add_documents(question_docs)
|
155 |
retriever.docstore.mset(list(zip(doc_ids, docs)))
|
156 |
+
qa = ConversationalRetrievalChain.from_llm(OpenAI(temperature=0), retriever, memory=ConversationBufferMemory(memory_key="chat_history", return_messages=True))
|
157 |
+
st.info(prompt, icon="π§")
|
158 |
+
result = qa({"question": prompt})
|
159 |
+
st.success(result['answer'], icon="π€")
|
160 |
|
161 |
|
162 |
|