Carlosito16 committed
Commit 1adc68d
1 Parent(s): 1d0f253

Update pages/3_chat.py

Files changed (1):
  1. pages/3_chat.py +4 -5
pages/3_chat.py CHANGED
@@ -41,7 +41,7 @@ PROMPT = PromptTemplate(
 # chain_type_kwargs = {"prompt": PROMPT}
 
 @st.cache_resource
-def load_conversational_qa_memory_retriever(llm_model, vector_database):
+def load_conversational_qa_memory_retriever():
 
     question_generator = LLMChain(llm=llm_model, prompt=CONDENSE_QUESTION_PROMPT)
     doc_chain = load_qa_chain(llm_model, chain_type="stuff", prompt = PROMPT)
@@ -86,10 +86,9 @@ if "chat_history" not in st.session_state: #this one is to pass previous messages
     st.session_state.chat_history = []
 
 
-
-
-conversational_qa_memory_retriever, question_generator = load_conversational_qa_memory_retriever(llm_model = st.session_state['model'],
-                                                                                                  vector_database = st.session_state['faiss_db'])
+llm_model = st.session_state['model'],
+vector_database = st.session_state['faiss_db']
+conversational_qa_memory_retriever, question_generator = load_conversational_qa_memory_retriever()
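The commit follows a common Streamlit refactor: @st.cache_resource hashes every function argument to build its cache key, and objects like a loaded LLM or a FAISS index typically are not hashable, so the loader's parameters are dropped and the objects are read from module scope instead. Note that the committed line llm_model = st.session_state['model'], keeps a trailing comma, which makes llm_model a one-element tuple rather than the model itself. Below is a minimal sketch of the resulting wiring with that comma removed, assuming the legacy LangChain ConversationalRetrievalChain API; the buffer-window memory and retriever settings are illustrative assumptions, not shown in the diff.

    # Hedged sketch, not the exact committed code: PROMPT is the
    # PromptTemplate defined earlier in pages/3_chat.py, and the memory /
    # retriever settings are assumptions for illustration.
    import streamlit as st
    from langchain.chains import ConversationalRetrievalChain, LLMChain
    from langchain.chains.conversational_retrieval.prompts import CONDENSE_QUESTION_PROMPT
    from langchain.chains.question_answering import load_qa_chain
    from langchain.memory import ConversationBufferWindowMemory

    @st.cache_resource
    def load_conversational_qa_memory_retriever():
        # No parameters: st.cache_resource would try to hash each argument,
        # so the function closes over the module-level names assigned below.
        question_generator = LLMChain(llm=llm_model, prompt=CONDENSE_QUESTION_PROMPT)
        doc_chain = load_qa_chain(llm_model, chain_type="stuff", prompt=PROMPT)
        memory = ConversationBufferWindowMemory(
            k=2,                           # assumed window size
            memory_key="chat_history",
            return_messages=True,
            output_key="answer")
        chain = ConversationalRetrievalChain(
            retriever=vector_database.as_retriever(),
            question_generator=question_generator,
            combine_docs_chain=doc_chain,
            memory=memory)
        return chain, question_generator

    llm_model = st.session_state['model']          # trailing comma removed
    vector_database = st.session_state['faiss_db']
    conversational_qa_memory_retriever, question_generator = load_conversational_qa_memory_retriever()

At query time the chain is typically invoked as conversational_qa_memory_retriever({"question": user_input}), with the attached memory supplying chat_history. Two caveats of this pattern: the cached chain will not refresh if st.session_state['model'] later changes, and an alternative that keeps the explicit signature is to prefix unhashable parameters with an underscore (e.g. _llm_model), which tells st.cache_resource to exclude them from the cache key.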