Ritesh-hf committed on
Commit
20356fd
1 Parent(s): e5c8160

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -6
app.py CHANGED
@@ -95,12 +95,12 @@ llm = ChatOpenAI(temperature=0, model_name="gpt-4o-mini", max_tokens=512)
95
  # base_compressor=compressor, base_retriever=retriever
96
  # )
97
 
98
- from langchain_community.document_compressors.rankllm_rerank import RankLLMRerank
99
 
100
- compressor = RankLLMRerank(top_n=3, model="gpt", gpt_model="gpt-4o-mini")
101
- compression_retriever = ContextualCompressionRetriever(
102
- base_compressor=compressor, base_retriever=retriever
103
- )
104
 
105
  # Contextualization prompt and retriever
106
  contextualize_q_system_prompt = """Given a chat history and the latest user question \
@@ -115,7 +115,7 @@ contextualize_q_prompt = ChatPromptTemplate.from_messages(
115
  ("human", "{input}")
116
  ]
117
  )
118
- history_aware_retriever = create_history_aware_retriever(llm, compression_retriever, contextualize_q_prompt)
119
 
120
  # QA system prompt and chain
121
  qa_system_prompt = """ You are a highly skilled information retrieval assistant. Use the following context to answer questions effectively.
 
95
  # base_compressor=compressor, base_retriever=retriever
96
  # )
97
 
98
+ # from langchain_community.document_compressors.rankllm_rerank import RankLLMRerank
99
 
100
+ # compressor = RankLLMRerank(top_n=3, model="gpt", gpt_model="gpt-4o-mini")
101
+ # compression_retriever = ContextualCompressionRetriever(
102
+ # base_compressor=compressor, base_retriever=retriever
103
+ # )
104
 
105
  # Contextualization prompt and retriever
106
  contextualize_q_system_prompt = """Given a chat history and the latest user question \
 
115
  ("human", "{input}")
116
  ]
117
  )
118
+ history_aware_retriever = create_history_aware_retriever(llm, retriever, contextualize_q_prompt)
119
 
120
  # QA system prompt and chain
121
  qa_system_prompt = """ You are a highly skilled information retrieval assistant. Use the following context to answer questions effectively.