OuroborosM committed
Commit 0e3b32d · 1 parent: 3b25af6

correct bug

Files changed (1): app.py (+4 -2)
app.py CHANGED
@@ -766,7 +766,7 @@ agent_ZEROSHOT_REACT_2 = initialize_agent(tools_remote, GPTfake,
 llm_chain = LLMChain(llm=llm, prompt=prompt)
 llm_chain_2 = LLMChain(llm=GPTfake, prompt=prompt)
 
-print("Test LLM Chain", llm_chain_2({'agent_scratchpad':"", 'input':"what is PDP?"}))
+# print("Test LLM Chain", llm_chain_2({'agent_scratchpad':"", 'input':"what is PDP?"}))
 
 # llm_chain_openai = LLMChain(llm=llm, prompt=prompt_openai, verbose=True)
 
@@ -1414,11 +1414,13 @@ def CreatDb_P():
 def QAQuery_p(question: str):
     global vectordb_p
     global agent
+    global Choice
     # vectordb = Chroma(persist_directory='db', embedding_function=embeddings)
     retriever = vectordb_p.as_retriever()
     retriever.search_kwargs['k'] = int(os.environ["search_kwargs_k"])
     # retriever.search_kwargs['fetch_k'] = 100
-    if agent == agent_ZEROSHOT_REACT_2 or agent == agent_ZEROSHOT_AGENT_2:
+    # if agent == agent_ZEROSHOT_REACT_2 or agent == agent_ZEROSHOT_AGENT_2:
+    if Choice == 'Zero Short React 2' or Choice == "Zero Short Agent 2":
        print("--------------- QA with Remote --------------")
        qa = RetrievalQA.from_chain_type(llm=GPTfake, chain_type="stuff",
                                         retriever=retriever, return_source_documents = True,
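
The change replaces an equality check on agent objects with a branch on the selected mode string. A minimal sketch of that dispatch pattern, using hypothetical helper names (REMOTE_CHOICES, run_remote_qa, run_local_qa are illustrative placeholders, not functions from app.py):

# Sketch: branch on the user's selection string instead of comparing agent instances.
# All names below are illustrative placeholders, not the app's real helpers.

REMOTE_CHOICES = {"Zero Short React 2", "Zero Short Agent 2"}

def run_remote_qa(question: str) -> str:
    # Stand-in for the RetrievalQA path backed by the remote GPTfake LLM.
    return f"[remote QA] {question}"

def run_local_qa(question: str) -> str:
    # Stand-in for the default local RetrievalQA path.
    return f"[local QA] {question}"

def qa_query(question: str, choice: str) -> str:
    # Comparing freshly constructed agent objects with == may never match;
    # comparing the selection string is unambiguous.
    if choice in REMOTE_CHOICES:
        return run_remote_qa(question)
    return run_local_qa(question)

if __name__ == "__main__":
    print(qa_query("what is PDP?", "Zero Short React 2"))
    print(qa_query("what is PDP?", "something else"))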