lekkalar committed
Commit 92a96a7 (1 parent: 2b0a298)

Update app.py

Files changed (1): app.py (+2 -2)
app.py CHANGED
@@ -6,7 +6,7 @@ import time
 from langchain.document_loaders import OnlinePDFLoader # for loading the pdf
 from langchain.embeddings import OpenAIEmbeddings # for creating embeddings
 from langchain.vectorstores import Chroma # for the vectorization part
-from langchain.chains import ConversationalRetrievalChain # for conversing with chatGPT
+from langchain.chains import RetrievalQA # for conversing with chatGPT
 from langchain.chat_models import ChatOpenAI # the LLM model we'll use (ChatGPT)
 
 def loading_pdf():
@@ -29,7 +29,7 @@ def pdf_changes(pdf_doc, open_ai_key):
         #A ConversationalRetrievalChain is similar to a RetrievalQAChain, except that the ConversationalRetrievalChain allows for
         #passing in of a chat history which can be used to allow for follow up questions.
         global pdf_qa
-        pdf_qa = ConversationalRetrievalChain.from_llm(ChatOpenAI(temperature=0, model_name="gpt-4"), vectordb.as_retriever(), return_source_documents=False)
+        pdf_qa = RetrievalQA.from_chain_type(ChatOpenAI(temperature=0, model_name="gpt-4"), vectordb.as_retriever(), return_source_documents=False)
 
         return "Ready"
     else:
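
For context on the swap: ConversationalRetrievalChain.from_llm takes the retriever as its second positional argument, whereas the second positional parameter of RetrievalQA.from_chain_type is chain_type, so the retriever is normally passed by keyword. A minimal sketch of how the new chain is typically constructed in the LangChain versions this app targets, assuming vectordb is the Chroma store built earlier in pdf_changes and the OpenAI key is already configured:

    from langchain.chains import RetrievalQA
    from langchain.chat_models import ChatOpenAI

    # Sketch only: keyword arguments ensure the retriever binds to `retriever`
    # rather than to the `chain_type` parameter of from_chain_type().
    # `vectordb` is assumed to be the Chroma vector store built from the uploaded PDF.
    pdf_qa = RetrievalQA.from_chain_type(
        llm=ChatOpenAI(temperature=0, model_name="gpt-4"),
        chain_type="stuff",                 # default: stuff retrieved chunks into one prompt
        retriever=vectordb.as_retriever(),
        return_source_documents=False,
    )

    # RetrievalQA expects a single question under the "query" key and returns
    # the answer under "result":
    result = pdf_qa({"query": "What is this document about?"})
    print(result["result"])

Unlike the conversational chain, RetrievalQA takes a single query with no chat history, so follow-up questions are answered without memory of earlier turns.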