lekkalar committed on
Commit
c28a184
1 Parent(s): f6709bf

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -5
app.py CHANGED
@@ -11,7 +11,7 @@ from langchain.chains import RetrievalQA # for conversing with chatGPT
11
  from langchain.chat_models import ChatOpenAI # the LLM model we'll use (ChatGPT)
12
  from langchain import PromptTemplate
13
 
14
- def load_pdf_and_generate_embeddings(pdf_doc, open_ai_key):
15
  if openai_key is not None:
16
  os.environ['OPENAI_API_KEY'] = open_ai_key
17
  #Load the pdf file
@@ -21,10 +21,11 @@ def load_pdf_and_generate_embeddings(pdf_doc, open_ai_key):
21
  #Create an instance of OpenAIEmbeddings, which is responsible for generating embeddings for text
22
  embeddings = OpenAIEmbeddings()
23
 
24
- #To create a vector store, we use the Chroma class, which takes the documents (pages in our case), the embeddings instance, and a directory to store the vector data
25
- vectordb = Chroma.from_documents(pages, embedding=embeddings)
 
26
 
27
- #Finally, we create the bot using the RetrievalQAChain class
28
  global pdf_qa
29
 
30
  prompt_template = """Use the following pieces of context to answer the question at the end. If you do not know the answer, just return N/A. If you encounter a date, return it in mm/dd/yyyy format.
@@ -129,7 +130,7 @@ with gr.Blocks(css=css,theme=gr.themes.Monochrome()) as demo:
129
  submit_query = gr.Button("Submit your own question to gpt-4").style(full_width=False)
130
 
131
 
132
- load_pdf.click(load_pdf_and_generate_embeddings, inputs=[pdf_doc, openai_key], outputs=status)
133
 
134
  answers_for_predefined_question_set.click(answer_predefined_questions, document_type, answers)
135
 
 
11
  from langchain.chat_models import ChatOpenAI # the LLM model we'll use (ChatGPT)
12
  from langchain import PromptTemplate
13
 
14
+ def load_pdf_and_generate_embeddings(pdf_doc, open_ai_key, relevant_pages='all'):
15
  if openai_key is not None:
16
  os.environ['OPENAI_API_KEY'] = open_ai_key
17
  #Load the pdf file
 
21
  #Create an instance of OpenAIEmbeddings, which is responsible for generating embeddings for text
22
  embeddings = OpenAIEmbeddings()
23
 
24
+ if relevant_pages == 'all':
25
+ #To create a vector store, we use the Chroma class, which takes the documents (pages in our case) and the embeddings instance
26
+ vectordb = Chroma.from_documents(pages, embedding=embeddings)
27
 
28
+ #Finally, we create the bot using the RetrievalQA class
29
  global pdf_qa
30
 
31
  prompt_template = """Use the following pieces of context to answer the question at the end. If you do not know the answer, just return N/A. If you encounter a date, return it in mm/dd/yyyy format.
 
130
  submit_query = gr.Button("Submit your own question to gpt-4").style(full_width=False)
131
 
132
 
133
+ load_pdf.click(load_pdf_and_generate_embeddings, inputs=[pdf_doc, openai_key, relevant_pages], outputs=status)
134
 
135
  answers_for_predefined_question_set.click(answer_predefined_questions, document_type, answers)
136