lekkalar committed
Commit 65e2d88
1 Parent(s): 868f6c9

Update app.py

Files changed (1)
  1. app.py +15 -12
app.py CHANGED
@@ -10,10 +10,7 @@ from langchain.chains import RetrievalQA # for conversing with chatGPT
 from langchain.chat_models import ChatOpenAI # the LLM model we'll use (ChatGPT)
 from langchain import PromptTemplate
 
-
-
-
-def load_doc(pdf_doc, open_ai_key):
+def load_pdf_and_generate_embeddings(pdf_doc, open_ai_key):
     if openai_key is not None:
         os.environ['OPENAI_API_KEY'] = open_ai_key
         #Load the pdf file
@@ -38,16 +35,18 @@ def load_doc(pdf_doc, open_ai_key):
         PROMPT = PromptTemplate(template=prompt_template, input_variables=["context", "question"])
         chain_type_kwargs = {"prompt": PROMPT}
         pdf_qa = RetrievalQA.from_chain_type(llm=ChatOpenAI(temperature=0, model_name="gpt-4"),chain_type="stuff", retriever=vectordb.as_retriever(), chain_type_kwargs=chain_type_kwargs, return_source_documents=False)
-
-
+
         return "Ready"
     else:
         return "Please provide an OpenAI API key"
+
 
-
+
+
 def answer_query(query):
     question = query
-    return pdf_qa.run(question)
+    response = "Field Name: field; Question sent to gpt-4: ", question, "Response from gpt-4:",pdf_qa.run(question)
+    return response
 
 css="""
 #col-container {max-width: 700px; margin-left: auto; margin-right: auto;}
@@ -73,8 +72,8 @@ with gr.Blocks(css=css,theme=gr.themes.Monochrome()) as demo:
     with gr.Row():
         status = gr.Textbox(label="Status", placeholder="", interactive=False)
         load_pdf = gr.Button("Load PDF")
-    with gr.Row():
-        document_type = gr.Dropdown(['Deed of Trust', 'TRANSMITTAL SUMMARY'], label="PDF being loaded is")
+
+
 
     with gr.Row():
         input = gr.Textbox(label="Type in your question")
@@ -82,10 +81,14 @@ with gr.Blocks(css=css,theme=gr.themes.Monochrome()) as demo:
         submit_query = gr.Button("Submit")
 
 
-    load_pdf.click(load_doc, inputs=[pdf_doc, openai_key], outputs=status)
+    load_pdf.click(load_pdf_and_generate_embeddings, inputs=[pdf_doc, openai_key], outputs=status)
+
 
     submit_query.click(answer_query,input,output)
 
 
-#forcing a save in order to re-build the container.
 demo.launch()
+
+
+
+
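Note on the updated answer_query: the comma-separated assignment makes response a Python tuple, so the Gradio output textbox will show the tuple form of the labels, question, and answer rather than one flat sentence. A minimal single-string variant is sketched below; it assumes pdf_qa is reachable at module scope (e.g. set as a global inside load_pdf_and_generate_embeddings, which these hunks do not show) and keeps the same labels the commit introduces.

def answer_query(query):
    # Sketch only: assumes pdf_qa was already built by load_pdf_and_generate_embeddings
    # and is visible here as a module-level name.
    question = query
    answer = pdf_qa.run(question)
    # One f-string instead of a tuple, with the same labels as the commit.
    return f"Field Name: field; Question sent to gpt-4: {question}; Response from gpt-4: {answer}"

Either way, the PDF must be loaded first (status "Ready") so that pdf_qa exists before a question is submitted.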