sanjeevbora committed · verified · Commit 4d65906 · 1 Parent(s): c514c01

Update app.py

Files changed (1): app.py (+43, -26)
app.py CHANGED
@@ -31,25 +31,27 @@ DATA_DIR = "data"
 os.makedirs(DATA_DIR, exist_ok=True)
 os.makedirs(PERSIST_DIR, exist_ok=True)

-def displayPDF(file):
-    with open(file, "rb") as f:
-        base64_pdf = base64.b64encode(f.read()).decode('utf-8')
-    pdf_display = f'<iframe src="data:application/pdf;base64,{base64_pdf}" width="100%" height="600" type="application/pdf"></iframe>'
-    return pdf_display
+# Function to display PDF (for Gradio it will just show a message saying PDF was uploaded)
+def display_pdf(file):
+    return f"Uploaded PDF: {file.name}"

+# Data ingestion function
 def data_ingestion(files):
-    for file in files:
-        filepath = os.path.join(DATA_DIR, file.name)
+    for uploaded_file in files:
+        filepath = os.path.join(DATA_DIR, uploaded_file.name)
         with open(filepath, "wb") as f:
-            f.write(file.getbuffer())
+            f.write(uploaded_file.read())
     documents = SimpleDirectoryReader(DATA_DIR).load_data()
     storage_context = StorageContext.from_defaults()
     index = VectorStoreIndex.from_documents(documents)
     index.storage_context.persist(persist_dir=PERSIST_DIR)
+    return "PDFs processed successfully!"

+# Query handling function
 def handle_query(query):
     storage_context = StorageContext.from_defaults(persist_dir=PERSIST_DIR)
     index = load_index_from_storage(storage_context)
+
     chat_text_qa_msgs = [
         (
             "user",
@@ -63,8 +65,8 @@ def handle_query(query):
     ]

     text_qa_template = ChatPromptTemplate.from_messages(chat_text_qa_msgs)
-
     query_engine = index.as_query_engine(text_qa_template=text_qa_template)
+
     answer = query_engine.query(query)

     if hasattr(answer, 'response'):
@@ -74,23 +76,38 @@ def handle_query(query):
     else:
         return "Sorry, I couldn't find an answer."

-# Gradio app setup
-def gradio_app(files, user_query):
+# Chatbot functionality
+def chatbot(files, user_input, history):
     if files:
-        data_ingestion(files)  # Process PDFs after they are uploaded
-        response = handle_query(user_query)
-        return response
-    return "Please upload at least one PDF file."
+        data_ingestion(files)  # Process PDFs
+        history.append(("assistant", "Your PDFs have been processed. You can now ask questions."))
+    if user_input:
+        response = handle_query(user_input)
+        history.append(("user", user_input))
+        history.append(("assistant", response))
+    return history, history

-interface = gr.Interface(
-    fn=gradio_app,
-    inputs=[
-        gr.File(label="Upload PDF Files", type="file", file_count="multiple"),
-        gr.Textbox(label="Ask me anything about the content of the PDF(s):")
-    ],
-    outputs="text",
-    title="(PDF) Information and Inference🗞️",
-    description="Retrieval-Augmented Generation. Start chat ...🚀"
-)
+# Gradio Interface
+with gr.Blocks() as app:
+    gr.Markdown("# (PDF) Information and Inference 🗞️")
+    gr.Markdown("Upload PDF files and ask questions about their content!")
+
+    with gr.Row():
+        with gr.Column(scale=2):
+            file_upload = gr.File(label="Upload your PDF files", file_types=["pdf"], multiple=True)
+        with gr.Column(scale=8):
+            chatbot_interface = gr.Chatbot(label="Q&A Assistant", elem_id="chatbot")
+
+    user_input = gr.Textbox(label="Ask a question", placeholder="Type your question here...")
+
+    history = gr.State([])  # To hold chat history
+
+    submit_button = gr.Button("Submit")
+
+    submit_button.click(
+        fn=chatbot,
+        inputs=[file_upload, user_input, history],
+        outputs=[chatbot_interface, history]
+    )

-interface.launch()
+app.launch()
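
For reference, below is a minimal, self-contained sketch of the same Blocks + Chatbot wiring pattern the new code introduces. It is not part of this commit: it assumes Gradio 3.x/4.x, where multiple uploads are requested with file_count="multiple" (as in the removed gr.Interface call) rather than multiple=True, and where gr.Chatbot renders a list of (user message, assistant message) pairs. The respond function is a hypothetical stand-in for the chatbot/handle_query pipeline above.

# Minimal sketch under the assumptions stated above; `respond` is a placeholder handler.
import gradio as gr

def respond(files, user_input, history):
    # Acknowledge uploads and echo the question; a real handler would call
    # data_ingestion(files) and handle_query(user_input) instead.
    note = f"{len(files)} file(s) uploaded. " if files else ""
    history = history + [(user_input, note + f"You asked: {user_input}")]
    # Return the history twice: once for the Chatbot display, once for the State.
    return history, history

with gr.Blocks() as demo:
    file_upload = gr.File(label="Upload your PDF files", file_types=[".pdf"], file_count="multiple")
    chat = gr.Chatbot(label="Q&A Assistant")
    question = gr.Textbox(label="Ask a question", placeholder="Type your question here...")
    state = gr.State([])
    gr.Button("Submit").click(fn=respond, inputs=[file_upload, question, state], outputs=[chat, state])

demo.launch()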