Spaces:
Sleeping
Sleeping
Roberta2024
committed on
Commit
•
ec8e07e
1
Parent(s):
b11ddc9
Update app.py
Browse files
app.py
CHANGED
@@ -3,7 +3,6 @@ import gradio as gr
|
|
3 |
import asyncio
|
4 |
from langchain_core.prompts import PromptTemplate
|
5 |
from langchain_community.document_loaders import PyPDFLoader
|
6 |
-
from langchain.chains.question_answering import load_qa_chain
|
7 |
import torch
|
8 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
9 |
|
@@ -42,8 +41,12 @@ input_file = gr.File(label="Upload PDF File")
|
|
42 |
input_question = gr.Textbox(label="Ask about the document")
|
43 |
output_text = gr.Textbox(label="Answer - Mistral Model")
|
44 |
|
45 |
-
|
46 |
-
|
|
|
|
|
|
|
|
|
47 |
return answer
|
48 |
|
49 |
# Create Gradio Interface
|
|
|
3 |
import asyncio
|
4 |
from langchain_core.prompts import PromptTemplate
|
5 |
from langchain_community.document_loaders import PyPDFLoader
|
|
|
6 |
import torch
|
7 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
8 |
|
|
|
41 |
input_question = gr.Textbox(label="Ask about the document")
|
42 |
output_text = gr.Textbox(label="Answer - Mistral Model")
|
43 |
|
44 |
+
def pdf_qa(file, question):
|
45 |
+
if file is None:
|
46 |
+
return "Please upload a PDF file first."
|
47 |
+
|
48 |
+
loop = asyncio.get_event_loop()
|
49 |
+
answer = loop.run_until_complete(initialize(file.name, question))
|
50 |
return answer
|
51 |
|
52 |
# Create Gradio Interface
|