import gradio as gr
from transformers import pipeline
import pdfplumber
# Load the pre-trained question-answering model
qa_pipeline = pipeline("question-answering", model="distilbert-base-cased-distilled-squad")
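# The pipeline returns a dict with "answer", "score", "start", and "end";
# only the extracted "answer" span is shown to the user below.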
# Shared variable to store uploaded PDF text
pdf_text = ""
# Function to load the PDF and store its text
def load_pdf(file):
    global pdf_text
    try:
        with pdfplumber.open(file) as pdf:
            pdf_text = ""
            for page in pdf.pages:
                # extract_text() returns None for pages with no extractable text
                pdf_text += page.extract_text() or ""
        return "PDF loaded successfully."
    except Exception as e:
        return f"Error processing PDF: {str(e)}"
# Function to answer the user's question based on the loaded PDF
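# The question-answering pipeline splits contexts longer than the model's
# 512-token limit into overlapping chunks and returns the best-scoring span.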
def answer_question(question):
    if not pdf_text:
        return "No PDF loaded. Upload a PDF first."
    try:
        # Ask the user's question using the question-answering model
        answer = qa_pipeline({"context": pdf_text, "question": question})
        return answer["answer"]
    except Exception as e:
        return f"Error answering question: {str(e)}"
# Interface for uploading the PDF
pdf_interface = gr.Interface(
    fn=load_pdf,
    inputs=gr.File(label="Upload PDF"),
    outputs="text",
    live=True,
    title="PDF Uploader",
    description="Upload a PDF to load its content.",
)
# Interface for answering questions based on the loaded PDF
qa_interface = gr.Interface(
    fn=answer_question,
    inputs=gr.Textbox(label="Enter Question", type="text"),
    outputs="text",
    live=True,
    title="PDF Question-Answering",
    description="Enter a question to get an answer based on the loaded PDF.",
)
# Combine both interfaces into one app; launching them separately would block
# on the first launch() call and never start the second.
app = gr.TabbedInterface([pdf_interface, qa_interface], tab_names=["Upload PDF", "Ask a Question"])
app.launch()