# Demo_space_2 / app.py — Streamlit extractive question-answering demo
# (Hugging Face Space by Ganesh43; original commit 8705b8d "Create app.py")
import streamlit as st
import torch
from transformers import AutoModelForQuestionAnswering, AutoTokenizer
# Load the pre-trained extractive-QA model and its tokenizer once at startup,
# so every Streamlit rerun reuses the same in-memory objects.
# NOTE(review): "facebook/bart-base-squad2" does not look like a valid Hub id —
# SQuAD2 checkpoints are usually e.g. "deepset/roberta-base-squad2"; verify
# this model actually exists before deploying.
model_name = "facebook/bart-base-squad2"
model = AutoModelForQuestionAnswering.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
def answer_query(question, context):
    """Extract the answer to *question* from *context* using the QA model.

    Args:
        question: Natural-language question string.
        context: Passage of text the answer should be extracted from.

    Returns:
        The answer span decoded from the tokenized input (may be empty if
        the model predicts an invalid span).
    """
    # Encode question + context as a single model input.
    inputs = tokenizer(question, context, return_tensors="pt")

    # Inference only — no gradients needed.
    with torch.no_grad():
        outputs = model(**inputs)

    # Bug fix: the output attribute is `end_logits`; `end_scores` does not
    # exist on QuestionAnsweringModelOutput and raised AttributeError.
    start_scores = outputs.start_logits
    end_scores = outputs.end_logits

    # Most likely answer span (end index is exclusive, hence +1).
    answer_start = torch.argmax(start_scores)
    answer_end = torch.argmax(end_scores) + 1

    # Bug fix: the span indices are *token* positions, so the answer must be
    # decoded from the tokenized input ids — the original sliced the raw
    # context string with token indices, yielding garbage.
    answer_ids = inputs["input_ids"][0][answer_start:answer_end]
    return tokenizer.decode(answer_ids, skip_special_tokens=True)
# --- Streamlit UI ---
st.title("Question Answering App")

# Question input plus an optional text file providing the context passage.
user_query = st.text_input("Enter your question:")
uploaded_file = st.file_uploader("Upload a context file (txt):")

# Fall back to a built-in sample context when nothing was uploaded.
if uploaded_file is None:
    context = "This is a sample context for demonstration purposes. You can upload your own text file for context."
else:
    context = uploaded_file.read().decode("utf-8")

# Only run the model once the user has typed a question.
if not user_query:
    st.write("Please enter a question.")
else:
    answer = answer_query(user_query, context)
    st.write(f"Answer: {answer}")