Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,34 +1,6 @@
|
|
1 |
-
import torch
|
2 |
import streamlit as st
|
3 |
-
from
|
4 |
-
|
5 |
-
# Utilize BertForQuestionAnswering model for direct start/end logits
|
6 |
-
tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
|
7 |
-
model = BertForQuestionAnswering.from_pretrained("bert-base-uncased")
|
8 |
-
|
9 |
-
def answer_query(question, context):
|
10 |
-
# Preprocess using tokenizer
|
11 |
-
inputs = tokenizer(question, context, return_tensors="pt")
|
12 |
-
|
13 |
-
# Use model for question answering
|
14 |
-
with torch.no_grad():
|
15 |
-
outputs = model(**inputs)
|
16 |
-
|
17 |
-
# Retrieve logits directly
|
18 |
-
start_logits = outputs.start_logits
|
19 |
-
end_logits = outputs.end_logits
|
20 |
-
|
21 |
-
# Find answer span
|
22 |
-
answer_start = torch.argmax(start_logits)
|
23 |
-
answer_end = torch.argmax(end_logits) + 1
|
24 |
-
|
25 |
-
# Extract answer from context
|
26 |
-
answer = tokenizer.convert_tokens_to_string(
|
27 |
-
tokenizer.convert_ids_to_tokens(inputs["input_ids"][0]) # Access original tokens
|
28 |
-
)[answer_start:answer_end]
|
29 |
-
|
30 |
-
return answer
|
31 |
-
|
32 |
|
33 |
# Streamlit app
|
34 |
st.title("Question Answering App")
|
@@ -36,19 +8,22 @@ st.title("Question Answering App")
|
|
36 |
# Textbox for user query
|
37 |
user_query = st.text_input("Enter your question:")
|
38 |
|
39 |
-
# File uploader for context
|
40 |
-
uploaded_file = st.file_uploader("Upload a
|
41 |
|
42 |
if uploaded_file is not None:
|
43 |
-
|
44 |
-
|
|
|
|
|
|
|
45 |
else:
|
46 |
-
|
47 |
-
|
48 |
|
49 |
# Answer the query if a question is provided
|
50 |
if user_query:
|
51 |
-
|
52 |
-
|
53 |
else:
|
54 |
-
|
|
|
|
|
import streamlit as st
from data_preprocessing import preprocess_csv
from question_answering import answer_from_csv

# Streamlit app: ask a question against a context built from an uploaded CSV.
st.title("Question Answering App")

# Textbox for user query
user_query = st.text_input("Enter your question:")

# File uploader for context.
# BUG FIX: the uploader previously used type="text" for a CSV upload and then
# read `uploaded_file.url` — Streamlit's UploadedFile has no `.url` attribute
# (it is a file-like object), so every upload crashed with AttributeError.
# Restrict uploads to CSV and hand the file-like object to the preprocessor.
uploaded_file = st.file_uploader("Upload a CSV file from Hugging Face Hub:", type="csv")

if uploaded_file is not None:
    # Preprocess the uploaded CSV into a text context.
    # NOTE(review): assumes preprocess_csv accepts a file-like object (as
    # pandas.read_csv does) — confirm against data_preprocessing.preprocess_csv.
    context = preprocess_csv(uploaded_file)
else:
    # Use default context (optional)
    context = "This is a sample context for demonstration purposes. You can upload your own text file or CSV file for context."

# Answer the query if a question is provided
if user_query:
    answer = answer_from_csv(user_query, context)
    st.write(f"Answer: {answer}")
else:
    st.write("Please enter a question.")