Ganesh43 committed
Commit e32de15
1 Parent(s): e28f0eb

Update app.py

Files changed (1)
  1. app.py +22 -19
app.py CHANGED
@@ -7,22 +7,25 @@ tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
 model = BertModel.from_pretrained("bert-base-uncased")
 
 def answer_query(question, context):
-    # Preprocess the question and context using the tokenizer
-    inputs = tokenizer(question, context, return_tensors="pt")
-
-    # Use the model to get the answer
-    with torch.no_grad():
-        outputs = model(**inputs)
-        start_scores, end_scores = outputs.start_logits, outputs.end_scores
-
-    # Find the most likely answer span
-    answer_start = torch.argmax(start_scores)
-    answer_end = torch.argmax(end_scores) + 1
-
-    # Extract the answer from the context
-    answer = tokenizer.convert_tokens_to_string(context)[answer_start:answer_end]
-
-    return answer
+    # Preprocess the question and context using the tokenizer
+    inputs = tokenizer(question, context, return_tensors="pt")
+
+    # Use the model to get the answer
+    with torch.no_grad():
+        outputs = model(**inputs)
+
+    # Access the logits from the model's output structure
+    start_logits = outputs.hidden_states[-1][:, 0, :]  # Access from hidden states
+    end_logits = outputs.hidden_states[-1][:, 1, :]
+
+    # Find the most likely answer span
+    answer_start = torch.argmax(start_logits)
+    answer_end = torch.argmax(end_logits) + 1
+
+    # Extract the answer from the context
+    answer = tokenizer.convert_tokens_to_string(context)[answer_start:answer_end]
+
+    return answer
 
 # Streamlit app
 st.title("Question Answering App")
@@ -34,15 +37,15 @@ user_query = st.text_input("Enter your question:")
 uploaded_file = st.file_uploader("Upload a context file (txt):")
 
 if uploaded_file is not None:
-    # Read the uploaded file content
-    context = uploaded_file.read().decode("utf-8")
+    # Read the uploaded file content
+    context = uploaded_file.read().decode("utf-8")
 else:
-    # Use default context if no file uploaded
-    context = "This is a sample context for demonstration purposes. You can upload your own text file for context."
+    # Use default context if no file uploaded
+    context = "This is a sample context for demonstration purposes. You can upload your own text file for context."
 
 # Answer the query if a question is provided
 if user_query:
-    answer = answer_query(user_query, context)
-    st.write(f"Answer: {answer}")
+    answer = answer_query(user_query, context)
+    st.write(f"Answer: {answer}")
 else:
-    st.write("Please enter a question.")
+    st.write("Please enter a question.")