william4416 committed on
Commit
b853635
1 Parent(s): fa6445f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +34 -1
app.py CHANGED
@@ -1,3 +1,36 @@
1
  import gradio as gr
 
2
 
3
- gr.load("models/william4416/bewtesttwo").launch()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import gradio as gr
2
+ from transformers import pipeline
3
 
# Load the question-answering model once at import time so every request
# reuses the same pipeline object.
# NOTE(review): this downloads/loads the model at startup — confirm the
# Space has enough memory and that "william4416/bewtesttwo" is a QA model.
qa_pipeline = pipeline("question-answering", model="william4416/bewtesttwo")
6
+
7
# Parse the uploaded JSONL file into a list of records.
def process_jsonl(file_path):
    """Read a JSON-Lines file and return its records as a list.

    Args:
        file_path: Path to a UTF-8 encoded JSONL file, one JSON object
            per line.

    Returns:
        list: One parsed Python object per non-blank line, in file order.
    """
    import json  # local import so this fix is self-contained

    with open(file_path, "r", encoding="utf-8") as f:
        # Security fix: the original used eval(line), which executes
        # arbitrary code from an untrusted upload; eval also rejects
        # valid JSON literals such as true/false/null. Parse as JSON.
        # Blank lines are skipped instead of raising.
        return [json.loads(line) for line in f if line.strip()]
12
+
13
# Answer a question against every context record in the uploaded file.
def answer_question(context, question):
    """Run the QA pipeline over each record's "context" field.

    Args:
        context: The value handed over by the Gradio File input — a
            tempfile-like object with a ``.name`` path — or a plain path
            string, or (backward compatible with the original code) an
            already-parsed list of ``{"context": ...}`` dicts.
        question: The question text entered by the user.

    Returns:
        list: One answer string per context record, in file order.
    """
    # Bug fix: the File input passes a file object, not a list of dicts,
    # so the original item["context"] lookup could never succeed on a
    # real upload (process_jsonl was defined but never called). Normalize
    # every accepted input shape to a list of records first.
    if isinstance(context, (str, bytes)):
        records = process_jsonl(context)
    elif hasattr(context, "name"):
        records = process_jsonl(context.name)
    else:
        records = context  # assume already-parsed records (original contract)

    answers = []
    for record in records:
        result = qa_pipeline(question=question, context=record["context"])
        answers.append(result["answer"])
    return answers
23
+
24
# Build the UI widgets: a JSONL upload plus a question box feeding
# answer_question, with a single textbox for the result.
# NOTE(review): gr.inputs / gr.outputs is the legacy pre-3.x Gradio API
# and was removed in Gradio 3+ — confirm the pinned gradio version, or
# migrate to gr.File / gr.Textbox.
context_input = gr.inputs.File(label="utsdata.jsonl")
question_input = gr.inputs.Textbox(label="Enter your question", lines=3)
output_text = gr.outputs.Textbox(label="Answer")

# Create the interface and start the web server (blocks until shutdown).
# NOTE(review): answer_question returns a *list* of answers while the
# output is a single Textbox — verify Gradio renders that acceptably.
gr.Interface(
    fn=answer_question,
    inputs=[context_input, question_input],
    outputs=output_text,
    title="Question Answering with Hugging Face Transformers",
    description="Upload a JSONL file containing contexts and ask a question to get answers.",
).launch()