thenHung committed
Commit 4bcc3a0 · verified · 1 parent: 39f9e3f

Create app.py

Files changed (1)
app.py +68 -0
app.py ADDED
@@ -0,0 +1,68 @@
+ import torch
+ import gradio as gr
+ from transformers import T5Tokenizer, T5ForConditionalGeneration
+
+ # Set device
+ device = "mps" if torch.backends.mps.is_available() else "cuda" if torch.cuda.is_available() else "cpu"
+
+ # Load model and tokenizer
+ model_path = "thenHung/question_decomposer_t5"
+ tokenizer = T5Tokenizer.from_pretrained(model_path)
+ model = T5ForConditionalGeneration.from_pretrained(model_path)
+ model.to(device)
+ model.eval()
+
+ def decompose_question(question):
+     """
+     Decompose a complex question into sub-questions
+
+     Args:
+         question (str): Input complex question
+
+     Returns:
+         list: List of decomposed sub-questions
+     """
+     try:
+         # Prepare input
+         input_text = f"decompose question: {question}"
+         input_ids = tokenizer(
+             input_text,
+             max_length=128,
+             padding="max_length",
+             truncation=True,
+             return_tensors="pt"
+         ).input_ids.to(device)
+
+         # Generate sub-questions
+         with torch.no_grad():
+             outputs = model.generate(
+                 input_ids,
+                 max_length=128,
+                 num_beams=4,
+                 early_stopping=True
+             )
+
+         # Decode and split output
+         decoded_output = tokenizer.decode(outputs[0], skip_special_tokens=True)
+         sub_questions = decoded_output.split(" [SEP] ")
+
+         return sub_questions
+     except Exception as e:
+         return [f"Error: {str(e)}"]
+
+ # Create Gradio interface
+ demo = gr.Interface(
+     fn=decompose_question,
+     inputs=gr.Textbox(label="Enter your complex question"),
+     outputs=gr.JSON(label="Decomposed Sub-Questions"),
+     title="Question Decomposer",
+     description="Breaks down complex questions into simpler sub-questions using a T5 model",
+     examples=[
+         "Who is taller between John and Mary?",
+         "What is the capital of Vietnam and the largest city in Vietnam?",
+     ]
+ )
+
+ # Launch the app
+ if __name__ == "__main__":
+     demo.launch()
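
Once this app.py is running (for example locally via python app.py), the interface can also be queried programmatically. The snippet below is a minimal sketch, not part of the commit: it assumes the gradio_client package is installed, that the app is serving at Gradio's default local address http://127.0.0.1:7860, and that the default /predict endpoint exposed by gr.Interface is used; adjust the URL if the app is hosted elsewhere.

from gradio_client import Client

# Connect to the locally running Question Decomposer app
# (replace the URL with the hosted address if applicable).
client = Client("http://127.0.0.1:7860/")

# Send a complex question to the default /predict endpoint of the gr.Interface
sub_questions = client.predict(
    "Who is taller between John and Mary?",
    api_name="/predict",
)

print(sub_questions)  # the list of decomposed sub-questions returned by the model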