Makima57 committed on
Commit
115fce4
1 Parent(s): 145c69e

Upload app.py with huggingface_hub

Files changed (1)
  1. app.py +45 -0
app.py ADDED
@@ -0,0 +1,45 @@
+
+import gradio as gr
+import ctranslate2
+from transformers import AutoTokenizer
+
+# Load the tokenizer and the local CTranslate2 conversion of the model
+model_prompt = "Solve the following mathematical problem: "
+tokenizer = AutoTokenizer.from_pretrained("AI-MO/NuminaMath-7B-TIR")
+model_path = "/kaggle/working/deepseek-math-Numina"
+generator = ctranslate2.Generator(model_path, device="cpu", compute_type="int8")
+
+# Generate a prediction for a single question with the model
+def get_prediction(question):
+    input_text = model_prompt + question
+    input_tokens = tokenizer.convert_ids_to_tokens(tokenizer.encode(input_text))  # encode() keeps special tokens
+    results = generator.generate_batch([input_tokens])
+    output_tokens = results[0].sequences[0]
+    predicted_answer = tokenizer.convert_tokens_to_string(output_tokens)
+    return predicted_answer
+
+# Gradio callback: return the question, the model prediction, and the reference answer
+def gradio_interface(question, correct_answer):
+    predicted_answer = get_prediction(question)
+    return {
+        "question": question,
+        "predicted_answer": predicted_answer,
+        "correct_answer": correct_answer,
+    }
+
+# Gradio app setup
+interface = gr.Interface(
+    fn=gradio_interface,
+    inputs=[
+        gr.Textbox(label="Math Question"),
+        gr.Textbox(label="Correct Answer"),
+    ],
+    outputs=[
+        gr.JSON(label="Results")
+    ],
+    title="Math Question Solver",
+    description="Enter a math question to get the model's predicted answer.",
+)
+
+if __name__ == "__main__":
+    interface.launch()
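
Note: the Generator above loads a local CTranslate2 conversion of the checkpoint at model_path rather than the original Hugging Face weights. A minimal sketch of how such an int8 conversion could be produced is shown below; the output directory name deepseek-math-Numina is an assumption chosen only to match the path used in the app.

import ctranslate2

# Hypothetical conversion step (not part of this commit): quantize the
# Hugging Face checkpoint to int8 and write a CTranslate2 model directory.
converter = ctranslate2.converters.TransformersConverter("AI-MO/NuminaMath-7B-TIR")
converter.convert("deepseek-math-Numina", quantization="int8")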