liudongqing committed on
Commit 3e60b85 · 1 Parent(s): 28092bb

Use the open api to do the score

Files changed (2)
  1. app.py +43 -7
  2. requirements.txt +2 -1
app.py CHANGED
@@ -4,7 +4,13 @@ from threading import Thread
 import gradio as gr
 import spaces
 from PIL import Image
+from huggingface_hub import InferenceClient
+import os
 
+client = InferenceClient(
+    "openai/gpt-4o-mini",  # or other models like "openai/gpt-4"
+    token=os.environ["OPENAI_API_KEY"]  # Your OpenAI API key
+)
 
 # from zipnn import zipnn_hf
 
@@ -22,10 +28,10 @@ processor = AutoProcessor.from_pretrained(model_id)
 
 
 @spaces.GPU
-def score_it(input_img):
+def ocr(input_img):
     # Convert numpy array to PIL Image
     pil_image = Image.fromarray(input_img.astype('uint8'))
-
+
     # Now use the PIL Image as before
     image = pil_image.convert("RGB").resize((224, 224))
 
@@ -34,13 +40,43 @@ def score_it(input_img):
 
     output = model.generate(**inputs, max_new_tokens=200)
     return processor.decode(output[0])
-
+
+
+prompt = '''Review the following essay and score it. Output the score and the reasoning.The output format is:
+Score: *<score>*
+Reasoning: 1... 2... 3... 4... 5...
+'''
+
+def score_with_gpt(text):
+    messages = [
+        {"role": "system", "content":
+         "You are a high school English teacher,"
+         + "and you are grading a student's English essay. You are given a student's essay, and you need to score it based on the following criteria:"
+         + "1. The essay is well-organized and flows logically."
+         + "2. The essay is well-written and uses correct grammar and punctuation."
+         + "3. The words are spelled correctly."
+         + "4. The words and phrases are appropriate for the context."
+         + "5. Scale the score from 0 to 100."
+         },
+        {"role": "user", "content": text}
+    ]
+
+    return client.chat_completion(
+        messages=messages,
+        max_tokens=500
+    )
+
+
+def score_it(input_img):
+    text = ocr(input_img)
+    return score_with_gpt(text)
 
 
 demo = gr.Interface(fn=score_it, title="Upload your English script and get the score",
-                    inputs=[gr.Image()],
-                    outputs=['text'],
-                    stop_btn="Stop Generation",
-                    )
+                    inputs=[gr.Image()],
+                    outputs=[gr.Textbox(label="Text", lines=10), gr.Textbox(
+                        label="Score", lines=10)],
+                    stop_btn="Stop Generation",
+                    )
 
 demo.launch(debug=True)
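
Note that after this change the Interface declares two output Textboxes ("Text" and "Score") while the new score_it returns a single chat-completion response object. A minimal sketch (not part of the commit) of how the two helpers could be wired so each output receives a string; the .choices[0].message.content access assumes the ChatCompletionOutput structure returned by InferenceClient.chat_completion:

# Illustrative sketch only -- not part of this commit.
# Assumes ocr() and score_with_gpt() as defined in app.py above, and that
# client.chat_completion() returns a ChatCompletionOutput whose reply text
# is available at .choices[0].message.content.
def score_it(input_img):
    text = ocr(input_img)                          # OCR the uploaded image
    response = score_with_gpt(text)                # ask the chat model to grade it
    score = response.choices[0].message.content    # pull the plain-text reply
    return text, score                             # one value per output Textbox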
requirements.txt CHANGED
@@ -1,4 +1,5 @@
 torch==2.4.0
 transformers==4.46.2
 accelerate>=0.26.0
-zipnn==0.3.4
+zipnn==0.3.4
+huggingface_hub
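
One note on the dependency: huggingface_hub is added without a version pin, unlike the other entries. If the Space should stay reproducible, it could be pinned in the same style; the exact version below is an assumption (any release that ships InferenceClient.chat_completion would do), not part of this commit:

huggingface_hub>=0.24.0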