da03 committed on
Commit 39a2dae
1 Parent(s): b2ef87d
Files changed (1)
  1. app.py +6 -1
app.py CHANGED
@@ -18,21 +18,26 @@ def postprocess(raw_output):
 
 @spaces.GPU
 def predict_product(num1, num2):
+    # Reverse input digits and add spaces
     input_text = f'{preprocess(num1)} * {preprocess(num2)} ='
+
     inputs = tokenizer(input_text, return_tensors='pt').to('cuda' if torch.cuda.is_available() else 'cpu')
     model.to('cuda' if torch.cuda.is_available() else 'cpu')
+
+    # Generate output
     outputs = model.generate(**inputs, max_new_tokens=40)
+
     output = outputs[0][inputs['input_ids'].shape[-1]:]
     raw_output = tokenizer.decode(output, skip_special_tokens=True)
     prediction = postprocess(raw_output)
 
+    # Evaluate the correctness of the result
     try:
         num1_int = int(num1)
         num2_int = int(num2)
         valid_input = True
     except ValueError:
         valid_input = False
-
     if valid_input:
         correct_product = str(num1_int * num2_int)
         is_correct = (prediction == correct_product)
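
The hunk header shows that preprocess and postprocess are defined earlier in app.py; they are not part of this change. Going only by the new comment "Reverse input digits and add spaces", a minimal sketch of what those helpers might look like (the names come from the file, but the bodies below are assumptions, not the committed code):

def preprocess(num):
    # Assumed behavior: reverse the digit order and separate digits with spaces,
    # e.g. '123' -> '3 2 1', so the model sees one digit per token.
    return ' '.join(reversed(str(num).strip()))

def postprocess(raw_output):
    # Assumed behavior: keep only digit tokens from the generated text,
    # then reverse them back into normal order, e.g. '8 0 4' -> '408'.
    digits = [tok for tok in raw_output.strip().split() if tok.isdigit()]
    return ''.join(reversed(digits))

Under these assumptions, predict_product('12', '34') would build the prompt '2 1 * 4 3 =' and postprocess would turn a generation like '8 0 4' back into '408', which the new correctness check then compares against str(12 * 34).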