pedro committed
Commit
8d6659b
1 Parent(s): 85e28c3

fixed probability calc

Files changed (1)
  1. app.py +3 -3
app.py CHANGED
@@ -1,6 +1,6 @@
 import gradio as gr
 from transformers import AutoModelForSequenceClassification, AutoTokenizer
-from torch.nn.functional import softmax
+import numpy as np
 
 
 model_name = "Ngit/MiniLMv2-L6-H384-goemotions-v2"
@@ -14,13 +14,13 @@ def evaluate(text):
     if text:
         input_ids = tokenizer(text, return_tensors="pt").input_ids
         output = model(input_ids)
-        proba = softmax(output.logits, dim=1)[0]
+        proba = 1 / (1 + np.exp(-output.logits.detach().numpy()[0]))
         proba = [int(v*1000)/10 for v in proba]
         return proba
 
 
 with gr.Blocks() as demo:
-    text = gr.Textbox(label="Text to evaluate", lines=12)
+    text = gr.Textbox(label="Text to evaluate", lines=6)
     with gr.Row():
         with gr.Group():
             t_adm = gr.Slider(label="admiration", value=0, maximum=100)
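
For context, the new expression 1 / (1 + np.exp(-logits)) is the element-wise sigmoid, so each emotion label gets its own independent probability instead of the single softmax distribution that forced all label scores to sum to 1. Below is a minimal sketch of the fixed scoring step, written with torch.sigmoid for brevity; this is an illustrative rewrite, not the committed code, which uses the NumPy expression shown in the diff above.

# Sketch of the corrected probability calculation, assuming the same model
# and tokenizer as app.py. torch.sigmoid(x) is numerically equivalent to
# the committed 1 / (1 + np.exp(-x)).
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

model_name = "Ngit/MiniLMv2-L6-H384-goemotions-v2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)

def evaluate(text):
    if text:
        input_ids = tokenizer(text, return_tensors="pt").input_ids
        with torch.no_grad():
            output = model(input_ids)
        # Element-wise sigmoid: one independent probability per label,
        # rather than a softmax distribution across labels.
        proba = torch.sigmoid(output.logits)[0]
        # Same truncation as the app: percentage with one decimal place.
        return [int(v * 1000) / 10 for v in proba]

print(evaluate("I absolutely love this!"))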