nebiyu29 commited on
Commit
8439e92
·
verified ·
1 Parent(s): 29caf41

changed the model input so that it is passed as input ids and an attention mask

Browse files
Files changed (1) hide show
  1. app.py +5 -2
app.py CHANGED
@@ -16,8 +16,11 @@ def model_classifier(text):
16
  if len(text)==0:
17
  return f"the input text is {text}"
18
  else:
19
- encoded_input=tokenizer(text) #this is where the encoding happens
20
- logits=model(encoded_input).logits #this is the logits of the labels
 
 
 
21
  probs_label=softmax(logits,dim=-1) #turning the probability distribution into normalize form
22
  id2label=model.config.id2label
23
  return_probs={id2label[i]:probs.item() for i,probs in enumerate(probs_label[0])}
 
16
  if len(text)==0:
17
  return f"the input text is {text}"
18
  else:
19
+ encoded_input=tokenizer(text,return_tensors="pt",truncation=True,padding=True) #this is where the encoding happens
20
+ input_ids=encoded_input["input_ids"]
21
+ attention_mask=encoded_input["attention_mask"]
22
+
23
+ logits=model(input_ids,attention_mask=attention_mask).logits #this is the logits of the labels
24
  probs_label=softmax(logits,dim=-1) #turning the probability distribution into normalize form
25
  id2label=model.config.id2label
26
  return_probs={id2label[i]:probs.item() for i,probs in enumerate(probs_label[0])}