# app.py — Gradio demo for garrettbaber/twitter-roberta-base-fear-intensity
# (header reconstructed from Hugging Face file-viewer residue: commit 416a17c,
#  "Update app.py", 649 bytes)
from functools import lru_cache

import gradio as gr
from transformers import AutoModelForSequenceClassification, AutoTokenizer
@lru_cache(maxsize=1)
def _load_model_and_tokenizer():
    """Load the fear-intensity model and tokenizer exactly once.

    The original code re-ran ``from_pretrained`` inside every request, which
    re-reads (and on first use, re-downloads) the weights per call. Caching
    the pair hoists that expensive work out of the request path without
    changing what the scorer returns.
    """
    model = AutoModelForSequenceClassification.from_pretrained(
        "garrettbaber/twitter-roberta-base-fear-intensity"
    )
    tokenizer = AutoTokenizer.from_pretrained(
        "garrettbaber/twitter-roberta-base-fear-intensity"
    )
    return model, tokenizer


def processInput(input):
    """Score the fear intensity of *input* text.

    Parameters
    ----------
    input : str
        Raw text to score (``input`` shadows the builtin, but the parameter
        name is kept for caller compatibility).

    Returns
    -------
    list
        The model's raw logits as nested Python lists
        (``logits.tolist()`` — presumably a single regression score per
        text; verify against the model card).
    """
    model, tokenizer = _load_model_and_tokenizer()
    # Tokenize into PyTorch tensors, run a forward pass, and pull out logits.
    tokens = tokenizer(input, return_tensors="pt")
    outputs = model(**tokens)
    logits = outputs.get("logits")
    return logits.tolist()
# Minimal text-in/text-out web UI around the scorer; launch() starts the
# Gradio server and blocks until it is shut down.
app = gr.Interface(
    fn=processInput,
    inputs="text",
    outputs="text",
)
app.launch()