import gradio as gr
from transformers import AutoModelForSequenceClassification, AutoTokenizer


def processInput(name):
    # Model path left disabled for now; the function just echoes a greeting.
    #model = AutoModelForSequenceClassification.from_pretrained("garrettbaber/twitter-roberta-base-fear-intensity")
    #tokenizer = AutoTokenizer.from_pretrained("garrettbaber/twitter-roberta-base-fear-intensity")
    #inputs = tokenizer(name, return_tensors="pt")
    #outputs = model(**inputs)
    return "Hello " + name
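
# --- Sketch (not wired into the Interface below) -----------------------------
# One possible completion of the commented-out model path above: tokenize the
# input, run the fear-intensity checkpoint, and return its score. This assumes
# the checkpoint exposes a single regression-style logit; process_input_with_model
# is an illustrative helper name, not part of the original app. For a real app,
# load the tokenizer/model once at module level instead of on every call, and
# pass fn=process_input_with_model to gr.Interface instead of processInput.
def process_input_with_model(text):
    model_id = "garrettbaber/twitter-roberta-base-fear-intensity"
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    model = AutoModelForSequenceClassification.from_pretrained(model_id)
    inputs = tokenizer(text, return_tensors="pt")
    outputs = model(**inputs)
    # Assumption: a single regression-style logit holds the intensity score
    return f"Fear intensity: {outputs.logits.squeeze().item():.3f}"
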
app = gr.Interface(fn=processInput, inputs="text", outputs="text")
app.launch()