import gradio as gr
import torch
import transformers

# Load the tokenizer and a two-label classification head on top of XLM-RoBERTa-Large.
tokenizer = transformers.AutoTokenizer.from_pretrained("xlm-roberta-large")
model = transformers.AutoModelForSequenceClassification.from_pretrained("xlm-roberta-large", num_labels=2)

def predict(first_option, second_option):
    # Encode the two options as a single sentence pair.
    input_ids = tokenizer.encode(first_option, second_option, return_tensors="pt", truncation=True, padding=True)
    with torch.no_grad():
        output = model(input_ids)[0]
    # Return whichever option the predicted class index points to.
    result = torch.argmax(output).item()
    return first_option if result == 0 else second_option

inputs = [gr.Textbox(label="Option 1"), gr.Textbox(label="Option 2")]
output = gr.Textbox(label="Chosen Option")

interface = gr.Interface(fn=predict, inputs=inputs, outputs=output, title="Decision Making with XLM-Roberta-Large", description="Input your two options and let XLM-Roberta-Large choose one.")
interface.launch()