import gradio as gr
import os
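# NOTE: installing packages at runtime is a quick hack for demo Spaces;
# on Hugging Face Spaces these dependencies would normally be pinned in requirements.txt.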
os.system("pip install transformers sentencepiece torch")
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
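# SMALL-100 (alirezamsh/small100) is a compact many-to-many translation model
# covering the 100 languages of M2M-100.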
tokenizer = AutoTokenizer.from_pretrained("alirezamsh/small100")
model = AutoModelForSeq2SeqLM.from_pretrained("alirezamsh/small100")
def fn(text, lang):
    # SMALL-100 is conditioned only on the target language code; the source
    # language does not need to be specified.
    tokenizer.tgt_lang = lang
    encoded_text = tokenizer(text, return_tensors="pt")
    generated_tokens = model.generate(**encoded_text)
    # batch_decode returns a list of strings; return the single translation.
    return tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)[0]
demo = gr.Interface(
    fn=fn,
    inputs=[
        gr.Textbox(label="Text to translate"),
        gr.Textbox(label="Target language code (e.g. 'fr')"),
    ],
    outputs=gr.Textbox(label="Translation"),
)
demo.launch()