"""Gradio demo: multilingual translation with the SMALL-100 model.

Wraps ``alirezamsh/small100`` (an M2M100-architecture checkpoint) in a simple
two-field Gradio interface: input text and a target-language code.
"""

import os

# NOTE(review): installing dependencies at runtime via os.system is a
# Hugging Face Spaces hack; these belong in requirements.txt. Kept for
# backward compatibility, but moved BEFORE the third-party imports that
# depend on it (the original ran it after `import gradio`).
os.system("pip install transformers sentencepiece torch")

import gradio as gr
from transformers import M2M100ForConditionalGeneration

# Project-local tokenizer shim for SMALL-100 (shipped alongside this file,
# not part of the transformers package).
from tokenization_small100 import SMALL100Tokenizer

# Loaded once at startup; downloads the checkpoint on first run.
model = M2M100ForConditionalGeneration.from_pretrained("alirezamsh/small100")
tokenizer = SMALL100Tokenizer.from_pretrained("alirezamsh/small100")


def fn(text, lang):
    """Translate *text* into the target language *lang*.

    Args:
        text: Source text to translate.
        lang: Target-language code accepted by the SMALL-100 tokenizer
              (presumably an ISO 639-1 code such as "fr" — confirm against
              the model card).

    Returns:
        The translated text as a single string.
    """
    # SMALL-100 selects the target language on the tokenizer, not via
    # a forced BOS token on generate().
    tokenizer.tgt_lang = lang
    encoded = tokenizer(text, return_tensors="pt")
    generated_tokens = model.generate(**encoded)
    # batch_decode returns a list of strings; with a single input there is
    # exactly one translation — return it as a plain string (the original
    # returned the whole list, which Gradio would render as its repr).
    return tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)[0]


demo = gr.Interface(fn=fn, inputs=["text", "text"], outputs="text")

# Guard so importing this module (e.g. for testing) does not start a server.
if __name__ == "__main__":
    demo.launch()