import gradio as gr
from transformers import AutoModelForSeq2SeqLM, NllbTokenizer

# Load the pre-trained model and tokenizer
model_name = "sarahai/nllb-ru-uz"
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
tokenizer = NllbTokenizer.from_pretrained(model_name)

def translate(text, src_lang="rus_Cyrl", tgt_lang="uzn_Latn"):
    """Translates text from the source to the target language."""
    # Tell the tokenizer which language the input is in
    tokenizer.src_lang = src_lang
    inputs = tokenizer(text, return_tensors="pt")
    # Force the decoder to start with the target-language token
    translated = model.generate(
        **inputs,
        forced_bos_token_id=tokenizer.convert_tokens_to_ids(tgt_lang),
    )
    return tokenizer.batch_decode(translated, skip_special_tokens=True)[0]

# Define the Gradio interface (language choices use NLLB language codes)
interface = gr.Interface(
    fn=translate,
    inputs=[
        gr.Textbox(label="Text to Translate"),
        gr.Dropdown(choices=["rus_Cyrl"], value="rus_Cyrl", label="Source Language"),
        gr.Dropdown(choices=["uzn_Latn"], value="uzn_Latn", label="Target Language"),
    ],
    outputs="textbox",
    title="Russian to Uzbek Translator",
    description="Translate text from Russian to Uzbek using the `sarahai/nllb-ru-uz` model.",
)

# Launch the Gradio app; share=True creates a temporary public link
interface.launch(share=True, debug=True)
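
# Optional: once the app is running, it can also be queried programmatically.
# A minimal sketch using the gradio_client package, assuming the default local
# URL and the default "/predict" endpoint of the interface above:
#
#   from gradio_client import Client
#
#   client = Client("http://127.0.0.1:7860")
#   result = client.predict(
#       "Добрый день!",   # text to translate
#       "rus_Cyrl",       # source language
#       "uzn_Latn",       # target language
#       api_name="/predict",
#   )
#   print(result)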