import torch
from transformers import MarianMTModel, MarianTokenizer

# English-to-Hindi MarianMT model from the Helsinki-NLP OPUS-MT project
model_name = 'Helsinki-NLP/opus-mt-en-hi'
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)

# Run on GPU when available, otherwise fall back to CPU
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
model.to(device)


def translate_english_to_hindi(text):
    # Tokenize the input and move the tensors to the same device as the model
    inputs = tokenizer(text, return_tensors="pt", truncation=True,
                       padding=True, max_length=512).to(device)
    translation = model.generate(**inputs)
    # Decode the first generated sequence back into plain text
    translated_text = tokenizer.decode(translation[0], skip_special_tokens=True)
    return translated_text


input_text = input('Enter text: ')
translated_text = translate_english_to_hindi(input_text)
print('Hindi translation: ', translated_text)