from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline
import gradio as grd

# Mapping from display names to the FLORES-200 language codes used by NLLB.
INDIC = {
    "Hindi": "hin_Deva",
    "English": "eng_Latn",
    "Kannada": "kan_Knda",
    "Tamil": "tam_Taml",
    "Bengali": "ben_Beng",
    "Nepali": "npi_Deva",
    "Telugu": "tel_Telu",
    "Gujarati": "guj_Gujr",
    "Marathi": "mar_Deva",
    "Odia": "ory_Orya",
    "Sanskrit": "san_Deva",
    "Maithili": "mai_Deva",
    "Urdu": "urd_Arab",
    "Punjabi": "pan_Guru",
    "Malayalam": "mal_Mlym",
    "Assamese": "asm_Beng",
}

model_name = 'facebook/nllb-200-distilled-600M'

# Load the distilled 600M-parameter NLLB-200 checkpoint and its tokenizer.
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)


def translate(text, src_lang, tgt_lang):
    # Build a translation pipeline for the selected source/target pair and run it.
    translator = pipeline('translation', model=model, tokenizer=tokenizer,
                          src_lang=INDIC[src_lang], tgt_lang=INDIC[tgt_lang], max_length=200)
    return translator(text)[0]['translation_text']
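# A quick sanity check (hypothetical usage, not part of the original app):
# uncomment to verify the model end-to-end before launching the Gradio UI.
# print(translate("I am feeling very good today.", "English", "Hindi"))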



languages = list(INDIC.keys())

# Gradio UI components: source/target language selectors, input box, and output box.
iddwn = grd.Dropdown(choices=languages, value="English", label="Select Source Language")
oddwn = grd.Dropdown(choices=languages, value="Hindi", label="Select Target Language")

txt = grd.Textbox(lines=5, placeholder="Enter Text to translate", label="Enter Text in Source Language")
txt_output = grd.Textbox(label="Translated text in Target Language")

examples = [['I want to translate this sentence into Hindi', 'English', 'Hindi'],
            ['I am feeling very good today.', 'English', 'Bengali']]

supp = ', '.join(languages)
iface = grd.Interface(fn=translate, inputs=[txt, iddwn, oddwn],
                      outputs=txt_output, title='Translation for 15 Indic Languages',
                      css="footer {visibility: hidden}",
                      description='This is a demo based on NLLB by Meta. Supported languages: ' + supp,
                      article='Original repo [link](https://github.com/facebookresearch/fairseq/tree/nllb) by MetaAI. Contact @harveenchadha on Twitter for any issues.',
                      examples=examples)
iface.queue().launch()
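# Running locally (assumed setup, not from the original repo): install the
# dependencies with `pip install torch transformers gradio`, run the script
# (e.g. `python app.py`), and open the local URL that Gradio prints.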