## How to use
```python
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch
# ISO 639-1 language code -> Chinese-language name of that language,
# in the form the BigTranslate instruction template expects.
BIGTRANSLATE_LANG_TABLE = {
    "zh": "汉语",
    "es": "西班牙语",
    "fr": "法语",
    "de": "德语",
    "hi": "印地语",
    "pt": "葡萄牙语",
    "tr": "土耳其语",
    "en": "英语",
    "ja": "日语"
}

def get_prompt(src_lang, tgt_lang, src_sentence):
    """Build the BigTranslate instruction prompt for one source sentence.

    src_lang / tgt_lang must be keys of BIGTRANSLATE_LANG_TABLE;
    an unsupported code raises KeyError.
    """
    src_name = BIGTRANSLATE_LANG_TABLE[src_lang]
    tgt_name = BIGTRANSLATE_LANG_TABLE[tgt_lang]
    instruction = f"请将以下{src_name}句子翻译成{tgt_name}:{src_sentence}"
    header = "以下是一个描述任务的指令,请写一个完成该指令的适当回复。\n\n"
    return f"{header}### 指令:\n{instruction}\n\n### 回复:"
def translate(input_text, src_lang, trg_lang):
    """Translate *input_text* from src_lang to trg_lang with BigTranslate.

    NOTE(review): relies on module-level `model` and `tokenizer`, which this
    snippet never creates — presumably loaded beforehand via
    AutoModelForCausalLM.from_pretrained / AutoTokenizer.from_pretrained; confirm.
    """
    prompt = get_prompt(src_lang, trg_lang, input_text)
    encoded = tokenizer(prompt, return_tensors="pt")
    output_ids = model.generate(**encoded, max_new_tokens=256)[0]
    decoded = tokenizer.decode(output_ids, skip_special_tokens=True)
    # generate() echoes the prompt before the completion; slice it off so
    # only the model's answer is returned.
    return decoded[len(prompt):]
translation = translate("set the temperature on my thermostat to 29 degrees ", "en", "de") # translation: stell die temperatur auf meinem thermostat auf 29 grad
```
## Model fine-tuning code
https://github.com/Samsung/MT-LLM-NLU/tree/main/BigTranslateFineTuning