Update README.md
README.md
@@ -24,8 +24,8 @@ With a commitment to quality and innovation, our translation model not only tran
 ### On GPU
 ```python
 from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
-tokenizer = AutoTokenizer.from_pretrained("minhtoan/t5-
-model = AutoModelForSeq2SeqLM.from_pretrained("minhtoan/t5-
+tokenizer = AutoTokenizer.from_pretrained("minhtoan/t5-translate-lao-english")
+model = AutoModelForSeq2SeqLM.from_pretrained("minhtoan/t5-translate-lao-english")
 model.cuda()
 src = "ຂ້ອຍຮັກເຈົ້າ"
 tokenized_text = tokenizer.encode(src, return_tensors="pt").cuda()
@@ -39,8 +39,8 @@ output
 ### On CPU
 ```python
 from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
-tokenizer = AutoTokenizer.from_pretrained("minhtoan/t5-
-model = AutoModelForSeq2SeqLM.from_pretrained("minhtoan/t5-
+tokenizer = AutoTokenizer.from_pretrained("minhtoan/t5-translate-lao-english")
+model = AutoModelForSeq2SeqLM.from_pretrained("minhtoan/t5-translate-lao-english")
 src = "ຂ້ອຍຮັກເຈົ້າ"
 input_ids = tokenizer(src, max_length=200, return_tensors="pt", padding="max_length", truncation=True).input_ids
 outputs = model.generate(input_ids=input_ids, max_new_tokens=140)