Fix padding token #1
by jbochi - opened

Files changed:
- .gitattributes +0 -1
- README.md +2 -6
- model-q4k.gguf +0 -3
- model-q6k.gguf +0 -3
.gitattributes CHANGED

@@ -34,4 +34,3 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
 tokenizer.json filter=lfs diff=lfs merge=lfs -text
-*.gguf filter=lfs diff=lfs merge=lfs -text
README.md CHANGED

@@ -484,15 +484,11 @@ Find below some example scripts on how to use the model:
 <details>
 <summary> Click to expand </summary>
 
-First, install the Python packages that are required:
-
-`pip install transformers accelerate sentencepiece`
-
 ```python
-from transformers import T5ForConditionalGeneration, T5Tokenizer
+from transformers import T5ForConditionalGeneration, T5Tokenizer, GenerationConfig
 
 model_name = 'jbochi/madlad400-7b-mt'
-model = T5ForConditionalGeneration.from_pretrained(model_name,
+model = T5ForConditionalGeneration.from_pretrained(model_name, device="auto")
 tokenizer = T5Tokenizer.from_pretrained(model_name)
 
 text = "<2pt> I love pizza!"
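For context, here is a minimal end-to-end sketch of the usage the README snippet is building toward. It is not taken verbatim from this PR: the `device_map="auto"` argument (in place of the diff's `device="auto"`), the `max_new_tokens` value, and the generate/decode calls are assumptions based on the standard `transformers` T5 API.

```python
# Minimal sketch (assumed usage, not verbatim from this PR's README):
# load the MADLAD-400 7B MT model and translate an English sentence to Portuguese.
from transformers import T5ForConditionalGeneration, T5Tokenizer

model_name = 'jbochi/madlad400-7b-mt'
# Assumption: device_map="auto" (requires the `accelerate` package) rather than device="auto".
model = T5ForConditionalGeneration.from_pretrained(model_name, device_map="auto")
tokenizer = T5Tokenizer.from_pretrained(model_name)

# The "<2pt>" prefix selects the target language (Portuguese).
text = "<2pt> I love pizza!"
input_ids = tokenizer(text, return_tensors="pt").input_ids.to(model.device)

# Generate the translation and strip special tokens (e.g. pad/eos) when decoding.
outputs = model.generate(input_ids=input_ids, max_new_tokens=20)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```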
model-q4k.gguf DELETED

@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:bcf42f3f2db84d171585de558c17b725a166fcae7b76a4c9ba0edd47c88bdf96
-size 4668768576
model-q6k.gguf DELETED

@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:f56153becae886067316a5fa529127fce0b6db16e662a904b5709388f9b62545
-size 6807667008