Update checkpoint for transformers>=4.29
#4
by ArthurZ (HF staff)
- .gitattributes +1 -0
- config.json +1 -1
- generation_config.json +1 -1
- model.safetensors +3 -0
- pytorch_model.bin +2 -2
.gitattributes
CHANGED
@@ -26,3 +26,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zstandard filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
 *.spm filter=lfs diff=lfs merge=lfs -text
+model.safetensors filter=lfs diff=lfs merge=lfs -text
config.json
CHANGED
@@ -39,7 +39,7 @@
   "share_encoder_decoder_embeddings": true,
   "static_position_embeddings": true,
   "torch_dtype": "float16",
-  "transformers_version": "4.
+  "transformers_version": "4.34.0.dev0",
   "use_cache": true,
   "vocab_size": 61131
 }
generation_config.json
CHANGED
@@ -12,5 +12,5 @@
   "num_beams": 4,
   "pad_token_id": 61130,
   "renormalize_logits": true,
-  "transformers_version": "4.
+  "transformers_version": "4.34.0.dev0"
 }
model.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2af78d2ff8d53c73e88dd032506c0dff28d17fb6a828f8b572afe128dbe1647e
+size 478062830
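The LFS pointer above references the new safetensors weight file rather than the weights themselves. The sketch below is not part of this PR; it only illustrates how the added checkpoint could be inspected once downloaded, and the local filename model.safetensors is an assumption about where the file lands on disk.

# Minimal sketch: inspect the newly added safetensors checkpoint.
# Assumes the file has already been downloaded locally (e.g. via huggingface_hub).
from safetensors.torch import load_file

state_dict = load_file("model.safetensors")   # dict of tensor name -> torch.Tensor
print(f"{len(state_dict)} tensors loaded")    # quick sanity check of the checkpoint contents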
pytorch_model.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:80b676e7d26f42f1676489ce76833274b2050109b4a9c8abf3a348479a0cc244
+size 478119813
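Taken together, the changes bump transformers_version, refresh pytorch_model.bin, and add a safetensors copy of the weights. Below is a minimal usage sketch, assuming transformers>=4.29 with safetensors installed and a seq2seq (Marian-style) checkpoint, as the config keys suggest; the repo id is a placeholder, since the diff does not show this model's Hub name.

from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

repo_id = "<org>/<model-name>"  # placeholder: substitute this repository's Hub id

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForSeq2SeqLM.from_pretrained(
    repo_id,
    use_safetensors=True,  # optional: recent versions prefer model.safetensors automatically when present
)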