Add no_repeat_ngram_size to config to avoid repeatedly generating <s>.
Browse files — config.json (+2, −1)
config.json
CHANGED
@@ -33,5 +33,6 @@
   "torch_dtype": "float32",
   "transformers_version": "4.17.0.dev0",
   "use_cache": true,
-  "vocab_size": 50265
+  "vocab_size": 50265,
+  "no_repeat_ngram_size": 3
 }