Commit
·
47c92ce
1
Parent(s):
f0bf410
Include the new reference to the minimum and maximum new tokens, and set use_cache to true.
Browse files
- config.json +3 -1
config.json
CHANGED
@@ -46,7 +46,9 @@
|
|
46 |
"max_decoder_position_embeddings": 1024,
|
47 |
"max_encoder_position_embeddings": 16384,
|
48 |
"max_length": 1024,
|
|
|
49 |
"min_length": 100,
|
|
|
50 |
"model_type": "led",
|
51 |
"no_repeat_ngram_size": 3,
|
52 |
"num_beams": 4,
|
@@ -54,6 +56,6 @@
|
|
54 |
"pad_token_id": 1,
|
55 |
"torch_dtype": "float32",
|
56 |
"transformers_version": "4.30.2",
|
57 |
-
"use_cache": false,
|
58 |
"vocab_size": 50265
|
59 |
}
|
|
|
46 |
"max_decoder_position_embeddings": 1024,
|
47 |
"max_encoder_position_embeddings": 16384,
|
48 |
"max_length": 1024,
|
49 |
+
"max_new_tokens": 1024,
|
50 |
"min_length": 100,
|
51 |
+
"min_new_tokens": 100,
|
52 |
"model_type": "led",
|
53 |
"no_repeat_ngram_size": 3,
|
54 |
"num_beams": 4,
|
|
|
56 |
"pad_token_id": 1,
|
57 |
"torch_dtype": "float32",
|
58 |
"transformers_version": "4.30.2",
|
59 |
+
"use_cache": true,
|
60 |
"vocab_size": 50265
|
61 |
}
|