Upload config
config.json CHANGED (+1 -5)
@@ -1,8 +1,5 @@
 {
   "_commit_hash": null,
-  "architectures": [
-    "VisionEncoderDecoderModel"
-  ],
   "decoder": {
     "_name_or_path": "imvladikon/alephbertgimmel-base-512",
     "add_cross_attention": true,
@@ -170,13 +167,12 @@
   "eos_token_id": 2,
   "is_encoder_decoder": true,
   "length_penalty": 2.0,
-  "max_length":
+  "max_length": 4,
   "model_type": "vision-encoder-decoder",
   "no_repeat_ngram_size": 3,
   "num_beams": 4,
   "pad_token_id": 3,
   "tie_word_embeddings": false,
-  "torch_dtype": "float32",
   "transformers_version": null,
   "vocab_size": 128000
 }
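For context, the generation-related keys touched here ("max_length", alongside the existing "num_beams", "length_penalty" and "no_repeat_ngram_size") sit at the root of config.json and are picked up as generation defaults when the config is loaded with transformers (recent transformers versions may migrate them to a separate generation config). A minimal sketch, assuming a local copy of this repository in a directory named "model_dir" (a placeholder) that contains the edited config.json:

from transformers import VisionEncoderDecoderConfig

# "model_dir" is a placeholder for a local clone/download of this repository.
config = VisionEncoderDecoderConfig.from_pretrained("model_dir")

# Root-level generation defaults from config.json
print(config.max_length)            # 4   (changed in this commit)
print(config.num_beams)             # 4
print(config.length_penalty)        # 2.0
print(config.no_repeat_ngram_size)  # 3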