ArthurZ (HF staff) committed
Commit 3514146
1 Parent(s): b24b76a

Update `max_length` param

Files changed (1):
  config.json (+7 -1)
config.json CHANGED
@@ -1,6 +1,10 @@
 {
+  "_name_or_path": "openai/whisper-small",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
+  "architectures": [
+    "WhisperForConditionalGeneration"
+  ],
   "attention_dropout": 0.0,
   "begin_suppress_tokens": [
     220,
@@ -35,6 +39,7 @@
   ],
   "init_std": 0.02,
   "is_encoder_decoder": true,
+  "max_length": 448,
   "max_source_positions": 1500,
   "max_target_positions": 448,
   "model_type": "whisper",
@@ -132,7 +137,8 @@
     50361,
     50362
   ],
-  "transformers_version": "4.23.0.dev0",
+  "torch_dtype": "float32",
+  "transformers_version": "4.24.0.dev0",
   "use_cache": true,
   "vocab_size": 51865
 }
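
For context, the added `max_length` entry is the default decoding-length cap that `generate()` reads from the model config; the value 448 matches `max_target_positions`, the decoder's positional limit. A minimal sketch of checking the updated value, assuming the `transformers` library and the `openai/whisper-small` checkpoint named in the diff's `_name_or_path` field:

# Minimal sketch: confirm the config now carries max_length=448.
# Assumes the transformers library is installed; "openai/whisper-small"
# is the checkpoint referenced by "_name_or_path" in this diff.
from transformers import WhisperConfig

config = WhisperConfig.from_pretrained("openai/whisper-small")
print(config.max_length)            # 448, per this commit
print(config.max_target_positions)  # 448 -- the decoder's positional
                                    # limit, which max_length is set to match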