fsicoli committed
Commit 7bc2f20
1 Parent(s): fcc65b1

Upload 10 files

config.json CHANGED
@@ -2,7 +2,7 @@
   "_name_or_path": "openai/whisper-large-v3",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
- "apply_spec_augment": false,
+ "apply_spec_augment": true,
  "architectures": [
    "WhisperForConditionalGeneration"
  ],
@@ -43,10 +43,9 @@
  "num_mel_bins": 128,
  "pad_token_id": 50256,
  "scale_embedding": false,
- "suppress_tokens": [],
- "torch_dtype": "float32",
+ "torch_dtype": "float16",
  "transformers_version": "4.37.0.dev0",
- "use_cache": false,
+ "use_cache": true,
  "use_weighted_layer_sum": false,
  "vocab_size": 51866
 }
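For context: a minimal sketch, not part of this commit, of how the changed fields are typically consumed with Hugging Face transformers. The repo id openai/whisper-large-v3 below is the base model named in _name_or_path, standing in for this fine-tuned checkpoint.

import torch
from transformers import WhisperConfig, WhisperForConditionalGeneration

# Load the base config and mirror the values saved in this commit's config.json.
config = WhisperConfig.from_pretrained("openai/whisper-large-v3")
config.apply_spec_augment = True   # enable SpecAugment masking during fine-tuning
config.use_cache = True            # reuse key/value states during generation

# "torch_dtype": "float16" indicates the checkpoint weights are stored in half precision.
model = WhisperForConditionalGeneration.from_pretrained(
    "openai/whisper-large-v3",
    config=config,
    torch_dtype=torch.float16,
)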
runs/Jan17_11-35-18_SEPINF-BW03A/events.out.tfevents.1705503273.SEPINF-BW03A.8676.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:513c9742a13a50d19361ce5cc114b223ce033a343915ddbfe14ab332509265f7
+ size 11790
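The added file is a Git LFS pointer (version, oid, size) for a TensorBoard event log written during training. A minimal sketch, not part of this commit, of reading it after fetching the actual blob with git lfs pull:

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Point the accumulator at the run directory that contains the events file.
acc = EventAccumulator("runs/Jan17_11-35-18_SEPINF-BW03A")
acc.Reload()                  # parse the event records
print(acc.Tags()["scalars"])  # list the logged scalar tags (e.g. training loss)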
vocab.json CHANGED
The diff for this file is too large to render. See raw diff