null010 committed
Commit 62e4caf · 1 Parent(s): 8ace348

Training in progress, step 10

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "openai/whisper-tiny",
+  "_name_or_path": "openai/whisper-base",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
   "apply_spec_augment": false,
@@ -13,17 +13,17 @@
   ],
   "bos_token_id": 50257,
   "classifier_proj_size": 256,
-  "d_model": 384,
-  "decoder_attention_heads": 6,
-  "decoder_ffn_dim": 1536,
+  "d_model": 512,
+  "decoder_attention_heads": 8,
+  "decoder_ffn_dim": 2048,
   "decoder_layerdrop": 0.0,
-  "decoder_layers": 4,
+  "decoder_layers": 6,
   "decoder_start_token_id": 50258,
   "dropout": 0.0,
-  "encoder_attention_heads": 6,
-  "encoder_ffn_dim": 1536,
+  "encoder_attention_heads": 8,
+  "encoder_ffn_dim": 2048,
   "encoder_layerdrop": 0.0,
-  "encoder_layers": 4,
+  "encoder_layers": 6,
   "eos_token_id": 50257,
   "forced_decoder_ids": null,
   "init_std": 0.02,
@@ -39,7 +39,7 @@
   "max_target_positions": 448,
   "median_filter_width": 7,
   "model_type": "whisper",
-  "num_hidden_layers": 4,
+  "num_hidden_layers": 6,
   "num_mel_bins": 80,
   "pad_token_id": 50257,
   "scale_embedding": false,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:320b511ab01a31f21f53cd59aa60e26faef21a688d802eb3f12e9edab27af6af
-size 151099049
+oid sha256:903d502c03ca3f99b17b904249b2fc08828afd62cbab206a111fadfbb6de9f79
+size 290458785
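The size change (151,099,049 → 290,458,785 bytes) is consistent with swapping Whisper-tiny (~37.8M parameters) for Whisper-base (~72.6M parameters) stored as float32. A rough sanity check, assuming plain fp32 weights and no extra buffers:

from transformers import WhisperForConditionalGeneration

# ~72.6M parameters * 4 bytes (fp32) is roughly 290 MB, in line with the
# new pytorch_model.bin pointer size above.
model = WhisperForConditionalGeneration.from_pretrained("openai/whisper-base")
n_params = sum(p.numel() for p in model.parameters())
print(n_params, n_params * 4)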
runs/Jul27_05-25-57_76a831d88296/events.out.tfevents.1690435644.76a831d88296 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bc3744667cea8f4236aa64b23efec7e289fc49e213ed1cf97a337d9d7cc8cb56
+size 4808
tokenizer_config.json CHANGED
@@ -19,7 +19,7 @@
     "single_word": false
   },
   "errors": "replace",
-  "model_max_length": 1024,
+  "model_max_length": 448,
   "pad_token": null,
   "processor_class": "WhisperProcessor",
   "return_attention_mask": false,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f602af4d78e09a56e9bc2ffe57277831d543f955cd6aafcc59ac75296b2eb1e3
+oid sha256:276cc84c23d79875ad4daf84a1b17dd43ab90567fc77451819b9fb18ac651c13
 size 4091