{
  "_name_or_path": "efficient-speech/lite-whisper-large-v3-fast",
  "activation_dropout": 0.0,
  "activation_function": "gelu",
  "apply_spec_augment": false,
  "architectures": [
    "LiteWhisperForConditionalGeneration"
  ],
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "configuration_lite_whisper.LiteWhisperConfig",
    "AutoModel": "modeling_lite_whisper.LiteWhisperForConditionalGeneration"
  },
  "begin_suppress_tokens": null,
  "bos_token_id": 50257,
  "classifier_proj_size": 256,
  "d_model": 1280,
  "decoder_attention_heads": 20,
  "decoder_ffn_dim": 5120,
  "decoder_layerdrop": 0.0,
  "decoder_layers": 32,
  "decoder_start_token_id": 50258,
  "dropout": 0.0,
  "encoder_attention_heads": 20,
  "encoder_ffn_dim": 5120,
  "encoder_layerdrop": 0.0,
  "encoder_layers": 32,
  "eos_token_id": 50257,
  "init_std": 0.02,
  "is_encoder_decoder": true,
  "low_rank_config": [
    {
      "fc1": 320,
      "fc2": 272,
      "k_proj": 32,
      "out_proj": 32,
      "q_proj": 32,
      "v_proj": 48
    },
    {
      "fc1": 192,
      "fc2": 224,
      "k_proj": 64,
      "out_proj": 112,
      "q_proj": 64,
      "v_proj": 128
    },
    {
      "fc1": 128,
      "fc2": 112,
      "k_proj": 48,
      "out_proj": 160,
      "q_proj": 48,
      "v_proj": 160
    },
    {
      "fc1": 192,
      "fc2": 96,
      "k_proj": 48,
      "out_proj": 160,
      "q_proj": 48,
      "v_proj": 192
    },
    {
      "fc1": 192,
      "fc2": 160,
      "k_proj": 48,
      "out_proj": 144,
      "q_proj": 64,
      "v_proj": 208
    },
    {
      "fc1": 192,
      "fc2": 160,
      "k_proj": 48,
      "out_proj": 144,
      "q_proj": 48,
      "v_proj": 192
    },
    {
      "fc1": 224,
      "fc2": 224,
      "k_proj": 64,
      "out_proj": 208,
      "q_proj": 80,
      "v_proj": 304
    },
    {
      "fc1": 240,
      "fc2": 432,
      "k_proj": 80,
      "out_proj": 208,
      "q_proj": 96,
      "v_proj": 256
    },
    {
      "fc1": 288,
      "fc2": 512,
      "k_proj": 80,
      "out_proj": 224,
      "q_proj": 96,
      "v_proj": 288
    },
    {
      "fc1": 288,
      "fc2": 512,
      "k_proj": 80,
      "out_proj": 224,
      "q_proj": 96,
      "v_proj": 288
    },
    {
      "fc1": 320,
      "fc2": 528,
      "k_proj": 48,
      "out_proj": 240,
      "q_proj": 64,
      "v_proj": 288
    },
    {
      "fc1": 320,
      "fc2": 528,
      "k_proj": 64,
      "out_proj": 224,
      "q_proj": 80,
      "v_proj": 320
    },
    {
      "fc1": 352,
      "fc2": 592,
      "k_proj": 64,
      "out_proj": 224,
      "q_proj": 64,
      "v_proj": 352
    },
    {
      "fc1": 400,
      "fc2": 688,
      "k_proj": 48,
      "out_proj": 240,
      "q_proj": 64,
      "v_proj": 288
    },
    {
      "fc1": 480,
      "fc2": 656,
      "k_proj": 64,
      "out_proj": 256,
      "q_proj": 80,
      "v_proj": 448
    },
    {
      "fc1": 384,
      "fc2": 768,
      "k_proj": 48,
      "out_proj": 256,
      "q_proj": 48,
      "v_proj": 432
    },
    {
      "fc1": 400,
      "fc2": 800,
      "k_proj": 48,
      "out_proj": 224,
      "q_proj": 64,
      "v_proj": 272
    },
    {
      "fc1": 464,
      "fc2": 784,
      "k_proj": 80,
      "out_proj": 288,
      "q_proj": 96,
      "v_proj": 368
    },
    {
      "fc1": 512,
      "fc2": 848,
      "k_proj": 64,
      "out_proj": 256,
      "q_proj": 80,
      "v_proj": 304
    },
    {
      "fc1": 528,
      "fc2": 752,
      "k_proj": 80,
      "out_proj": 272,
      "q_proj": 80,
      "v_proj": 416
    },
    {
      "fc1": 400,
      "fc2": 16,
      "k_proj": 80,
      "out_proj": 352,
      "q_proj": 96,
      "v_proj": 432
    },
    {
      "fc1": 672,
      "fc2": 960,
      "k_proj": 96,
      "out_proj": 336,
      "q_proj": 112,
      "v_proj": 448
    },
    {
      "fc1": 640,
      "fc2": 976,
      "k_proj": 112,
      "out_proj": 448,
      "q_proj": 144,
      "v_proj": 544
    },
    {
      "fc1": 720,
      "k_proj": 96,
      "out_proj": 416,
      "q_proj": 128,
      "v_proj": 464
    },
    {
      "fc1": 752,
      "k_proj": 128,
      "out_proj": 448,
      "q_proj": 176,
      "v_proj": 560
    },
    {
      "fc1": 752,
      "k_proj": 128,
      "out_proj": 416,
      "q_proj": 176,
      "v_proj": 592
    },
    {
      "fc1": 848,
      "k_proj": 144,
      "out_proj": 496,
      "q_proj": 192,
      "v_proj": 624
    },
    {
      "fc1": 912,
      "fc2": 1024,
      "k_proj": 128,
      "out_proj": 400,
      "q_proj": 192,
      "v_proj": 608
    },
    {
      "fc1": 944,
      "fc2": 944,
      "k_proj": 144,
      "out_proj": 448,
      "q_proj": 208,
      "v_proj": 592
    },
    {
      "fc1": 960,
      "fc2": 976,
      "k_proj": 176,
      "out_proj": 496,
      "q_proj": 272,
      "v_proj": 560
    },
    {
      "fc1": 896,
      "fc2": 992,
      "k_proj": 192,
      "out_proj": 464,
      "q_proj": 272,
      "v_proj": 560
    },
    {
      "fc1": 800,
      "fc2": 736,
      "k_proj": 176,
      "out_proj": 464,
      "q_proj": 272,
      "v_proj": 432
    }
  ],
  "mask_feature_length": 10,
  "mask_feature_min_masks": 0,
  "mask_feature_prob": 0.0,
  "mask_time_length": 10,
  "mask_time_min_masks": 2,
  "mask_time_prob": 0.05,
  "max_length": null,
  "max_source_positions": 1500,
  "max_target_positions": 448,
  "median_filter_width": 7,
  "model_type": "lite-whisper",
  "num_hidden_layers": 32,
  "num_mel_bins": 128,
  "pad_token_id": 50256,
  "scale_embedding": false,
  "torch_dtype": "float32",
  "transformers_version": "4.46.3",
  "use_cache": true,
  "use_weighted_layer_sum": false,
  "vocab_size": 51866
}