{
  "_name_or_path": "huggingface_repos/Llama-2-7b-hf-asvd95",
  "architectures": [
    "ASVDLlamaForCausalLM"
  ],
  "attention_bias": false,
  "auto_map": {
    "AutoConfig": "configuration_asvd_llama.ASVDLlamaConfig",
    "AutoModelForCausalLM": "modeling_asvd_llama.ASVDLlamaForCausalLM"
  },
  "bos_token_id": 1,
  "eos_token_id": 2,
  "hidden_act": "silu",
  "hidden_size": 4096,
  "initializer_range": 0.02,
  "intermediate_size": 11008,
  "max_position_embeddings": 4096,
  "model_type": "llama",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 32,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-05,
  "rope_scaling": null,
  "rope_theta": 10000.0,
  "tie_word_embeddings": false,
  "torch_dtype": "float16",
  "transformers_version": "4.35.2",
  "truncation_ranks": {
    "model.layers.0.mlp.down_proj": 2686,
    "model.layers.0.mlp.gate_proj": 1791,
    "model.layers.0.mlp.up_proj": 2089,
    "model.layers.0.self_attn.k_proj": 204,
    "model.layers.0.self_attn.o_proj": 1433,
    "model.layers.0.self_attn.q_proj": 204,
    "model.layers.0.self_attn.v_proj": 614,
    "model.layers.1.mlp.gate_proj": 2388,
    "model.layers.1.self_attn.k_proj": 614,
    "model.layers.1.self_attn.o_proj": 1843,
    "model.layers.1.self_attn.q_proj": 204,
    "model.layers.1.self_attn.v_proj": 1228,
    "model.layers.11.self_attn.q_proj": 409,
    "model.layers.12.mlp.gate_proj": 2089,
    "model.layers.12.mlp.up_proj": 1791,
    "model.layers.12.self_attn.q_proj": 819,
    "model.layers.14.self_attn.q_proj": 1843,
    "model.layers.16.self_attn.k_proj": 1024,
    "model.layers.16.self_attn.q_proj": 1024,
    "model.layers.17.self_attn.k_proj": 1843,
    "model.layers.18.self_attn.q_proj": 1843,
    "model.layers.20.self_attn.k_proj": 409,
    "model.layers.20.self_attn.q_proj": 614,
    "model.layers.24.mlp.gate_proj": 2686,
    "model.layers.24.mlp.up_proj": 2686,
    "model.layers.24.self_attn.k_proj": 204,
    "model.layers.24.self_attn.o_proj": 1638,
    "model.layers.24.self_attn.q_proj": 204,
    "model.layers.26.self_attn.k_proj": 1433,
    "model.layers.27.self_attn.k_proj": 1024,
    "model.layers.27.self_attn.q_proj": 1228,
    "model.layers.29.self_attn.q_proj": 1638,
    "model.layers.3.self_attn.k_proj": 614,
    "model.layers.6.self_attn.k_proj": 1228,
    "model.layers.7.self_attn.k_proj": 1638,
    "model.layers.7.self_attn.q_proj": 1843
  },
  "use_cache": true,
  "vocab_size": 32000
}
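
Because "auto_map" routes AutoConfig and AutoModelForCausalLM to the custom configuration_asvd_llama.py and modeling_asvd_llama.py files shipped alongside this config, the checkpoint must be loaded with trust_remote_code=True. A minimal loading sketch, assuming the local path from "_name_or_path" above (substitute the actual Hub repo id if the checkpoint is hosted remotely):

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

path = "huggingface_repos/Llama-2-7b-hf-asvd95"  # from "_name_or_path"

# trust_remote_code=True lets transformers import the custom
# ASVDLlamaConfig / ASVDLlamaForCausalLM classes named in "auto_map".
model = AutoModelForCausalLM.from_pretrained(
    path,
    torch_dtype=torch.float16,  # matches "torch_dtype" in the config
    trust_remote_code=True,
)
tokenizer = AutoTokenizer.from_pretrained(path)
```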
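"truncation_ranks" records the SVD rank retained for each factorized linear module; modules absent from the map keep their dense weights. The sketch below is illustrative only, showing how an entry such as "model.layers.0.self_attn.k_proj": 204 plausibly maps to a two-matrix low-rank layer; the class name and details here are assumptions, and the authoritative implementation lives in modeling_asvd_llama.py:

```python
import torch.nn as nn

class LowRankLinear(nn.Module):
    """Hypothetical stand-in for an ASVD-factorized layer: approximates a
    (out_features x in_features) linear map with two thin linears sharing
    inner dimension `rank`."""

    def __init__(self, in_features: int, out_features: int, rank: int):
        super().__init__()
        self.down = nn.Linear(in_features, rank, bias=False)  # right SVD factor
        self.up = nn.Linear(rank, out_features, bias=False)   # left SVD factor

    def forward(self, x):
        return self.up(self.down(x))

# k_proj of layer 0 at rank 204: the dense 4096 x 4096 weight (~16.8M params)
# shrinks to 2 * 4096 * 204 (~1.7M params).
k_proj = LowRankLinear(4096, 4096, rank=204)
```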