{ "_name_or_path": "checkpoints/mtgv/MobileLLaMA-1.4B-Chat", "anyprec": { "arch_config": { "layers_name": "layers", "model_name": "model", "module_names": [ "self_attn.q_proj", "self_attn.k_proj", "self_attn.v_proj", "self_attn.o_proj", "mlp.gate_proj", "mlp.up_proj", "mlp.down_proj" ] }, "group_count": 1, "parent_precision": 4, "seed_precision": 2, "sparse_numvals": { "model.layers.0.mlp.down_proj": 400523, "model.layers.0.mlp.gate_proj": 396264, "model.layers.0.mlp.up_proj": 385037, "model.layers.0.self_attn.k_proj": 275407, "model.layers.0.self_attn.o_proj": 150081, "model.layers.0.self_attn.q_proj": 255588, "model.layers.0.self_attn.v_proj": 160697, "model.layers.1.mlp.down_proj": 386409, "model.layers.1.mlp.gate_proj": 393215, "model.layers.1.mlp.up_proj": 385587, "model.layers.1.self_attn.k_proj": 475524, "model.layers.1.self_attn.o_proj": 221750, "model.layers.1.self_attn.q_proj": 476847, "model.layers.1.self_attn.v_proj": 176783, "model.layers.10.mlp.down_proj": 387140, "model.layers.10.mlp.gate_proj": 425728, "model.layers.10.mlp.up_proj": 401174, "model.layers.10.self_attn.k_proj": 299367, "model.layers.10.self_attn.o_proj": 165346, "model.layers.10.self_attn.q_proj": 279251, "model.layers.10.self_attn.v_proj": 171614, "model.layers.11.mlp.down_proj": 387474, "model.layers.11.mlp.gate_proj": 424181, "model.layers.11.mlp.up_proj": 401896, "model.layers.11.self_attn.k_proj": 285408, "model.layers.11.self_attn.o_proj": 155248, "model.layers.11.self_attn.q_proj": 278893, "model.layers.11.self_attn.v_proj": 166240, "model.layers.12.mlp.down_proj": 396426, "model.layers.12.mlp.gate_proj": 443150, "model.layers.12.mlp.up_proj": 414688, "model.layers.12.self_attn.k_proj": 270116, "model.layers.12.self_attn.o_proj": 148084, "model.layers.12.self_attn.q_proj": 257984, "model.layers.12.self_attn.v_proj": 166043, "model.layers.13.mlp.down_proj": 409209, "model.layers.13.mlp.gate_proj": 468928, "model.layers.13.mlp.up_proj": 410030, "model.layers.13.self_attn.k_proj": 277122, "model.layers.13.self_attn.o_proj": 158213, "model.layers.13.self_attn.q_proj": 264605, "model.layers.13.self_attn.v_proj": 183753, "model.layers.14.mlp.down_proj": 410006, "model.layers.14.mlp.gate_proj": 481442, "model.layers.14.mlp.up_proj": 427451, "model.layers.14.self_attn.k_proj": 278867, "model.layers.14.self_attn.o_proj": 155983, "model.layers.14.self_attn.q_proj": 268760, "model.layers.14.self_attn.v_proj": 176188, "model.layers.15.mlp.down_proj": 411189, "model.layers.15.mlp.gate_proj": 471248, "model.layers.15.mlp.up_proj": 421665, "model.layers.15.self_attn.k_proj": 281331, "model.layers.15.self_attn.o_proj": 162574, "model.layers.15.self_attn.q_proj": 284610, "model.layers.15.self_attn.v_proj": 180714, "model.layers.16.mlp.down_proj": 418971, "model.layers.16.mlp.gate_proj": 464595, "model.layers.16.mlp.up_proj": 423591, "model.layers.16.self_attn.k_proj": 261045, "model.layers.16.self_attn.o_proj": 160088, "model.layers.16.self_attn.q_proj": 250977, "model.layers.16.self_attn.v_proj": 177848, "model.layers.17.mlp.down_proj": 409538, "model.layers.17.mlp.gate_proj": 450034, "model.layers.17.mlp.up_proj": 415172, "model.layers.17.self_attn.k_proj": 276751, "model.layers.17.self_attn.o_proj": 161674, "model.layers.17.self_attn.q_proj": 266644, "model.layers.17.self_attn.v_proj": 173601, "model.layers.18.mlp.down_proj": 399252, "model.layers.18.mlp.gate_proj": 427813, "model.layers.18.mlp.up_proj": 422532, "model.layers.18.self_attn.k_proj": 255264, "model.layers.18.self_attn.o_proj": 162202, 
"model.layers.18.self_attn.q_proj": 270035, "model.layers.18.self_attn.v_proj": 173328, "model.layers.19.mlp.down_proj": 393065, "model.layers.19.mlp.gate_proj": 415779, "model.layers.19.mlp.up_proj": 419838, "model.layers.19.self_attn.k_proj": 254719, "model.layers.19.self_attn.o_proj": 185780, "model.layers.19.self_attn.q_proj": 250004, "model.layers.19.self_attn.v_proj": 185533, "model.layers.2.mlp.down_proj": 378193, "model.layers.2.mlp.gate_proj": 376952, "model.layers.2.mlp.up_proj": 381512, "model.layers.2.self_attn.k_proj": 368064, "model.layers.2.self_attn.o_proj": 167961, "model.layers.2.self_attn.q_proj": 314862, "model.layers.2.self_attn.v_proj": 158764, "model.layers.20.mlp.down_proj": 400546, "model.layers.20.mlp.gate_proj": 415086, "model.layers.20.mlp.up_proj": 406972, "model.layers.20.self_attn.k_proj": 233681, "model.layers.20.self_attn.o_proj": 170441, "model.layers.20.self_attn.q_proj": 229955, "model.layers.20.self_attn.v_proj": 169777, "model.layers.21.mlp.down_proj": 389671, "model.layers.21.mlp.gate_proj": 405414, "model.layers.21.mlp.up_proj": 397729, "model.layers.21.self_attn.k_proj": 230648, "model.layers.21.self_attn.o_proj": 204977, "model.layers.21.self_attn.q_proj": 230574, "model.layers.21.self_attn.v_proj": 199698, "model.layers.22.mlp.down_proj": 414621, "model.layers.22.mlp.gate_proj": 415759, "model.layers.22.mlp.up_proj": 408002, "model.layers.22.self_attn.k_proj": 251619, "model.layers.22.self_attn.o_proj": 185353, "model.layers.22.self_attn.q_proj": 246379, "model.layers.22.self_attn.v_proj": 186204, "model.layers.23.mlp.down_proj": 508914, "model.layers.23.mlp.gate_proj": 468804, "model.layers.23.mlp.up_proj": 480458, "model.layers.23.self_attn.k_proj": 205880, "model.layers.23.self_attn.o_proj": 208179, "model.layers.23.self_attn.q_proj": 199025, "model.layers.23.self_attn.v_proj": 194158, "model.layers.3.mlp.down_proj": 378183, "model.layers.3.mlp.gate_proj": 377659, "model.layers.3.mlp.up_proj": 375109, "model.layers.3.self_attn.k_proj": 294567, "model.layers.3.self_attn.o_proj": 144791, "model.layers.3.self_attn.q_proj": 248337, "model.layers.3.self_attn.v_proj": 149933, "model.layers.4.mlp.down_proj": 381223, "model.layers.4.mlp.gate_proj": 380416, "model.layers.4.mlp.up_proj": 387728, "model.layers.4.self_attn.k_proj": 292882, "model.layers.4.self_attn.o_proj": 143059, "model.layers.4.self_attn.q_proj": 237856, "model.layers.4.self_attn.v_proj": 148584, "model.layers.5.mlp.down_proj": 390101, "model.layers.5.mlp.gate_proj": 384475, "model.layers.5.mlp.up_proj": 377591, "model.layers.5.self_attn.k_proj": 284178, "model.layers.5.self_attn.o_proj": 155071, "model.layers.5.self_attn.q_proj": 256165, "model.layers.5.self_attn.v_proj": 169034, "model.layers.6.mlp.down_proj": 382083, "model.layers.6.mlp.gate_proj": 390114, "model.layers.6.mlp.up_proj": 387960, "model.layers.6.self_attn.k_proj": 290834, "model.layers.6.self_attn.o_proj": 170995, "model.layers.6.self_attn.q_proj": 256834, "model.layers.6.self_attn.v_proj": 183257, "model.layers.7.mlp.down_proj": 385748, "model.layers.7.mlp.gate_proj": 398626, "model.layers.7.mlp.up_proj": 392283, "model.layers.7.self_attn.k_proj": 253397, "model.layers.7.self_attn.o_proj": 154212, "model.layers.7.self_attn.q_proj": 221549, "model.layers.7.self_attn.v_proj": 160685, "model.layers.8.mlp.down_proj": 394012, "model.layers.8.mlp.gate_proj": 413009, "model.layers.8.mlp.up_proj": 397484, "model.layers.8.self_attn.k_proj": 270590, "model.layers.8.self_attn.o_proj": 171845, "model.layers.8.self_attn.q_proj": 
238918, "model.layers.8.self_attn.v_proj": 193766, "model.layers.9.mlp.down_proj": 393867, "model.layers.9.mlp.gate_proj": 409331, "model.layers.9.mlp.up_proj": 400690, "model.layers.9.self_attn.k_proj": 285793, "model.layers.9.self_attn.o_proj": 175292, "model.layers.9.self_attn.q_proj": 271217, "model.layers.9.self_attn.v_proj": 188112 } }, "architectures": [ "LlamaForCausalLM" ], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 5632, "max_position_embeddings": 2048, "max_sequence_length": 2048, "model_type": "llama", "num_attention_heads": 16, "num_hidden_layers": 24, "num_key_value_heads": 16, "pad_token_id": 0, "pretraining_tp": 1, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "float16", "transformers_version": "4.39.3", "use_cache": true, "vocab_size": 32000 }