imfinethx: for cnn_dm (commit 2e144f7)
{
"_name_or_path": "checkpoints/stabilityai/stablelm-zephyr-3b",
"anyprec": {
"arch_config": {
"layers_name": "layers",
"model_name": "model",
"module_names": [
"self_attn.q_proj",
"self_attn.k_proj",
"self_attn.v_proj",
"self_attn.o_proj",
"mlp.gate_proj",
"mlp.up_proj",
"mlp.down_proj"
]
},
"group_count": 1,
"parent_precision": 4,
"seed_precision": 2,
"sparse_numvals": {
"model.layers.0.mlp.down_proj": 778629,
"model.layers.0.mlp.gate_proj": 807734,
"model.layers.0.mlp.up_proj": 803213,
"model.layers.0.self_attn.k_proj": 1461169,
"model.layers.0.self_attn.o_proj": 640692,
"model.layers.0.self_attn.q_proj": 1592355,
"model.layers.0.self_attn.v_proj": 759195,
"model.layers.1.mlp.down_proj": 780894,
"model.layers.1.mlp.gate_proj": 663948,
"model.layers.1.mlp.up_proj": 770295,
"model.layers.1.self_attn.k_proj": 928415,
"model.layers.1.self_attn.o_proj": 1132444,
"model.layers.1.self_attn.q_proj": 943481,
"model.layers.1.self_attn.v_proj": 1042300,
"model.layers.10.mlp.down_proj": 817033,
"model.layers.10.mlp.gate_proj": 830536,
"model.layers.10.mlp.up_proj": 823874,
"model.layers.10.self_attn.k_proj": 390605,
"model.layers.10.self_attn.o_proj": 317140,
"model.layers.10.self_attn.q_proj": 376199,
"model.layers.10.self_attn.v_proj": 334549,
"model.layers.11.mlp.down_proj": 833947,
"model.layers.11.mlp.gate_proj": 868719,
"model.layers.11.mlp.up_proj": 816985,
"model.layers.11.self_attn.k_proj": 397474,
"model.layers.11.self_attn.o_proj": 311273,
"model.layers.11.self_attn.q_proj": 377782,
"model.layers.11.self_attn.v_proj": 312409,
"model.layers.12.mlp.down_proj": 828829,
"model.layers.12.mlp.gate_proj": 917976,
"model.layers.12.mlp.up_proj": 823316,
"model.layers.12.self_attn.k_proj": 361035,
"model.layers.12.self_attn.o_proj": 302116,
"model.layers.12.self_attn.q_proj": 351978,
"model.layers.12.self_attn.v_proj": 304205,
"model.layers.13.mlp.down_proj": 827309,
"model.layers.13.mlp.gate_proj": 900510,
"model.layers.13.mlp.up_proj": 821878,
"model.layers.13.self_attn.k_proj": 392417,
"model.layers.13.self_attn.o_proj": 312342,
"model.layers.13.self_attn.q_proj": 381601,
"model.layers.13.self_attn.v_proj": 333882,
"model.layers.14.mlp.down_proj": 806998,
"model.layers.14.mlp.gate_proj": 998139,
"model.layers.14.mlp.up_proj": 824569,
"model.layers.14.self_attn.k_proj": 359040,
"model.layers.14.self_attn.o_proj": 303410,
"model.layers.14.self_attn.q_proj": 348440,
"model.layers.14.self_attn.v_proj": 319003,
"model.layers.15.mlp.down_proj": 801124,
"model.layers.15.mlp.gate_proj": 959583,
"model.layers.15.mlp.up_proj": 828845,
"model.layers.15.self_attn.k_proj": 367357,
"model.layers.15.self_attn.o_proj": 311633,
"model.layers.15.self_attn.q_proj": 347473,
"model.layers.15.self_attn.v_proj": 312824,
"model.layers.16.mlp.down_proj": 794973,
"model.layers.16.mlp.gate_proj": 893569,
"model.layers.16.mlp.up_proj": 793791,
"model.layers.16.self_attn.k_proj": 384201,
"model.layers.16.self_attn.o_proj": 297904,
"model.layers.16.self_attn.q_proj": 357001,
"model.layers.16.self_attn.v_proj": 320214,
"model.layers.17.mlp.down_proj": 801463,
"model.layers.17.mlp.gate_proj": 861447,
"model.layers.17.mlp.up_proj": 792250,
"model.layers.17.self_attn.k_proj": 368087,
"model.layers.17.self_attn.o_proj": 291215,
"model.layers.17.self_attn.q_proj": 352834,
"model.layers.17.self_attn.v_proj": 298551,
"model.layers.18.mlp.down_proj": 780022,
"model.layers.18.mlp.gate_proj": 822135,
"model.layers.18.mlp.up_proj": 770609,
"model.layers.18.self_attn.k_proj": 367388,
"model.layers.18.self_attn.o_proj": 301723,
"model.layers.18.self_attn.q_proj": 348144,
"model.layers.18.self_attn.v_proj": 314015,
"model.layers.19.mlp.down_proj": 781622,
"model.layers.19.mlp.gate_proj": 809307,
"model.layers.19.mlp.up_proj": 777374,
"model.layers.19.self_attn.k_proj": 346614,
"model.layers.19.self_attn.o_proj": 292359,
"model.layers.19.self_attn.q_proj": 330734,
"model.layers.19.self_attn.v_proj": 299820,
"model.layers.2.mlp.down_proj": 750836,
"model.layers.2.mlp.gate_proj": 685852,
"model.layers.2.mlp.up_proj": 760850,
"model.layers.2.self_attn.k_proj": 945792,
"model.layers.2.self_attn.o_proj": 383107,
"model.layers.2.self_attn.q_proj": 849195,
"model.layers.2.self_attn.v_proj": 344129,
"model.layers.20.mlp.down_proj": 786966,
"model.layers.20.mlp.gate_proj": 812499,
"model.layers.20.mlp.up_proj": 773175,
"model.layers.20.self_attn.k_proj": 341039,
"model.layers.20.self_attn.o_proj": 296095,
"model.layers.20.self_attn.q_proj": 331543,
"model.layers.20.self_attn.v_proj": 291150,
"model.layers.21.mlp.down_proj": 782960,
"model.layers.21.mlp.gate_proj": 792888,
"model.layers.21.mlp.up_proj": 753523,
"model.layers.21.self_attn.k_proj": 352377,
"model.layers.21.self_attn.o_proj": 295497,
"model.layers.21.self_attn.q_proj": 346059,
"model.layers.21.self_attn.v_proj": 297082,
"model.layers.22.mlp.down_proj": 766257,
"model.layers.22.mlp.gate_proj": 782287,
"model.layers.22.mlp.up_proj": 764779,
"model.layers.22.self_attn.k_proj": 341186,
"model.layers.22.self_attn.o_proj": 301225,
"model.layers.22.self_attn.q_proj": 329719,
"model.layers.22.self_attn.v_proj": 296752,
"model.layers.23.mlp.down_proj": 780103,
"model.layers.23.mlp.gate_proj": 765576,
"model.layers.23.mlp.up_proj": 768508,
"model.layers.23.self_attn.k_proj": 341081,
"model.layers.23.self_attn.o_proj": 298603,
"model.layers.23.self_attn.q_proj": 334699,
"model.layers.23.self_attn.v_proj": 301314,
"model.layers.24.mlp.down_proj": 767341,
"model.layers.24.mlp.gate_proj": 751797,
"model.layers.24.mlp.up_proj": 767790,
"model.layers.24.self_attn.k_proj": 353380,
"model.layers.24.self_attn.o_proj": 296557,
"model.layers.24.self_attn.q_proj": 340550,
"model.layers.24.self_attn.v_proj": 294305,
"model.layers.25.mlp.down_proj": 765684,
"model.layers.25.mlp.gate_proj": 740912,
"model.layers.25.mlp.up_proj": 742629,
"model.layers.25.self_attn.k_proj": 342154,
"model.layers.25.self_attn.o_proj": 300439,
"model.layers.25.self_attn.q_proj": 333588,
"model.layers.25.self_attn.v_proj": 303279,
"model.layers.26.mlp.down_proj": 767616,
"model.layers.26.mlp.gate_proj": 741326,
"model.layers.26.mlp.up_proj": 764813,
"model.layers.26.self_attn.k_proj": 367959,
"model.layers.26.self_attn.o_proj": 293380,
"model.layers.26.self_attn.q_proj": 342700,
"model.layers.26.self_attn.v_proj": 299660,
"model.layers.27.mlp.down_proj": 762326,
"model.layers.27.mlp.gate_proj": 736795,
"model.layers.27.mlp.up_proj": 752581,
"model.layers.27.self_attn.k_proj": 358916,
"model.layers.27.self_attn.o_proj": 291015,
"model.layers.27.self_attn.q_proj": 344621,
"model.layers.27.self_attn.v_proj": 290539,
"model.layers.28.mlp.down_proj": 751370,
"model.layers.28.mlp.gate_proj": 752285,
"model.layers.28.mlp.up_proj": 766889,
"model.layers.28.self_attn.k_proj": 367105,
"model.layers.28.self_attn.o_proj": 293327,
"model.layers.28.self_attn.q_proj": 337209,
"model.layers.28.self_attn.v_proj": 294827,
"model.layers.29.mlp.down_proj": 759893,
"model.layers.29.mlp.gate_proj": 787132,
"model.layers.29.mlp.up_proj": 758499,
"model.layers.29.self_attn.k_proj": 363047,
"model.layers.29.self_attn.o_proj": 302596,
"model.layers.29.self_attn.q_proj": 344432,
"model.layers.29.self_attn.v_proj": 301537,
"model.layers.3.mlp.down_proj": 765483,
"model.layers.3.mlp.gate_proj": 658112,
"model.layers.3.mlp.up_proj": 765970,
"model.layers.3.self_attn.k_proj": 536020,
"model.layers.3.self_attn.o_proj": 347045,
"model.layers.3.self_attn.q_proj": 504570,
"model.layers.3.self_attn.v_proj": 328300,
"model.layers.30.mlp.down_proj": 820430,
"model.layers.30.mlp.gate_proj": 839540,
"model.layers.30.mlp.up_proj": 768724,
"model.layers.30.self_attn.k_proj": 345312,
"model.layers.30.self_attn.o_proj": 313055,
"model.layers.30.self_attn.q_proj": 331580,
"model.layers.30.self_attn.v_proj": 313570,
"model.layers.31.mlp.down_proj": 851291,
"model.layers.31.mlp.gate_proj": 904549,
"model.layers.31.mlp.up_proj": 763949,
"model.layers.31.self_attn.k_proj": 326266,
"model.layers.31.self_attn.o_proj": 342810,
"model.layers.31.self_attn.q_proj": 322170,
"model.layers.31.self_attn.v_proj": 327819,
"model.layers.4.mlp.down_proj": 767625,
"model.layers.4.mlp.gate_proj": 658258,
"model.layers.4.mlp.up_proj": 755330,
"model.layers.4.self_attn.k_proj": 454540,
"model.layers.4.self_attn.o_proj": 323318,
"model.layers.4.self_attn.q_proj": 438780,
"model.layers.4.self_attn.v_proj": 318874,
"model.layers.5.mlp.down_proj": 748111,
"model.layers.5.mlp.gate_proj": 670481,
"model.layers.5.mlp.up_proj": 752601,
"model.layers.5.self_attn.k_proj": 414950,
"model.layers.5.self_attn.o_proj": 349395,
"model.layers.5.self_attn.q_proj": 399806,
"model.layers.5.self_attn.v_proj": 360478,
"model.layers.6.mlp.down_proj": 769047,
"model.layers.6.mlp.gate_proj": 685535,
"model.layers.6.mlp.up_proj": 767187,
"model.layers.6.self_attn.k_proj": 377153,
"model.layers.6.self_attn.o_proj": 302937,
"model.layers.6.self_attn.q_proj": 356321,
"model.layers.6.self_attn.v_proj": 321553,
"model.layers.7.mlp.down_proj": 790176,
"model.layers.7.mlp.gate_proj": 702881,
"model.layers.7.mlp.up_proj": 787154,
"model.layers.7.self_attn.k_proj": 361359,
"model.layers.7.self_attn.o_proj": 299360,
"model.layers.7.self_attn.q_proj": 344262,
"model.layers.7.self_attn.v_proj": 303575,
"model.layers.8.mlp.down_proj": 805556,
"model.layers.8.mlp.gate_proj": 762210,
"model.layers.8.mlp.up_proj": 797354,
"model.layers.8.self_attn.k_proj": 389519,
"model.layers.8.self_attn.o_proj": 299609,
"model.layers.8.self_attn.q_proj": 374913,
"model.layers.8.self_attn.v_proj": 312783,
"model.layers.9.mlp.down_proj": 809737,
"model.layers.9.mlp.gate_proj": 799184,
"model.layers.9.mlp.up_proj": 798637,
"model.layers.9.self_attn.k_proj": 378165,
"model.layers.9.self_attn.o_proj": 309393,
"model.layers.9.self_attn.q_proj": 368689,
"model.layers.9.self_attn.v_proj": 317669
}
},
"architectures": [
"StableLmForCausalLM"
],
"attention_dropout": 0.0,
"bos_token_id": 0,
"eos_token_id": 0,
"hidden_act": "silu",
"hidden_dropout": 0.0,
"hidden_size": 2560,
"initializer_range": 0.02,
"intermediate_size": 6912,
"layer_norm_eps": 1e-05,
"max_position_embeddings": 4096,
"model_type": "stablelm",
"num_attention_heads": 32,
"num_hidden_layers": 32,
"num_key_value_heads": 32,
"partial_rotary_factor": 0.25,
"rope_scaling": null,
"rope_theta": 10000,
"tie_word_embeddings": false,
"torch_dtype": "float16",
"transformers_version": "4.39.3",
"use_cache": true,
"use_qkv_bias": false,
"vocab_size": 50304
}
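
The "anyprec" block above holds the Any-Precision quantization metadata: weights are stored at a 4-bit parent precision from which a 2-bit seed precision (and presumably the bit-widths in between) can be served, and "sparse_numvals" records how many outlier weights were split off into a sparse format for each quantized linear module. Below is a minimal sketch of how this metadata could be inspected; it uses only the Python standard library, and the local path config.json plus the per-layer summary are illustrative assumptions, not part of any library API.

# Minimal sketch: parse this config.json and summarize its
# any-precision quantization metadata. Standard library only;
# the path "config.json" is an assumption for illustration.
import json
from collections import defaultdict

with open("config.json") as f:
    cfg = json.load(f)

anyprec = cfg["anyprec"]
print(f"seed precision:   {anyprec['seed_precision']}-bit")
print(f"parent precision: {anyprec['parent_precision']}-bit")
print(f"quantized modules: {anyprec['arch_config']['module_names']}")

# Aggregate sparse outlier counts per transformer layer.
# Keys look like "model.layers.<idx>.<module>", so the layer
# index sits at position 2 of the dotted name.
per_layer = defaultdict(int)
for name, count in anyprec["sparse_numvals"].items():
    layer_idx = int(name.split(".")[2])
    per_layer[layer_idx] += count

print(f"total sparse values: {sum(per_layer.values()):,}")
for idx in sorted(per_layer):
    print(f"layer {idx:2d}: {per_layer[idx]:,} sparse values")

Running this against the config above would show the pattern visible in the raw numbers: the attention projections of the earliest layers carry far more sparse outliers (e.g. about 1.59M for layer 0's q_proj and 1.46M for its k_proj) than the same modules in later layers, which mostly sit near 0.3M.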