PerryCheng614
committed on
Commit 946ecea • 1 Parent(s): b902540
Upload folder using huggingface_hub
- config.json +62 -0
- model.safetensors +3 -0
- quant_log.csv +113 -0
- quantize_config.json +21 -0
config.json
ADDED
@@ -0,0 +1,62 @@
+{
+  "_attn_implementation_autoset": true,
+  "_name_or_path": "meta-llama/Llama-3.2-1B-Instruct",
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 128000,
+  "eos_token_id": [
+    128001,
+    128008,
+    128009
+  ],
+  "head_dim": 64,
+  "hidden_act": "silu",
+  "hidden_size": 2048,
+  "initializer_range": 0.02,
+  "intermediate_size": 8192,
+  "max_position_embeddings": 131072,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 16,
+  "num_key_value_heads": 8,
+  "pretraining_tp": 1,
+  "quantization_config": {
+    "bits": 4,
+    "checkpoint_format": "gptq",
+    "desc_act": true,
+    "dynamic": null,
+    "group_size": 32,
+    "lm_head": false,
+    "meta": {
+      "damp_auto_increment": 0.0025,
+      "damp_percent": 0.01,
+      "mse": 0.0,
+      "quantizer": [
+        "gptqmodel:1.4.6-dev"
+      ],
+      "static_groups": false,
+      "true_sequential": true,
+      "uri": "https://github.com/modelcloud/gptqmodel"
+    },
+    "quant_method": "gptq",
+    "sym": true
+  },
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": {
+    "factor": 32.0,
+    "high_freq_factor": 4.0,
+    "low_freq_factor": 1.0,
+    "original_max_position_embeddings": 8192,
+    "rope_type": "llama3"
+  },
+  "rope_theta": 500000.0,
+  "tie_word_embeddings": true,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.47.1",
+  "use_cache": true,
+  "vocab_size": 128256
+}
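The embedded `quantization_config` block means transformers can load this checkpoint directly, dispatching to its GPTQ integration with no extra arguments. A minimal sketch, assuming a GPTQ kernel backend (e.g. the gptqmodel package) plus accelerate are installed; the repo id below is a hypothetical placeholder, not taken from this commit:

```python
# Minimal sketch: loading this 4-bit GPTQ checkpoint with transformers.
# The repo id is a placeholder -- substitute the actual model repo.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "PerryCheng614/Llama-3.2-1B-Instruct-GPTQ"  # hypothetical id

tokenizer = AutoTokenizer.from_pretrained(repo_id)
# transformers reads config.json's "quantization_config" and routes the
# load through its GPTQ integration; device_map needs accelerate.
model = AutoModelForCausalLM.from_pretrained(repo_id, device_map="auto")

inputs = tokenizer("Hello", return_tensors="pt").to(model.device)
print(tokenizer.decode(model.generate(**inputs, max_new_tokens=20)[0]))
```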
model.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:751aa2c38ca552ae90ac9c5968b7ce3ace0e08784a1d1056863b8424ca014299
+size 1614733472
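What the diff stores is a Git LFS pointer, not the weights themselves: the `oid` is the SHA-256 of the real ~1.6 GB file and `size` is its byte count. A quick stdlib sketch for verifying a downloaded copy against the pointer (the local path is an assumption):

```python
# Minimal sketch: check a downloaded model.safetensors against the
# LFS pointer above (sha256 oid and byte size).
import hashlib

EXPECTED_OID = "751aa2c38ca552ae90ac9c5968b7ce3ace0e08784a1d1056863b8424ca014299"
EXPECTED_SIZE = 1614733472

h = hashlib.sha256()
size = 0
with open("model.safetensors", "rb") as f:  # assumed local path
    for chunk in iter(lambda: f.read(1 << 20), b""):  # 1 MiB chunks
        h.update(chunk)
        size += len(chunk)

assert size == EXPECTED_SIZE, f"size mismatch: {size}"
assert h.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("model.safetensors matches the LFS pointer")
```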
quant_log.csv
ADDED
@@ -0,0 +1,113 @@
+layer,module,loss,damp,time
+0,self_attn.k_proj,0.20627,0.01000,0.986
+0,self_attn.v_proj,0.00507,0.01000,0.607
+0,self_attn.q_proj,0.42227,0.01000,0.610
+0,self_attn.o_proj,0.00047,0.01000,0.603
+0,mlp.up_proj,0.35409,0.01000,0.636
+0,mlp.gate_proj,0.44565,0.01000,0.749
+0,mlp.down_proj,0.00251,0.01000,2.774
+1,self_attn.k_proj,0.33142,0.01000,0.600
+1,self_attn.v_proj,0.01882,0.01000,0.615
+1,self_attn.q_proj,0.61006,0.01000,0.611
+1,self_attn.o_proj,0.00132,0.01000,0.608
+1,mlp.up_proj,0.50808,0.01000,0.643
+1,mlp.gate_proj,0.69618,0.01000,0.790
+1,mlp.down_proj,1.31996,0.01000,2.535
+2,self_attn.k_proj,0.65871,0.01000,0.607
+2,self_attn.v_proj,0.04459,0.01000,0.610
+2,self_attn.q_proj,1.27351,0.01000,0.619
+2,self_attn.o_proj,0.00206,0.01000,0.596
+2,mlp.up_proj,0.65205,0.01000,0.631
+2,mlp.gate_proj,1.04195,0.01000,0.784
+2,mlp.down_proj,0.00808,0.01000,2.504
+3,self_attn.k_proj,0.43784,0.01000,0.592
+3,self_attn.v_proj,0.05698,0.01000,0.586
+3,self_attn.q_proj,0.94526,0.01000,0.610
+3,self_attn.o_proj,0.00391,0.01000,0.644
+3,mlp.up_proj,0.79450,0.01000,0.621
+3,mlp.gate_proj,1.58985,0.01000,0.798
+3,mlp.down_proj,0.01225,0.01000,2.532
+4,self_attn.k_proj,0.46703,0.01000,0.583
+4,self_attn.v_proj,0.05344,0.01000,0.588
+4,self_attn.q_proj,0.95165,0.01000,0.609
+4,self_attn.o_proj,0.00573,0.01000,0.604
+4,mlp.up_proj,0.80078,0.01000,0.630
+4,mlp.gate_proj,1.73697,0.01000,0.794
+4,mlp.down_proj,0.01413,0.01000,2.501
+5,self_attn.k_proj,0.69090,0.01000,0.596
+5,self_attn.v_proj,0.04588,0.01000,0.589
+5,self_attn.q_proj,1.21408,0.01000,0.664
+5,self_attn.o_proj,0.00586,0.01000,0.612
+5,mlp.up_proj,0.86928,0.01000,0.637
+5,mlp.gate_proj,1.61338,0.01000,0.800
+5,mlp.down_proj,0.01706,0.01000,2.504
+6,self_attn.k_proj,0.56137,0.01000,0.592
+6,self_attn.v_proj,0.05914,0.01000,0.583
+6,self_attn.q_proj,0.88346,0.01000,0.589
+6,self_attn.o_proj,0.00905,0.01000,0.614
+6,mlp.up_proj,0.88001,0.01000,0.630
+6,mlp.gate_proj,1.59125,0.01000,0.793
+6,mlp.down_proj,0.01736,0.01000,2.431
+7,self_attn.k_proj,0.56204,0.01000,0.584
+7,self_attn.v_proj,0.06787,0.01000,0.578
+7,self_attn.q_proj,1.04871,0.01000,0.627
+7,self_attn.o_proj,0.00866,0.01000,0.598
+7,mlp.up_proj,0.91172,0.01000,0.630
+7,mlp.gate_proj,1.47971,0.01000,0.801
+7,mlp.down_proj,0.01878,0.01000,2.453
+8,self_attn.k_proj,0.65364,0.01000,0.587
+8,self_attn.v_proj,0.06459,0.01000,0.599
+8,self_attn.q_proj,1.06362,0.01000,0.589
+8,self_attn.o_proj,0.01113,0.01000,0.585
+8,mlp.up_proj,1.00755,0.01000,0.611
+8,mlp.gate_proj,1.59544,0.01000,0.796
+8,mlp.down_proj,0.02531,0.01000,2.416
+9,self_attn.k_proj,0.54082,0.01000,0.584
+9,self_attn.v_proj,0.07637,0.01000,0.580
+9,self_attn.q_proj,1.30084,0.01000,0.591
+9,self_attn.o_proj,0.01507,0.01000,0.603
+9,mlp.up_proj,1.07703,0.01000,0.622
+9,mlp.gate_proj,1.76000,0.01000,0.796
+9,mlp.down_proj,0.03086,0.01000,2.426
+10,self_attn.k_proj,0.66062,0.01000,0.590
+10,self_attn.v_proj,0.08113,0.01000,0.583
+10,self_attn.q_proj,1.32715,0.01000,0.583
+10,self_attn.o_proj,0.01019,0.01000,0.594
+10,mlp.up_proj,1.28599,0.01000,0.622
+10,mlp.gate_proj,2.02958,0.01000,0.791
+10,mlp.down_proj,0.03905,0.01000,2.438
+11,self_attn.k_proj,0.79215,0.01000,0.583
+11,self_attn.v_proj,0.08159,0.01000,0.590
+11,self_attn.q_proj,1.31279,0.01000,0.571
+11,self_attn.o_proj,0.00862,0.01000,0.625
+11,mlp.up_proj,1.42253,0.01000,0.641
+11,mlp.gate_proj,2.19144,0.01000,0.801
+11,mlp.down_proj,0.04266,0.01000,2.499
+12,self_attn.k_proj,0.76721,0.01000,0.585
+12,self_attn.v_proj,0.08425,0.01000,0.580
+12,self_attn.q_proj,1.29475,0.01000,0.582
+12,self_attn.o_proj,0.00837,0.01000,0.586
+12,mlp.up_proj,1.47983,0.01000,0.626
+12,mlp.gate_proj,2.15212,0.01000,0.804
+12,mlp.down_proj,0.04750,0.01000,2.422
+13,self_attn.k_proj,0.73892,0.01000,0.589
+13,self_attn.v_proj,0.13723,0.01000,0.574
+13,self_attn.q_proj,1.45466,0.01000,0.589
+13,self_attn.o_proj,0.01131,0.01000,0.578
+13,mlp.up_proj,1.73117,0.01000,0.623
+13,mlp.gate_proj,2.29325,0.01000,0.791
+13,mlp.down_proj,0.06902,0.01000,2.415
+14,self_attn.k_proj,0.79778,0.01000,0.588
+14,self_attn.v_proj,0.29438,0.01000,0.582
+14,self_attn.q_proj,1.52455,0.01000,0.580
+14,self_attn.o_proj,0.02765,0.01000,0.586
+14,mlp.up_proj,1.98990,0.01000,0.606
+14,mlp.gate_proj,2.84552,0.01000,0.621
+14,mlp.down_proj,0.09091,0.01000,2.440
+15,self_attn.k_proj,0.71842,0.01000,0.607
+15,self_attn.v_proj,0.31329,0.01000,0.577
+15,self_attn.q_proj,1.39225,0.01000,0.585
+15,self_attn.o_proj,0.09505,0.01000,0.585
+15,mlp.up_proj,2.41008,0.01000,0.636
+15,mlp.gate_proj,3.18340,0.01000,0.802
+15,mlp.down_proj,0.21390,0.01000,2.454
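The log records one row per quantized module across all 16 layers: GPTQ reconstruction loss, the damp value used (0.01 throughout, never auto-incremented), and wall-clock seconds. Two patterns are visible in the raw numbers: gate_proj/up_proj losses grow with depth (0.45 at layer 0 up to 3.18 at layer 15), and down_proj is consistently the slowest module (~2.4-2.8 s vs ~0.6 s). A stdlib-only sketch for summarizing the log, assuming it sits in the working directory:

```python
# Minimal sketch: summarize quant_log.csv -- total quantization time
# and the highest-loss module in each layer.
import csv
from collections import defaultdict

per_layer = defaultdict(list)
total_time = 0.0
with open("quant_log.csv", newline="") as f:
    for row in csv.DictReader(f):
        per_layer[int(row["layer"])].append((float(row["loss"]), row["module"]))
        total_time += float(row["time"])

for layer in sorted(per_layer):
    loss, module = max(per_layer[layer])  # worst (loss, module) pair
    print(f"layer {layer:2d}: worst {module} loss={loss:.5f}")
print(f"total quantization time: {total_time:.1f}s")
```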
quantize_config.json
ADDED
@@ -0,0 +1,21 @@
+{
+  "bits": 4,
+  "dynamic": null,
+  "group_size": 32,
+  "desc_act": true,
+  "sym": true,
+  "lm_head": false,
+  "quant_method": "gptq",
+  "checkpoint_format": "gptq",
+  "meta": {
+    "quantizer": [
+      "gptqmodel:1.4.6-dev"
+    ],
+    "uri": "https://github.com/modelcloud/gptqmodel",
+    "damp_percent": 0.01,
+    "damp_auto_increment": 0.0025,
+    "static_groups": false,
+    "true_sequential": true,
+    "mse": 0.0
+  }
+}
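This file mirrors the `quantization_config` embedded in config.json and names the tool that produced it (gptqmodel 1.4.6-dev). A sketch of how a run with these settings would typically look, assuming the gptqmodel API as documented at meta.uri; the exact signatures, the toy calibration set, and the output path are all assumptions, not taken from this commit:

```python
# Minimal sketch: reproduce this checkpoint's settings with gptqmodel
# (API assumed from the project's README at https://github.com/modelcloud/gptqmodel).
from gptqmodel import GPTQModel, QuantizeConfig

quant_config = QuantizeConfig(
    bits=4,         # matches "bits": 4
    group_size=32,  # matches "group_size": 32
    desc_act=True,  # matches "desc_act": true
    sym=True,       # matches "sym": true
)

model = GPTQModel.load("meta-llama/Llama-3.2-1B-Instruct", quant_config)

# Toy calibration set for illustration only; a real run feeds a few
# hundred representative texts.
calibration = ["The quick brown fox jumps over the lazy dog."] * 128

model.quantize(calibration)  # logs per-module stats like quant_log.csv
model.save("Llama-3.2-1B-Instruct-GPTQ-4bit")  # writes quantize_config.json
```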