wejoncy committed
Commit: 3f33b9c
Parent(s): 8143595

update config

Files changed (1): config.json (+14, -13)
config.json CHANGED

@@ -16,7 +16,16 @@
   "num_attention_heads": 40,
   "num_hidden_layers": 64,
   "num_key_value_heads": 8,
-  "quant_config": {
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 1000000.0,
+  "sliding_window": null,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.37.2",
+  "use_cache": true,
+  "use_sliding_window": false,
+  "vocab_size": 152064,
+  "quantization_config": {
   "model.layers.0.mlp.down_proj": {
   "bias": null,
   "enable_norm": true,
@@ -10768,15 +10777,7 @@
   -1,
   8
   ]
-  }
-  },
-  "rms_norm_eps": 1e-06,
-  "rope_theta": 1000000.0,
-  "sliding_window": null,
-  "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.37.2",
-  "use_cache": true,
-  "use_sliding_window": false,
-  "vocab_size": 152064
-  }
+  },
+  "quant_method": "vptq"
+  }
+  }