wejoncy committed
Commit 8051500
1 Parent(s): 231e78e

update config

Files changed (1)
  1. config.json +13 -12
config.json CHANGED
@@ -17,7 +17,15 @@
   "num_hidden_layers": 40,
   "num_key_value_heads": 40,
   "pretraining_tp": 1,
-  "quant_config": {
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 10000.0,
+  "tie_word_embeddings": false,
+  "torch_dtype": "float16",
+  "transformers_version": "4.37.2",
+  "use_cache": true,
+  "vocab_size": 32000,
+  "quantization_config": {
     "model.layers.0.mlp.down_proj": {
       "bias": false,
       "enable_norm": true,
@@ -6737,14 +6745,7 @@
         -1,
         4
       ]
-    }
-  },
-  "rms_norm_eps": 1e-05,
-  "rope_scaling": null,
-  "rope_theta": 10000.0,
-  "tie_word_embeddings": false,
-  "torch_dtype": "float16",
-  "transformers_version": "4.37.2",
-  "use_cache": true,
-  "vocab_size": 32000
-}
+    },
+    "quant_method": "vptq"
+  }
+}
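
Net effect of the change: the per-layer VPTQ settings that used to sit under a custom top-level "quant_config" key now live under the standard "quantization_config" key, after the regular model fields, together with a "quant_method": "vptq" tag naming the scheme, which is the key downstream tooling (e.g. the transformers loader) typically inspects for quantization metadata. A minimal Python sketch of how a consumer could check the new layout (the file path and printed summary are illustrative, not part of this repository):

import json

# Read the updated config.json (path is illustrative).
with open("config.json", encoding="utf-8") as f:
    config = json.load(f)

# After this commit the quantization settings live under the standard
# "quantization_config" key instead of the old top-level "quant_config".
quant_cfg = config["quantization_config"]

# The added "quant_method" tag identifies the scheme as VPTQ.
assert quant_cfg["quant_method"] == "vptq"

# The remaining entries are per-module settings keyed by module name,
# e.g. "model.layers.0.mlp.down_proj" as in the diff above.
layer_settings = {k: v for k, v in quant_cfg.items() if k != "quant_method"}
print(len(layer_settings), "quantized modules, e.g.", next(iter(layer_settings)))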