HandH1998 committed (verified)
Commit a45b66e · Parent(s): e8da814

Update config.json

Files changed (1): config.json (+5 -0)
config.json CHANGED
@@ -3,6 +3,11 @@
   "architectures": [
     "LlamaForCausalLM"
   ],
+  "quantization_config": {
+    "group_size": -1,
+    "quant_method": "qqq",
+    "wbits": 4
+  },
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,