TheBloke committed on
Commit
3397d7e
1 Parent(s): 10b5f27

Fix BOS/EOS token

Browse files
Files changed (1) hide show
  1. config.json +3 -3
config.json CHANGED
@@ -4,8 +4,8 @@
4
  "LlamaForCausalLM"
5
  ],
6
  "attention_bias": false,
7
- "bos_token_id": 1,
8
- "eos_token_id": 2,
9
  "hidden_act": "silu",
10
  "hidden_size": 8192,
11
  "initializer_range": 0.02,
@@ -39,4 +39,4 @@
39
  "model_file_base_name": "model",
40
  "quant_method": "gptq"
41
  }
42
- }
 
4
  "LlamaForCausalLM"
5
  ],
6
  "attention_bias": false,
7
+ "bos_token_id": 32001,
8
+ "eos_token_id": 32001,
9
  "hidden_act": "silu",
10
  "hidden_size": 8192,
11
  "initializer_range": 0.02,
 
39
  "model_file_base_name": "model",
40
  "quant_method": "gptq"
41
  }
42
+ }