Fix BOS/EOS token
config.json (+3 -3)
@@ -4,8 +4,8 @@
     "LlamaForCausalLM"
   ],
   "attention_bias": false,
-  "bos_token_id":
-  "eos_token_id":
+  "bos_token_id": 32001,
+  "eos_token_id": 32001,
   "hidden_act": "silu",
   "hidden_size": 8192,
   "initializer_range": 0.02,
@@ -39,4 +39,4 @@
     "model_file_base_name": "model",
     "quant_method": "gptq"
   }
-}
+}
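Why this matters: generation stops when the model emits the configured eos_token_id, so a wrong id typically shows up as output that never terminates. One way to confirm the fix from the consumer side is to read the special-token ids back with transformers' AutoConfig. A minimal sketch, assuming transformers is installed; the repo id below is a placeholder, not this repository's actual path:

from transformers import AutoConfig

# Placeholder repo id -- substitute the actual "user/model" path of this repository.
config = AutoConfig.from_pretrained("user/model-GPTQ")

# After this commit, both special-token ids point at token 32001
# (outside the base Llama vocabulary of 32,000, i.e. an added special token).
print(config.bos_token_id)  # 32001
print(config.eos_token_id)  # 32001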