voidful committed
Commit 94cc4e1
Parent: c50ef3f

Update config.json

Files changed (1):
  config.json +3 -3
config.json CHANGED
@@ -2,7 +2,7 @@
   "architectures": [
     "MllamaForCausalLM"
   ],
-  "bos_token_id": 128000,
+  "bos_token_id": 50257,
   "cross_attention_layers": [
     3,
     8,
@@ -14,7 +14,7 @@
     38
   ],
   "dropout": 0,
-  "eos_token_id": 128001,
+  "eos_token_id": 50257,
   "hidden_act": "silu",
   "hidden_size": 4096,
   "initializer_range": 0.02,
@@ -24,7 +24,7 @@
   "num_attention_heads": 32,
   "num_hidden_layers": 40,
   "num_key_value_heads": 8,
-  "pad_token_id": 128004,
+  "pad_token_id": 50257,
   "rms_norm_eps": 1e-05,
   "rope_scaling": {
     "factor": 8.0,