zRzRzRzRzRzRzR committed
Commit 3094fed
1 Parent(s): ee343b5

Update config.json

Files changed (1)
config.json +4 -10
config.json CHANGED
@@ -25,25 +25,19 @@
  "kv_channels": 128,
  "layernorm_epsilon": 1.5625e-07,
  "multi_query_attention": true,
- "multi_query_group_num": 2,
+ "multi_query_group_num": 4,
  "num_attention_heads": 32,
  "num_hidden_layers": 40,
  "num_layers": 40,
- "rope_ratio": 500,
+ "rope_ratio": 10000,
  "original_rope": true,
  "padded_vocab_size": 151552,
  "post_layer_norm": true,
  "rmsnorm": true,
- "seq_length": 131072,
+ "seq_length": 1048576,
  "use_cache": true,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.30.2",
  "tie_word_embeddings": false,
- "eos_token_id": [
-   151329,
-   151336,
-   151338
- ],
+ "eos_token_id": [151329, 151336, 151338],
  "pad_token_id": 151329
- }
-