cognitivess committed
Commit a7eb06e
Parent(s): 1a46d2c
Update cognitivess_model/configuration_cognitivess.py
cognitivess_model/configuration_cognitivess.py CHANGED
@@ -110,23 +110,23 @@ class CognitivessConfig(PretrainedConfig):
 
     def __init__(
         self,
-        vocab_size=…,
+        vocab_size=128256,
         hidden_size=4096,
-        intermediate_size=…,
+        intermediate_size=14336,
         num_hidden_layers=32,
         num_attention_heads=32,
-        num_key_value_heads=…,
+        num_key_value_heads=8,
         hidden_act="silu",
-        max_position_embeddings=…,
+        max_position_embeddings=8192,
         initializer_range=0.02,
-        rms_norm_eps=1e-…,
+        rms_norm_eps=1e-05,
         use_cache=True,
-        pad_token_id=…,
-        bos_token_id=…,
-        eos_token_id=…,
+        pad_token_id=0,
+        bos_token_id=128000,
+        eos_token_id=128001,
         pretraining_tp=1,
         tie_word_embeddings=False,
-        rope_theta=…,
+        rope_theta=500000.0,
         rope_scaling=None,
         attention_bias=False,
         attention_dropout=0.0,