williamberman committed
Commit: f75f3f6
1 Parent(s): fdd7649

Update transformer/config.json

Files changed: transformer/config.json (+1 -1)
transformer/config.json CHANGED
@@ -25,7 +25,7 @@
     "intermediate_size": 2816,
     "layer_norm_before_mlm": false,
     "layer_norm_embedddings": false,
-    "layer_norm_eps":
+    "layer_norm_eps": 0.000001,
     "learn_uncond_embeds": false,
     "ln_elementwise_affine": true,
     "mask_token_id": 8255,
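
Before this commit, line 28 read "layer_norm_eps": with no value, which is not valid JSON, so the file could not be parsed. The fix supplies 0.000001 (1e-6), a typical layer-norm epsilon. Below is a minimal sketch of how a value like this is usually consumed, assuming a PyTorch LayerNorm and a hypothetical hidden size; it is not taken from this repository's actual model code.

import json

import torch.nn as nn

# Load the fixed config; before this commit json.load would raise a
# JSONDecodeError because "layer_norm_eps" had no value.
with open("transformer/config.json") as f:
    config = json.load(f)

hidden_size = 1024  # hypothetical value for illustration; not shown in the diff
norm = nn.LayerNorm(
    hidden_size,
    eps=config["layer_norm_eps"],                        # 0.000001 after this commit
    elementwise_affine=config["ln_elementwise_affine"],  # true in the config
)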