Update config.json
#8
by
mike-ravkine
- opened
- config.json +2 -2
config.json
CHANGED
@@ -11,8 +11,8 @@
     "AutoModelForCausalLM": "modelling_RW.RWForCausalLM"
   },
   "bias": false,
-  "bos_token_id":
-  "eos_token_id":
+  "bos_token_id": 11,
+  "eos_token_id": 11,
   "hidden_dropout": 0.0,
   "hidden_size": 4544,
   "initializer_range": 0.02,