KoichiYasuoka committed
Commit 0d621b7 • 1 Parent(s): 95bc522

Update modeling_ltgbert.py

When initializing `LtgbertForTokenClassification`, several `LayerNorm`s don't have `weight` or `bias`.
modeling_ltgbert.py  +4 -2  CHANGED

@@ -252,8 +252,10 @@ class LtgbertPreTrainedModel(PreTrainedModel):
         elif isinstance(module, nn.Embedding):
             nn.init.trunc_normal_(module.weight.data, mean=0.0, std=std, a=-2*std, b=2*std)
         elif isinstance(module, nn.LayerNorm):
-            module.bias.data.zero_()
-            module.weight.data.fill_(1.0)
+            if module.bias is not None:
+                module.bias.data.zero_()
+            if module.weight is not None:
+                module.weight.data.fill_(1.0)
 
 
 class LtgbertModel(LtgbertPreTrainedModel):
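For context, a minimal sketch (not part of the repository) of why the guard is needed: a `torch.nn.LayerNorm` constructed with `elementwise_affine=False` exposes `weight` and `bias` as `None`, so the unguarded `.data.zero_()` / `.data.fill_(1.0)` calls in `_init_weights` would raise an `AttributeError`. The hidden size 768 below is illustrative, not taken from the model config.

import torch.nn as nn

# Illustrative only: 768 is an assumed hidden size.
ln = nn.LayerNorm(768, elementwise_affine=False)
print(ln.weight, ln.bias)  # both None when elementwise_affine=False

# Guarded re-initialization, mirroring the patched _init_weights logic:
if ln.bias is not None:
    ln.bias.data.zero_()
if ln.weight is not None:
    ln.weight.data.fill_(1.0)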