bond005 committed on
Commit 5286319
Parent(s): 12348e2

The neural network initialization bug is fixed.

modeling_hierarchical_classifier.py CHANGED
@@ -77,7 +77,7 @@ class DistanceBasedLogisticLoss(_Loss):
         inputs = inputs.view(-1)
         targets = targets.to(inputs.dtype).view(-1)
         p = distance_to_probability(inputs, self.margin)
-        return 1.0 - torch.nn.functional.binary_cross_entropy(input=p, target=targets, reduction=self.reduction)
+        return torch.nn.functional.binary_cross_entropy(input=p, target=targets, reduction=self.reduction)
 
 
 class LayerGatingNetwork(torch.nn.Module):
@@ -128,11 +128,6 @@ class XLMRobertaXLForHierarchicalEmbedding(XLMRobertaXLPreTrainedModel, ABC):
 
         self.init_weights()
 
-    def init_weights(self):
-        super().init_weights()
-        with torch.no_grad():
-            self.layer_weights.reset_parameters()
-
     def forward(
         self,
         input_ids: Optional[torch.LongTensor] = None,
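The first hunk fixes the sign of the distance-based logistic loss: returning 1.0 minus the binary cross-entropy gives a quantity that shrinks as the predictions get worse, so minimizing it drives the model in the wrong direction (and the value can even go negative). Returning the binary cross-entropy itself restores a proper objective. The sketch below illustrates the difference; distance_to_probability and the margin value here are hypothetical stand-ins, not the repository's exact definitions.

import torch

def distance_to_probability(distances: torch.Tensor, margin: float) -> torch.Tensor:
    # Hypothetical stand-in: map a pairwise distance to a match probability in (0, 1).
    # The repository defines its own version of this function.
    return torch.sigmoid(margin - distances)

distances = torch.tensor([0.1, 2.5, 0.3, 3.0])  # small distance = likely the same class
targets = torch.tensor([1.0, 0.0, 1.0, 0.0])    # 1 = same class, 0 = different classes

p = distance_to_probability(distances, margin=1.0)
bce = torch.nn.functional.binary_cross_entropy(input=p, target=targets, reduction="mean")

print("fixed loss (BCE):", bce.item())             # decreases as predictions improve
print("old value (1 - BCE):", (1.0 - bce).item())  # increases as predictions improve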
 
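The second hunk removes the init_weights override that re-ran reset_parameters() on the layer-gating weights after the base class had already initialized the model; this is the initialization bug named in the commit message. The sketch below shows one plausible reading of the pitfall with a hypothetical, minimal gating module (the repository's LayerGatingNetwork may differ): a deliberate initialization chosen at construction time is silently overwritten by the extra reset_parameters() call.

import torch

class LayerGatingNetwork(torch.nn.Module):
    # Hypothetical minimal stand-in for the repository's gating module:
    # one learnable scalar weight per transformer layer, deliberately
    # initialized to average all layers equally.
    def __init__(self, num_layers: int):
        super().__init__()
        self.weight = torch.nn.Parameter(torch.full((num_layers,), 1.0 / num_layers))

    def reset_parameters(self):
        # Generic re-initialization (random normal weights).
        torch.nn.init.normal_(self.weight)

    def forward(self, layer_outputs: torch.Tensor) -> torch.Tensor:
        # layer_outputs: (num_layers, batch, hidden) -> weighted sum over layers.
        w = torch.softmax(self.weight, dim=0)
        return torch.einsum("l,lbh->bh", w, layer_outputs)


gate = LayerGatingNetwork(num_layers=4)
print("intended init:", gate.weight.data)       # uniform weights, as constructed

# The deleted override effectively did this after the base init_weights():
with torch.no_grad():
    gate.reset_parameters()
print("after extra reset:", gate.weight.data)   # the intended initialization is gone

With the override removed, the gating weights keep whatever state the standard initialization path gives them, and nothing resets them a second time.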