T-Almeida commited on
Commit
7a969c2
1 Parent(s): 120b87e

Upload model

Browse files
config.json CHANGED
@@ -5,6 +5,9 @@
5
  ],
6
  "args_random_seed": 42,
7
  "attention_probs_dropout_prob": 0.1,
 
 
 
8
  "augmentation": "None",
9
  "auto_map": {
10
  "AutoConfig": "configuration_multiheadcrf.MultiHeadCRFConfig",
@@ -53,6 +56,6 @@
53
  "transformers_version": "4.40.2",
54
  "type_vocab_size": 1,
55
  "use_cache": true,
56
- "version": "0.1.2",
57
  "vocab_size": 50262
58
  }
 
5
  ],
6
  "args_random_seed": 42,
7
  "attention_probs_dropout_prob": 0.1,
8
+ "aug_prob": [
9
+ 0.5
10
+ ],
11
  "augmentation": "None",
12
  "auto_map": {
13
  "AutoConfig": "configuration_multiheadcrf.MultiHeadCRFConfig",
 
56
  "transformers_version": "4.40.2",
57
  "type_vocab_size": 1,
58
  "use_cache": true,
59
+ "version": "0.1.3",
60
  "vocab_size": 50262
61
  }
configuration_multiheadcrf.py CHANGED
@@ -14,9 +14,10 @@ class MultiHeadCRFConfig(PretrainedConfig):
14
  context_size = 64,
15
  percentage_tags = 0.2,
16
  p_augmentation = 0.5,
 
17
  crf_reduction = "mean",
18
  freeze = False,
19
- version="0.1.2",
20
  **kwargs,
21
  ):
22
  self.classes = classes
@@ -26,8 +27,10 @@ class MultiHeadCRFConfig(PretrainedConfig):
26
  self.context_size = context_size
27
  self.percentage_tags = percentage_tags
28
  self.p_augmentation = p_augmentation
 
29
  self.crf_reduction = crf_reduction
30
  self.freeze=freeze
 
31
  super().__init__(**kwargs)
32
 
33
 
 
14
  context_size = 64,
15
  percentage_tags = 0.2,
16
  p_augmentation = 0.5,
17
+ aug_prob = 0.5,
18
  crf_reduction = "mean",
19
  freeze = False,
20
+ version="0.1.3",
21
  **kwargs,
22
  ):
23
  self.classes = classes
 
27
  self.context_size = context_size
28
  self.percentage_tags = percentage_tags
29
  self.p_augmentation = p_augmentation
30
+ self.aug_prob = aug_prob
31
  self.crf_reduction = crf_reduction
32
  self.freeze=freeze
33
+ self.version = version
34
  super().__init__(**kwargs)
35
 
36
 
modeling_multiheadcrf.py CHANGED
@@ -41,7 +41,15 @@ class RobertaMultiHeadCRFModel(PreTrainedModel):
41
 
42
  if self.config.freeze == True:
43
  self.manage_freezing()
44
-
 
 
 
 
 
 
 
 
45
  def manage_freezing(self):
46
  for _, param in self.bert.embeddings.named_parameters():
47
  param.requires_grad = False
 
41
 
42
  if self.config.freeze == True:
43
  self.manage_freezing()
44
+
45
+ def training_mode(self):
46
+ # for some reason these layers are not being correctly initialized
47
+ # probably related to the lifecycle of the HF .from_pretrained method
48
+ self.dense.reset_parameters()
49
+ self.classifier.reset_parameters()
50
+ self.crf.reset_parameters()
51
+ self.crf.mask_impossible_transitions()
52
+
53
  def manage_freezing(self):
54
  for _, param in self.bert.embeddings.named_parameters():
55
  param.requires_grad = False