sara-m98 committed
Commit d5b4f4b
1 Parent(s): 4bf6fa9

Training in progress, epoch 1

added_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+   "[MASK]": 128000
+ }
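
added_tokens.json registers the single token added on top of the base SentencePiece vocabulary: [MASK] at id 128000, the usual placement for DeBERTa-v3 checkpoints. A minimal sketch, assuming a local clone of this repository (the `./` path is an assumption), to confirm the mapping:

```python
from transformers import AutoTokenizer

# Assumed: a local clone of this repository in the current directory.
tokenizer = AutoTokenizer.from_pretrained("./")

# [MASK] should resolve to the id declared in added_tokens.json.
assert tokenizer.mask_token == "[MASK]"
assert tokenizer.convert_tokens_to_ids("[MASK]") == 128000
```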
config.json ADDED
@@ -0,0 +1,233 @@
+ {
+   "_name_or_path": "microsoft/deberta-v3-base",
+   "architectures": [
+     "DebertaV2ForTokenClassification"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 768,
+   "id2label": {
+     "0": "O",
+     "1": "B-ECO_GRASA",
+     "2": "I-ECO_GRASA",
+     "3": "B-ECO_FIBROGLANDULAR",
+     "4": "I-ECO_FIBROGLANDULAR",
+     "5": "B-ECO_HETERO",
+     "6": "I-ECO_HETERO",
+     "7": "B-ECO_OVALADA",
+     "8": "I-ECO_OVALADA",
+     "9": "B-ECO_REDONDA",
+     "10": "I-ECO_REDONDA",
+     "11": "B-ECO_IRREGULAR",
+     "12": "I-ECO_IRREGULAR",
+     "13": "B-ECO_PARALELA",
+     "14": "I-ECO_PARALELA",
+     "15": "B-ECO_NO_PARALELA",
+     "16": "I-ECO_NO_PARALELA",
+     "17": "B-ECO_CIRCUNSC",
+     "18": "I-ECO_CIRCUNSC",
+     "19": "B-ECO_INDIS",
+     "20": "I-ECO_INDIS",
+     "21": "B-ECO_ANGULAR",
+     "22": "I-ECO_ANGULAR",
+     "23": "B-ECO_LOBUL",
+     "24": "I-ECO_LOBUL",
+     "25": "B-ECO_ESPIC",
+     "26": "I-ECO_ESPIC",
+     "27": "B-ECO_ANECO",
+     "28": "I-ECO_ANECO",
+     "29": "B-ECO_HIPEREC",
+     "30": "I-ECO_HIPEREC",
+     "31": "B-ECO_COMPL_SOL",
+     "32": "I-ECO_COMPL_SOL",
+     "33": "B-ECO_HIPOEC",
+     "34": "I-ECO_HIPOEC",
+     "35": "B-ECO_ISOEC",
+     "36": "I-ECO_ISOEC",
+     "37": "B-ECO_HETERO_PATRON",
+     "38": "I-ECO_HETERO_PATRON",
+     "39": "B-ECO_CP_NEG",
+     "40": "I-ECO_CP_NEG",
+     "41": "B-ECO_CP_REFURZ",
+     "42": "I-ECO_CP_REFURZ",
+     "43": "B-ECO_CP_SOMBRA",
+     "44": "I-ECO_CP_SOMBRA",
+     "45": "B-ECO_CP_MIXTO",
+     "46": "I-ECO_CP_MIXTO",
+     "47": "B-ECO_CAL_INT",
+     "48": "I-ECO_CAL_INT",
+     "49": "B-ECO_CAL_EXT",
+     "50": "I-ECO_CAL_EXT",
+     "51": "B-ECO_CAL_INTRADL",
+     "52": "I-ECO_CAL_INTRADL",
+     "53": "B-ECO_DIST",
+     "54": "I-ECO_DIST",
+     "55": "B-ECO_DUCT",
+     "56": "I-ECO_DUCT",
+     "57": "B-ECO_ENGR_PIEL",
+     "58": "I-ECO_ENGR_PIEL",
+     "59": "B-ECO_RETRA_PIEL",
+     "60": "I-ECO_RETRA_PIEL",
+     "61": "B-ECO_EDEMA",
+     "62": "I-ECO_EDEMA",
+     "63": "B-ECO_VAS_AUSEN",
+     "64": "I-ECO_VAS_AUSEN",
+     "65": "B-ECO_VAS_INT",
+     "66": "I-ECO_VAS_INT",
+     "67": "B-ECO_VAS_PERIF",
+     "68": "I-ECO_VAS_PERIF",
+     "69": "B-ECO_ELAS_BLAND",
+     "70": "I-ECO_ELAS_BLAND",
+     "71": "B-ECO_ELAS_MED",
+     "72": "I-ECO_ELAS_MED",
+     "73": "B-ECO_ELAS_DUR",
+     "74": "I-ECO_ELAS_DUR",
+     "75": "B-ECO_QUIST_SIMP",
+     "76": "I-ECO_QUIST_SIMP",
+     "77": "B-ECO_MQUIST_AGRUP",
+     "78": "I-ECO_MQUIST_AGRUP",
+     "79": "B-ECO_QUIST_COMPL",
+     "80": "I-ECO_QUIST_COMPL",
+     "81": "B-ECO_QUIST_PIEL",
+     "82": "I-ECO_QUIST_PIEL",
+     "83": "B-ECO_CUERP_EXTR",
+     "84": "I-ECO_CUERP_EXTR",
+     "85": "B-ECO_GANG_INTRA",
+     "86": "I-ECO_GANG_INTRA",
+     "87": "B-ECO_GANG_AXIL",
+     "88": "I-ECO_GANG_AXIL",
+     "89": "B-ECO_ALT_VASC",
+     "90": "I-ECO_ALT_VASC",
+     "91": "B-ECO_MONDOR",
+     "92": "I-ECO_MONDOR",
+     "93": "B-ECO_ACUM_LIQ",
+     "94": "I-ECO_ACUM_LIQ",
+     "95": "B-ECO_NECR_GRAS",
+     "96": "I-ECO_NECR_GRAS"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "label2id": {
+     "B-ECO_ACUM_LIQ": 93,
+     "B-ECO_ALT_VASC": 89,
+     "B-ECO_ANECO": 27,
+     "B-ECO_ANGULAR": 21,
+     "B-ECO_CAL_EXT": 49,
+     "B-ECO_CAL_INT": 47,
+     "B-ECO_CAL_INTRADL": 51,
+     "B-ECO_CIRCUNSC": 17,
+     "B-ECO_COMPL_SOL": 31,
+     "B-ECO_CP_MIXTO": 45,
+     "B-ECO_CP_NEG": 39,
+     "B-ECO_CP_REFURZ": 41,
+     "B-ECO_CP_SOMBRA": 43,
+     "B-ECO_CUERP_EXTR": 83,
+     "B-ECO_DIST": 53,
+     "B-ECO_DUCT": 55,
+     "B-ECO_EDEMA": 61,
+     "B-ECO_ELAS_BLAND": 69,
+     "B-ECO_ELAS_DUR": 73,
+     "B-ECO_ELAS_MED": 71,
+     "B-ECO_ENGR_PIEL": 57,
+     "B-ECO_ESPIC": 25,
+     "B-ECO_FIBROGLANDULAR": 3,
+     "B-ECO_GANG_AXIL": 87,
+     "B-ECO_GANG_INTRA": 85,
+     "B-ECO_GRASA": 1,
+     "B-ECO_HETERO": 5,
+     "B-ECO_HETERO_PATRON": 37,
+     "B-ECO_HIPEREC": 29,
+     "B-ECO_HIPOEC": 33,
+     "B-ECO_INDIS": 19,
+     "B-ECO_IRREGULAR": 11,
+     "B-ECO_ISOEC": 35,
+     "B-ECO_LOBUL": 23,
+     "B-ECO_MONDOR": 91,
+     "B-ECO_MQUIST_AGRUP": 77,
+     "B-ECO_NECR_GRAS": 95,
+     "B-ECO_NO_PARALELA": 15,
+     "B-ECO_OVALADA": 7,
+     "B-ECO_PARALELA": 13,
+     "B-ECO_QUIST_COMPL": 79,
+     "B-ECO_QUIST_PIEL": 81,
+     "B-ECO_QUIST_SIMP": 75,
+     "B-ECO_REDONDA": 9,
+     "B-ECO_RETRA_PIEL": 59,
+     "B-ECO_VAS_AUSEN": 63,
+     "B-ECO_VAS_INT": 65,
+     "B-ECO_VAS_PERIF": 67,
+     "I-ECO_ACUM_LIQ": 94,
+     "I-ECO_ALT_VASC": 90,
+     "I-ECO_ANECO": 28,
+     "I-ECO_ANGULAR": 22,
+     "I-ECO_CAL_EXT": 50,
+     "I-ECO_CAL_INT": 48,
+     "I-ECO_CAL_INTRADL": 52,
+     "I-ECO_CIRCUNSC": 18,
+     "I-ECO_COMPL_SOL": 32,
+     "I-ECO_CP_MIXTO": 46,
+     "I-ECO_CP_NEG": 40,
+     "I-ECO_CP_REFURZ": 42,
+     "I-ECO_CP_SOMBRA": 44,
+     "I-ECO_CUERP_EXTR": 84,
+     "I-ECO_DIST": 54,
+     "I-ECO_DUCT": 56,
+     "I-ECO_EDEMA": 62,
+     "I-ECO_ELAS_BLAND": 70,
+     "I-ECO_ELAS_DUR": 74,
+     "I-ECO_ELAS_MED": 72,
+     "I-ECO_ENGR_PIEL": 58,
+     "I-ECO_ESPIC": 26,
+     "I-ECO_FIBROGLANDULAR": 4,
+     "I-ECO_GANG_AXIL": 88,
+     "I-ECO_GANG_INTRA": 86,
+     "I-ECO_GRASA": 2,
+     "I-ECO_HETERO": 6,
+     "I-ECO_HETERO_PATRON": 38,
+     "I-ECO_HIPEREC": 30,
+     "I-ECO_HIPOEC": 34,
+     "I-ECO_INDIS": 20,
+     "I-ECO_IRREGULAR": 12,
+     "I-ECO_ISOEC": 36,
+     "I-ECO_LOBUL": 24,
+     "I-ECO_MONDOR": 92,
+     "I-ECO_MQUIST_AGRUP": 78,
+     "I-ECO_NECR_GRAS": 96,
+     "I-ECO_NO_PARALELA": 16,
+     "I-ECO_OVALADA": 8,
+     "I-ECO_PARALELA": 14,
+     "I-ECO_QUIST_COMPL": 80,
+     "I-ECO_QUIST_PIEL": 82,
+     "I-ECO_QUIST_SIMP": 76,
+     "I-ECO_REDONDA": 10,
+     "I-ECO_RETRA_PIEL": 60,
+     "I-ECO_VAS_AUSEN": 64,
+     "I-ECO_VAS_INT": 66,
+     "I-ECO_VAS_PERIF": 68,
+     "O": 0
+   },
+   "layer_norm_eps": 1e-07,
+   "max_position_embeddings": 512,
+   "max_relative_positions": -1,
+   "model_type": "deberta-v2",
+   "norm_rel_ebd": "layer_norm",
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "pad_token_id": 0,
+   "pooler_dropout": 0,
+   "pooler_hidden_act": "gelu",
+   "pooler_hidden_size": 768,
+   "pos_att_type": [
+     "p2c",
+     "c2p"
+   ],
+   "position_biased_input": false,
+   "position_buckets": 256,
+   "relative_attention": true,
+   "share_att_key": true,
+   "torch_dtype": "float32",
+   "transformers_version": "4.42.3",
+   "type_vocab_size": 0,
+   "vocab_size": 128100
+ }
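
config.json attaches a DebertaV2ForTokenClassification head to the microsoft/deberta-v3-base encoder with 97 labels: the O tag plus B-/I- pairs for 48 of what appear to be Spanish ultrasound (ECO_*) finding descriptors in the BIO scheme. A hedged sketch of loading the configuration and model from a local clone and checking that label space (the `./` path is an assumption):

```python
from transformers import AutoConfig, AutoModelForTokenClassification

config = AutoConfig.from_pretrained("./")   # assumed local clone of this repo
print(config.model_type)                    # "deberta-v2"
print(len(config.id2label))                 # 97 labels: "O" plus 48 B-/I- ECO_* pairs

# model.safetensors holds the encoder weights plus a token-classification
# head sized for these 97 labels.
model = AutoModelForTokenClassification.from_pretrained("./")
assert model.config.label2id["B-ECO_GRASA"] == 1
```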
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0c2aa428a42cfac586e056743858ba8242474e05305ea9ce6299165d095a6f38
+ size 735648964
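
model.safetensors is tracked with Git LFS, so the diff only shows the pointer file: the spec version, the SHA-256 of the real payload, and its size (about 736 MB). A small sketch, assuming the actual weights have been fetched (for example with `git lfs pull`), that checks the downloaded file against the pointer:

```python
import hashlib
import os

# Assumed: the LFS object has been pulled into the local clone.
path = "model.safetensors"
expected_oid = "0c2aa428a42cfac586e056743858ba8242474e05305ea9ce6299165d095a6f38"
expected_size = 735648964

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

assert os.path.getsize(path) == expected_size
assert sha.hexdigest() == expected_oid
```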
runs/Jul02_12-02-04_43af6b0eb7d1/events.out.tfevents.1719921726.43af6b0eb7d1.295.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:541ab00325d5748ff557f8749b5de52e1486fb69baa94d095df9b45ba91ba7fe
+ size 10672
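
The runs/ entry is the TensorBoard event file the Trainer wrote during this first epoch, again stored as an LFS pointer. A minimal sketch for listing whatever scalars it contains, assuming the tensorboard package is installed and the file has been pulled:

```python
from tensorboard.backend.event_processing import event_accumulator

# Assumed: the event file has been pulled from LFS into the local clone.
ea = event_accumulator.EventAccumulator(
    "runs/Jul02_12-02-04_43af6b0eb7d1/events.out.tfevents.1719921726.43af6b0eb7d1.295.0"
)
ea.Reload()
print(ea.Tags()["scalars"])   # e.g. training loss / learning rate, whatever was logged
```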
special_tokens_map.json ADDED
@@ -0,0 +1,15 @@
+ {
+   "bos_token": "[CLS]",
+   "cls_token": "[CLS]",
+   "eos_token": "[SEP]",
+   "mask_token": "[MASK]",
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "unk_token": {
+     "content": "[UNK]",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
spm.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c679fbf93643d19aab7ee10c0b99e460bdbc02fedf34b92b05af343b4af586fd
+ size 2464616
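
spm.model is the SentencePiece model inherited from deberta-v3-base; DebertaV2Tokenizer loads it through the sentencepiece library (vocab_type "spm" in tokenizer_config.json below). A small sketch of opening it directly, assuming a local clone:

```python
import sentencepiece as spm

sp = spm.SentencePieceProcessor(model_file="spm.model")   # assumed local path
print(sp.get_piece_size())                                # SentencePiece vocabulary size

# Example Spanish ultrasound phrase, purely illustrative.
print(sp.encode("nódulo ovalado, circunscrito e hipoecoico", out_type=str))
```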
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,59 @@
+ {
+   "add_prefix_space": true,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "[PAD]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "[CLS]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "[SEP]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "3": {
+       "content": "[UNK]",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "128000": {
+       "content": "[MASK]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "[CLS]",
+   "clean_up_tokenization_spaces": true,
+   "cls_token": "[CLS]",
+   "do_lower_case": false,
+   "eos_token": "[SEP]",
+   "mask_token": "[MASK]",
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "sp_model_kwargs": {},
+   "split_by_punct": false,
+   "tokenizer_class": "DebertaV2Tokenizer",
+   "unk_token": "[UNK]",
+   "vocab_type": "spm"
+ }
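
tokenizer_config.json completes the DebertaV2Tokenizer setup: a SentencePiece backend, the four base special tokens at ids 0-3, [MASK] at 128000, and an effectively unbounded model_max_length (the encoder itself is limited by max_position_embeddings = 512 in config.json). With the files above in place, the checkpoint can be exercised end to end; a hedged sketch using the Transformers pipeline on a local clone (path and input sentence are illustrative):

```python
from transformers import pipeline

# Assumed: local clone with the weights pulled from LFS.
ner = pipeline(
    "token-classification",
    model="./",
    aggregation_strategy="simple",   # merge B-/I- pieces into whole spans
)

print(ner("Se observa un nódulo ovalado, circunscrito e hipoecoico."))
```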
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7f2f12cab7c9be04125050183bf0df43add97d5b91945771501eaecf24ffa95f
+ size 5112
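
training_args.bin is the pickled TrainingArguments object the Trainer saves next to each checkpoint; it records the run's hyperparameters rather than any weights. A cautious sketch for inspecting it, assuming compatible transformers/torch versions (it is a pickle, so only load files from sources you trust):

```python
import torch

# Assumed: transformers is importable so the pickled TrainingArguments class resolves.
args = torch.load("training_args.bin", weights_only=False)
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)
```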