hugo-albert committed on
Commit
dac2428
1 Parent(s): b609a69

Training in progress, epoch 1

Browse files
config.json CHANGED
@@ -10,142 +10,142 @@
10
  "hidden_dropout_prob": 0.1,
11
  "hidden_size": 768,
12
  "id2label": {
13
- "0": "Fe",
14
- "1": "vmg",
15
- "2": "de",
16
- "3": "W",
17
- "4": "np",
18
- "5": "vai",
19
- "6": "Fh",
20
- "7": "vsi",
21
- "8": "vsg",
22
- "9": "Fat",
23
- "10": "Fs",
24
- "11": "dt",
25
- "12": "sp",
26
- "13": "Fc",
27
- "14": "pt",
28
- "15": "pi",
29
- "16": "rn",
30
- "17": "vas",
31
- "18": "Zm",
32
- "19": "X",
33
- "20": "vms",
34
- "21": "rg",
35
- "22": "vsn",
36
- "23": "da",
37
- "24": "vsm",
38
- "25": "nc",
39
- "26": "vss",
40
- "27": "pe",
41
- "28": "Fg",
42
- "29": "Fx",
43
- "30": "vmp",
44
- "31": "px",
45
- "32": "aq",
46
- "33": "pn",
47
- "34": "dn",
48
- "35": "Fd",
49
- "36": "ao",
50
- "37": "Fp",
51
- "38": "Zp",
52
- "39": "vap",
53
- "40": "Y",
54
- "41": "I",
55
- "42": "cs",
56
- "43": "pr",
57
- "44": "Z",
58
- "45": "vmm",
59
- "46": "vmi",
60
- "47": "Fpt",
61
- "48": "Fit",
62
- "49": "van",
63
- "50": "vag",
64
- "51": "vmn",
65
- "52": "p0",
66
- "53": "Fia",
67
- "54": "i",
68
- "55": "Faa",
69
- "56": "vam",
70
- "57": "Fpa",
71
- "58": "pp",
72
- "59": "cc",
73
- "60": "pd",
74
- "61": "vsp",
75
- "62": "dp",
76
- "63": "Fz",
77
- "64": "dd",
78
- "65": "di"
79
  },
80
  "initializer_range": 0.02,
81
  "intermediate_size": 3072,
82
  "label2id": {
83
- "Faa": 55,
84
- "Fat": 9,
85
- "Fc": 13,
86
- "Fd": 35,
87
- "Fe": 0,
88
- "Fg": 28,
89
- "Fh": 6,
90
- "Fia": 53,
91
- "Fit": 48,
92
- "Fp": 37,
93
- "Fpa": 57,
94
- "Fpt": 47,
95
- "Fs": 10,
96
- "Fx": 29,
97
- "Fz": 63,
98
- "I": 41,
99
- "W": 3,
100
- "X": 19,
101
- "Y": 40,
102
- "Z": 44,
103
- "Zm": 18,
104
- "Zp": 38,
105
- "ao": 36,
106
- "aq": 32,
107
- "cc": 59,
108
- "cs": 42,
109
- "da": 23,
110
- "dd": 64,
111
- "de": 2,
112
- "di": 65,
113
- "dn": 34,
114
- "dp": 62,
115
- "dt": 11,
116
- "i": 54,
117
- "nc": 25,
118
- "np": 4,
119
- "p0": 52,
120
- "pd": 60,
121
- "pe": 27,
122
- "pi": 15,
123
- "pn": 33,
124
- "pp": 58,
125
- "pr": 43,
126
- "pt": 14,
127
- "px": 31,
128
- "rg": 21,
129
- "rn": 16,
130
- "sp": 12,
131
- "vag": 50,
132
- "vai": 5,
133
- "vam": 56,
134
- "van": 49,
135
- "vap": 39,
136
- "vas": 17,
137
- "vmg": 1,
138
- "vmi": 46,
139
- "vmm": 45,
140
- "vmn": 51,
141
- "vmp": 30,
142
- "vms": 20,
143
- "vsg": 8,
144
- "vsi": 7,
145
- "vsm": 24,
146
- "vsn": 22,
147
- "vsp": 61,
148
- "vss": 26
149
  },
150
  "layer_norm_eps": 1e-12,
151
  "max_position_embeddings": 512,
 
10
  "hidden_dropout_prob": 0.1,
11
  "hidden_size": 768,
12
  "id2label": {
13
+ "0": "rn",
14
+ "1": "di",
15
+ "2": "Fd",
16
+ "3": "Fh",
17
+ "4": "pn",
18
+ "5": "Fp",
19
+ "6": "ao",
20
+ "7": "Fc",
21
+ "8": "Fia",
22
+ "9": "Fg",
23
+ "10": "vss",
24
+ "11": "Faa",
25
+ "12": "I",
26
+ "13": "Fat",
27
+ "14": "pp",
28
+ "15": "nc",
29
+ "16": "vmp",
30
+ "17": "Z",
31
+ "18": "aq",
32
+ "19": "px",
33
+ "20": "dp",
34
+ "21": "Zp",
35
+ "22": "vap",
36
+ "23": "vsn",
37
+ "24": "vms",
38
+ "25": "da",
39
+ "26": "Fpa",
40
+ "27": "vsi",
41
+ "28": "vam",
42
+ "29": "Y",
43
+ "30": "vsg",
44
+ "31": "dd",
45
+ "32": "Fz",
46
+ "33": "rg",
47
+ "34": "vas",
48
+ "35": "vsp",
49
+ "36": "W",
50
+ "37": "sp",
51
+ "38": "vmn",
52
+ "39": "vmm",
53
+ "40": "p0",
54
+ "41": "pt",
55
+ "42": "Zm",
56
+ "43": "dn",
57
+ "44": "vai",
58
+ "45": "vmi",
59
+ "46": "Fit",
60
+ "47": "i",
61
+ "48": "cs",
62
+ "49": "vsm",
63
+ "50": "Fs",
64
+ "51": "de",
65
+ "52": "X",
66
+ "53": "pr",
67
+ "54": "cc",
68
+ "55": "vmg",
69
+ "56": "pi",
70
+ "57": "Fe",
71
+ "58": "Fpt",
72
+ "59": "van",
73
+ "60": "pe",
74
+ "61": "vag",
75
+ "62": "pd",
76
+ "63": "np",
77
+ "64": "Fx",
78
+ "65": "dt"
79
  },
80
  "initializer_range": 0.02,
81
  "intermediate_size": 3072,
82
  "label2id": {
83
+ "Faa": 11,
84
+ "Fat": 13,
85
+ "Fc": 7,
86
+ "Fd": 2,
87
+ "Fe": 57,
88
+ "Fg": 9,
89
+ "Fh": 3,
90
+ "Fia": 8,
91
+ "Fit": 46,
92
+ "Fp": 5,
93
+ "Fpa": 26,
94
+ "Fpt": 58,
95
+ "Fs": 50,
96
+ "Fx": 64,
97
+ "Fz": 32,
98
+ "I": 12,
99
+ "W": 36,
100
+ "X": 52,
101
+ "Y": 29,
102
+ "Z": 17,
103
+ "Zm": 42,
104
+ "Zp": 21,
105
+ "ao": 6,
106
+ "aq": 18,
107
+ "cc": 54,
108
+ "cs": 48,
109
+ "da": 25,
110
+ "dd": 31,
111
+ "de": 51,
112
+ "di": 1,
113
+ "dn": 43,
114
+ "dp": 20,
115
+ "dt": 65,
116
+ "i": 47,
117
+ "nc": 15,
118
+ "np": 63,
119
+ "p0": 40,
120
+ "pd": 62,
121
+ "pe": 60,
122
+ "pi": 56,
123
+ "pn": 4,
124
+ "pp": 14,
125
+ "pr": 53,
126
+ "pt": 41,
127
+ "px": 19,
128
+ "rg": 33,
129
+ "rn": 0,
130
+ "sp": 37,
131
+ "vag": 61,
132
+ "vai": 44,
133
+ "vam": 28,
134
+ "van": 59,
135
+ "vap": 22,
136
+ "vas": 34,
137
+ "vmg": 55,
138
+ "vmi": 45,
139
+ "vmm": 39,
140
+ "vmn": 38,
141
+ "vmp": 16,
142
+ "vms": 24,
143
+ "vsg": 30,
144
+ "vsi": 27,
145
+ "vsm": 49,
146
+ "vsn": 23,
147
+ "vsp": 35,
148
+ "vss": 10
149
  },
150
  "layer_norm_eps": 1e-12,
151
  "max_position_embeddings": 512,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:ccc0c7efbcbfce4be207c2decfb0f1123e963a8c4f17e4e010ba9e5cfa065bcd
3
  size 437267512
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:abd3bdd0f741f5e5b6f048a75a917079d16800942b55877def72b0960a49fbb6
3
  size 437267512
runs/Oct28_12-24-19_4226ec852e37/events.out.tfevents.1730118268.4226ec852e37.15072.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1f953458099969e853938405d89f8773d212d198cd279c275ebc65411ee186b3
3
+ size 6965
runs/Oct28_12-54-21_4226ec852e37/events.out.tfevents.1730120075.4226ec852e37.15072.1 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:16ba2b09851b227dde2501004f43e5f37a25fb013b44f35b9792fe4e29d8a4fb
3
+ size 7648
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:0726a53cbe4f1e3a5439ba6347813fbc3e534ecc5947c07667f4636ff8178f3d
3
  size 5176
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:46eb2817d9234f82002f4ecb0050fe76839c9510dc0cb7d961bf7f25d967d3bb
3
  size 5176