init
for eval
- all_results.json +12 -0
- config.json +439 -0
- eval_results.json +8 -0
- preprocessor_config.json +23 -0
- pytorch_model.bin +3 -0
- train_results.json +7 -0
- trainer_state.json +775 -0
- training_args.bin +3 -0
all_results.json
ADDED
@@ -0,0 +1,12 @@
{
    "epoch": 49.95,
    "eval_accuracy": 0.8297674418604651,
    "eval_loss": 0.6420783400535583,
    "eval_runtime": 6.0573,
    "eval_samples_per_second": 354.943,
    "eval_steps_per_second": 0.825,
    "train_loss": 1.65314315032959,
    "train_runtime": 3746.3389,
    "train_samples_per_second": 286.947,
    "train_steps_per_second": 0.133
}
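A back-of-envelope reading of these numbers (derived arithmetic, not recorded anywhere in the commit): the eval throughput implies a validation split of roughly 2,150 images, and the train throughput implies roughly 21,500 training images per epoch.

```python
# Back-of-envelope checks derived from all_results.json (my arithmetic,
# not part of the commit itself).
eval_samples = 6.0573 * 354.943      # eval_runtime * eval_samples_per_second
train_views  = 3746.3389 * 286.947   # train_runtime * train_samples_per_second

print(round(eval_samples))           # ~2150  -> eval split of about 2150 images
print(round(train_views / 49.95))    # ~21500 -> ~21.5k training images per epoch
```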
config.json
ADDED
@@ -0,0 +1,439 @@
{
    "_name_or_path": "microsoft/resnet-18",
    "architectures": ["ResNetForImageClassification"],
    "depths": [2, 2, 2, 2],
    "downsample_in_first_stage": false,
    "embedding_size": 64,
    "hidden_act": "relu",
    "hidden_sizes": [64, 128, 256, 512],
    "id2label": {
        "0": "n01443537", "1": "n01629819", "2": "n01641577", "3": "n01644900", "4": "n01698640",
        "5": "n01742172", "6": "n01768244", "7": "n01770393", "8": "n01774384", "9": "n01774750",
        "10": "n01784675", "11": "n01882714", "12": "n01910747", "13": "n01917289", "14": "n01944390",
        "15": "n01950731", "16": "n01983481", "17": "n01984695", "18": "n02002724", "19": "n02056570",
        "20": "n02058221", "21": "n02074367", "22": "n02094433", "23": "n02099601", "24": "n02099712",
        "25": "n02106662", "26": "n02113799", "27": "n02123045", "28": "n02123394", "29": "n02124075",
        "30": "n02125311", "31": "n02129165", "32": "n02132136", "33": "n02165456", "34": "n02226429",
        "35": "n02231487", "36": "n02233338", "37": "n02236044", "38": "n02268443", "39": "n02279972",
        "40": "n02281406", "41": "n02321529", "42": "n02364673", "43": "n02395406", "44": "n02403003",
        "45": "n02410509", "46": "n02415577", "47": "n02423022", "48": "n02437312", "49": "n02480495",
        "50": "n02481823", "51": "n02486410", "52": "n02504458", "53": "n02509815", "54": "n02666347",
        "55": "n02669723", "56": "n02699494", "57": "n02769748", "58": "n02788148", "59": "n02791270",
        "60": "n02793495", "61": "n02795169", "62": "n02802426", "63": "n02808440", "64": "n02814533",
        "65": "n02814860", "66": "n02815834", "67": "n02823428", "68": "n02837789", "69": "n02841315",
        "70": "n02843684", "71": "n02883205", "72": "n02892201", "73": "n02909870", "74": "n02917067",
        "75": "n02927161", "76": "n02948072", "77": "n02950826", "78": "n02963159", "79": "n02977058",
        "80": "n02988304", "81": "n03014705", "82": "n03026506", "83": "n03042490", "84": "n03085013",
        "85": "n03089624", "86": "n03100240", "87": "n03126707", "88": "n03160309", "89": "n03179701",
        "90": "n03201208", "91": "n03255030", "92": "n03355925", "93": "n03373237", "94": "n03388043",
        "95": "n03393912", "96": "n03400231", "97": "n03404251", "98": "n03424325", "99": "n03444034",
        "100": "n03447447", "101": "n03544143", "102": "n03584254", "103": "n03599486", "104": "n03617480",
        "105": "n03637318", "106": "n03649909", "107": "n03662601", "108": "n03670208", "109": "n03706229",
        "110": "n03733131", "111": "n03763968", "112": "n03770439", "113": "n03796401", "114": "n03814639",
        "115": "n03837869", "116": "n03838899", "117": "n03854065", "118": "n03891332", "119": "n03902125",
        "120": "n03930313", "121": "n03937543", "122": "n03970156", "123": "n03977966", "124": "n03980874",
        "125": "n03983396", "126": "n03992509", "127": "n04008634", "128": "n04023962", "129": "n04070727",
        "130": "n04074963", "131": "n04099969", "132": "n04118538", "133": "n04133789", "134": "n04146614",
        "135": "n04149813", "136": "n04179913", "137": "n04251144", "138": "n04254777", "139": "n04259630",
        "140": "n04265275", "141": "n04275548", "142": "n04285008", "143": "n04311004", "144": "n04328186",
        "145": "n04356056", "146": "n04366367", "147": "n04371430", "148": "n04376876", "149": "n04398044",
        "150": "n04399382", "151": "n04417672", "152": "n04456115", "153": "n04465666", "154": "n04486054",
        "155": "n04487081", "156": "n04501370", "157": "n04507155", "158": "n04532106", "159": "n04532670",
        "160": "n04540053", "161": "n04560804", "162": "n04562935", "163": "n04596742", "164": "n04598010",
        "165": "n06596364", "166": "n07056680", "167": "n07583066", "168": "n07614500", "169": "n07615774",
        "170": "n07646821", "171": "n07647870", "172": "n07657664", "173": "n07695742", "174": "n07711569",
        "175": "n07715103", "176": "n07720875", "177": "n07749582", "178": "n07753592", "179": "n07768694",
        "180": "n07871810", "181": "n07873807", "182": "n07875152", "183": "n07920052", "184": "n07975909",
        "185": "n08496334", "186": "n08620881", "187": "n08742578", "188": "n09193705", "189": "n09246464",
        "190": "n09256479", "191": "n09332890", "192": "n09428293", "193": "n12267677", "194": "n12520864",
        "195": "n13001041", "196": "n13652335", "197": "n13652994", "198": "n13719102", "199": "n14991210"
    },
    "label2id": {
        "n01443537": 0, "n01629819": 1, "n01641577": 2, "n01644900": 3, "n01698640": 4,
        "n01742172": 5, "n01768244": 6, "n01770393": 7, "n01774384": 8, "n01774750": 9,
        "n01784675": 10, "n01882714": 11, "n01910747": 12, "n01917289": 13, "n01944390": 14,
        "n01950731": 15, "n01983481": 16, "n01984695": 17, "n02002724": 18, "n02056570": 19,
        "n02058221": 20, "n02074367": 21, "n02094433": 22, "n02099601": 23, "n02099712": 24,
        "n02106662": 25, "n02113799": 26, "n02123045": 27, "n02123394": 28, "n02124075": 29,
        "n02125311": 30, "n02129165": 31, "n02132136": 32, "n02165456": 33, "n02226429": 34,
        "n02231487": 35, "n02233338": 36, "n02236044": 37, "n02268443": 38, "n02279972": 39,
        "n02281406": 40, "n02321529": 41, "n02364673": 42, "n02395406": 43, "n02403003": 44,
        "n02410509": 45, "n02415577": 46, "n02423022": 47, "n02437312": 48, "n02480495": 49,
        "n02481823": 50, "n02486410": 51, "n02504458": 52, "n02509815": 53, "n02666347": 54,
        "n02669723": 55, "n02699494": 56, "n02769748": 57, "n02788148": 58, "n02791270": 59,
        "n02793495": 60, "n02795169": 61, "n02802426": 62, "n02808440": 63, "n02814533": 64,
        "n02814860": 65, "n02815834": 66, "n02823428": 67, "n02837789": 68, "n02841315": 69,
        "n02843684": 70, "n02883205": 71, "n02892201": 72, "n02909870": 73, "n02917067": 74,
        "n02927161": 75, "n02948072": 76, "n02950826": 77, "n02963159": 78, "n02977058": 79,
        "n02988304": 80, "n03014705": 81, "n03026506": 82, "n03042490": 83, "n03085013": 84,
        "n03089624": 85, "n03100240": 86, "n03126707": 87, "n03160309": 88, "n03179701": 89,
        "n03201208": 90, "n03255030": 91, "n03355925": 92, "n03373237": 93, "n03388043": 94,
        "n03393912": 95, "n03400231": 96, "n03404251": 97, "n03424325": 98, "n03444034": 99,
        "n03447447": 100, "n03544143": 101, "n03584254": 102, "n03599486": 103, "n03617480": 104,
        "n03637318": 105, "n03649909": 106, "n03662601": 107, "n03670208": 108, "n03706229": 109,
        "n03733131": 110, "n03763968": 111, "n03770439": 112, "n03796401": 113, "n03814639": 114,
        "n03837869": 115, "n03838899": 116, "n03854065": 117, "n03891332": 118, "n03902125": 119,
        "n03930313": 120, "n03937543": 121, "n03970156": 122, "n03977966": 123, "n03980874": 124,
        "n03983396": 125, "n03992509": 126, "n04008634": 127, "n04023962": 128, "n04070727": 129,
        "n04074963": 130, "n04099969": 131, "n04118538": 132, "n04133789": 133, "n04146614": 134,
        "n04149813": 135, "n04179913": 136, "n04251144": 137, "n04254777": 138, "n04259630": 139,
        "n04265275": 140, "n04275548": 141, "n04285008": 142, "n04311004": 143, "n04328186": 144,
        "n04356056": 145, "n04366367": 146, "n04371430": 147, "n04376876": 148, "n04398044": 149,
        "n04399382": 150, "n04417672": 151, "n04456115": 152, "n04465666": 153, "n04486054": 154,
        "n04487081": 155, "n04501370": 156, "n04507155": 157, "n04532106": 158, "n04532670": 159,
        "n04540053": 160, "n04560804": 161, "n04562935": 162, "n04596742": 163, "n04598010": 164,
        "n06596364": 165, "n07056680": 166, "n07583066": 167, "n07614500": 168, "n07615774": 169,
        "n07646821": 170, "n07647870": 171, "n07657664": 172, "n07695742": 173, "n07711569": 174,
        "n07715103": 175, "n07720875": 176, "n07749582": 177, "n07753592": 178, "n07768694": 179,
        "n07871810": 180, "n07873807": 181, "n07875152": 182, "n07920052": 183, "n07975909": 184,
        "n08496334": 185, "n08620881": 186, "n08742578": 187, "n09193705": 188, "n09246464": 189,
        "n09256479": 190, "n09332890": 191, "n09428293": 192, "n12267677": 193, "n12520864": 194,
        "n13001041": 195, "n13652335": 196, "n13652994": 197, "n13719102": 198, "n14991210": 199
    },
    "layer_type": "basic",
    "model_type": "resnet",
    "num_channels": 3,
    "out_features": null,
    "problem_type": "single_label_classification",
    "stage_names": ["stem", "stage1", "stage2", "stage3", "stage4"],
    "torch_dtype": "float32",
    "transformers_version": "4.26.1"
}
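The config describes a ResNet-18 (basic blocks, depths [2, 2, 2, 2], hidden sizes 64 to 512) fine-tuned for 200-way single-label classification; the labels are WordNet synset IDs, and the 200 wnids appear to match the Tiny ImageNet class set. A minimal loading sketch follows; the local path is an assumption based on the checkpoint directory named in trainer_state.json, so substitute the actual repo ID:

```python
# Minimal sketch: load this checkpoint with transformers and map a prediction
# back to its WordNet synset ID. The path below is a placeholder.
import torch
from PIL import Image
from transformers import AutoImageProcessor, AutoModelForImageClassification

path = "resnet-18-finetuned-resnet-18-1"   # hypothetical local dir / repo ID
processor = AutoImageProcessor.from_pretrained(path)
model = AutoModelForImageClassification.from_pretrained(path)

image = Image.open("example.jpg")          # any RGB image
inputs = processor(image, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits        # shape (1, 200)
wnid = model.config.id2label[logits.argmax(-1).item()]  # e.g. "n01443537"
print(wnid)
```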
eval_results.json
ADDED
@@ -0,0 +1,8 @@
{
    "epoch": 49.95,
    "eval_accuracy": 0.8297674418604651,
    "eval_loss": 0.6420783400535583,
    "eval_runtime": 6.0573,
    "eval_samples_per_second": 354.943,
    "eval_steps_per_second": 0.825
}
preprocessor_config.json
ADDED
@@ -0,0 +1,23 @@
{
    "crop_pct": 0.875,
    "do_normalize": true,
    "do_rescale": true,
    "do_resize": true,
    "feature_extractor_type": "ConvNextFeatureExtractor",
    "image_mean": [
        0.485,
        0.456,
        0.406
    ],
    "image_processor_type": "ConvNextFeatureExtractor",
    "image_std": [
        0.229,
        0.224,
        0.225
    ],
    "resample": 3,
    "rescale_factor": 0.00392156862745098,
    "size": {
        "shortest_edge": 224
    }
}
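For reference: image_mean/image_std are the standard ImageNet normalization statistics, rescale_factor is 1/255, and resample 3 is PIL bicubic. With shortest_edge 224 and crop_pct 0.875, the ConvNext-style processor should resize the short side to 224 / 0.875 = 256 and then center-crop to 224, i.e. the classic ImageNet eval pipeline. A rough torchvision equivalent (my sketch, not something shipped in this repo):

```python
# Approximate torchvision equivalent of preprocessor_config.json (a sketch;
# the repo itself uses ConvNextFeatureExtractor, not these transforms).
from torchvision import transforms
from torchvision.transforms import InterpolationMode

eval_transform = transforms.Compose([
    transforms.Resize(256, interpolation=InterpolationMode.BICUBIC),  # 224 / 0.875
    transforms.CenterCrop(224),
    transforms.ToTensor(),                       # applies rescale_factor 1/255
    transforms.Normalize(mean=[0.485, 0.456, 0.406],
                         std=[0.229, 0.224, 0.225]),
])
```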
pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0aa8d5652935be63cda6defc0428de0798b7c6501ee1307eb5a6c531f8df92b3
size 45197877
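Both .bin entries are Git LFS pointer files rather than the binaries themselves; the recorded 45,197,877 bytes is roughly what ~11.3M float32 parameters occupy, consistent with a ResNet-18 backbone plus a 512x200 classification head. A sketch for fetching the real files via huggingface_hub (the repo ID is a placeholder, it is not stated in the commit):

```python
# Sketch: download the full snapshot, including LFS-tracked binaries,
# with huggingface_hub. The repo_id below is a placeholder.
from huggingface_hub import snapshot_download

local_dir = snapshot_download(repo_id="<user>/<repo>")  # substitute the real repo ID
print(local_dir)  # pytorch_model.bin here is the actual ~45 MB weight file
```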
train_results.json
ADDED
@@ -0,0 +1,7 @@
{
    "epoch": 49.95,
    "train_loss": 1.65314315032959,
    "train_runtime": 3746.3389,
    "train_samples_per_second": 286.947,
    "train_steps_per_second": 0.133
}
trainer_state.json
ADDED
@@ -0,0 +1,775 @@
{
    "best_metric": 0.8297674418604651,
    "best_model_checkpoint": "resnet-18-finetuned-resnet-18-1/checkpoint-450",
    "epoch": 49.95238095238095,
    "global_step": 500,
    "is_hyper_param_search": false,
    "is_local_process_zero": true,
    "is_world_process_zero": true,
    "log_history": [
        {"epoch": 0.95, "learning_rate": 1e-05, "loss": 5.5547, "step": 10},
        {"epoch": 0.95, "eval_accuracy": 0.004186046511627907, "eval_loss": 5.4745025634765625, "eval_runtime": 6.1733, "eval_samples_per_second": 348.276, "eval_steps_per_second": 0.81, "step": 10},
        {"epoch": 1.95, "learning_rate": 2e-05, "loss": 5.668, "step": 20},
        {"epoch": 1.95, "eval_accuracy": 0.019069767441860466, "eval_loss": 5.200634479522705, "eval_runtime": 5.9989, "eval_samples_per_second": 358.401, "eval_steps_per_second": 0.833, "step": 20},
        {"epoch": 2.95, "learning_rate": 3e-05, "loss": 5.3055, "step": 30},
        {"epoch": 2.95, "eval_accuracy": 0.09023255813953489, "eval_loss": 4.731231212615967, "eval_runtime": 5.9716, "eval_samples_per_second": 360.039, "eval_steps_per_second": 0.837, "step": 30},
        {"epoch": 3.95, "learning_rate": 4e-05, "loss": 4.7641, "step": 40},
        {"epoch": 3.95, "eval_accuracy": 0.21813953488372093, "eval_loss": 4.053351879119873, "eval_runtime": 5.9761, "eval_samples_per_second": 359.764, "eval_steps_per_second": 0.837, "step": 40},
        {"epoch": 4.95, "learning_rate": 5e-05, "loss": 4.0761, "step": 50},
        {"epoch": 4.95, "eval_accuracy": 0.34325581395348836, "eval_loss": 3.1956703662872314, "eval_runtime": 6.1144, "eval_samples_per_second": 351.631, "eval_steps_per_second": 0.818, "step": 50},
        {"epoch": 5.95, "learning_rate": 4.888888888888889e-05, "loss": 3.3846, "step": 60},
        {"epoch": 5.95, "eval_accuracy": 0.4786046511627907, "eval_loss": 2.493035078048706, "eval_runtime": 5.9735, "eval_samples_per_second": 359.926, "eval_steps_per_second": 0.837, "step": 60},
        {"epoch": 6.95, "learning_rate": 4.7777777777777784e-05, "loss": 2.8319, "step": 70},
        {"epoch": 6.95, "eval_accuracy": 0.5623255813953488, "eval_loss": 2.0251505374908447, "eval_runtime": 5.9942, "eval_samples_per_second": 358.678, "eval_steps_per_second": 0.834, "step": 70},
        {"epoch": 7.95, "learning_rate": 4.666666666666667e-05, "loss": 2.4358, "step": 80},
        {"epoch": 7.95, "eval_accuracy": 0.6334883720930232, "eval_loss": 1.6915886402130127, "eval_runtime": 5.9886, "eval_samples_per_second": 359.016, "eval_steps_per_second": 0.835, "step": 80},
        {"epoch": 8.95, "learning_rate": 4.555555555555556e-05, "loss": 2.1433, "step": 90},
        {"epoch": 8.95, "eval_accuracy": 0.6637209302325582, "eval_loss": 1.456493616104126, "eval_runtime": 6.0018, "eval_samples_per_second": 358.223, "eval_steps_per_second": 0.833, "step": 90},
        {"epoch": 9.95, "learning_rate": 4.4444444444444447e-05, "loss": 1.9315, "step": 100},
        {"epoch": 9.95, "eval_accuracy": 0.7013953488372093, "eval_loss": 1.2676024436950684, "eval_runtime": 5.9984, "eval_samples_per_second": 358.426, "eval_steps_per_second": 0.834, "step": 100},
        {"epoch": 10.95, "learning_rate": 4.3333333333333334e-05, "loss": 1.7746, "step": 110},
        {"epoch": 10.95, "eval_accuracy": 0.7237209302325581, "eval_loss": 1.1530412435531616, "eval_runtime": 5.9931, "eval_samples_per_second": 358.749, "eval_steps_per_second": 0.834, "step": 110},
        {"epoch": 11.95, "learning_rate": 4.222222222222222e-05, "loss": 1.6467, "step": 120},
        {"epoch": 11.95, "eval_accuracy": 0.74, "eval_loss": 1.0685006380081177, "eval_runtime": 6.2755, "eval_samples_per_second": 342.602, "eval_steps_per_second": 0.797, "step": 120},
        {"epoch": 12.95, "learning_rate": 4.111111111111111e-05, "loss": 1.546, "step": 130},
        {"epoch": 12.95, "eval_accuracy": 0.7572093023255814, "eval_loss": 0.999431312084198, "eval_runtime": 5.987, "eval_samples_per_second": 359.114, "eval_steps_per_second": 0.835, "step": 130},
        {"epoch": 13.95, "learning_rate": 4e-05, "loss": 1.4734, "step": 140},
        {"epoch": 13.95, "eval_accuracy": 0.7660465116279069, "eval_loss": 0.946732223033905, "eval_runtime": 5.9853, "eval_samples_per_second": 359.213, "eval_steps_per_second": 0.835, "step": 140},
        {"epoch": 14.95, "learning_rate": 3.888888888888889e-05, "loss": 1.4163, "step": 150},
        {"epoch": 14.95, "eval_accuracy": 0.7753488372093024, "eval_loss": 0.9061232209205627, "eval_runtime": 5.9975, "eval_samples_per_second": 358.485, "eval_steps_per_second": 0.834, "step": 150},
        {"epoch": 15.95, "learning_rate": 3.777777777777778e-05, "loss": 1.3593, "step": 160},
        {"epoch": 15.95, "eval_accuracy": 0.7832558139534884, "eval_loss": 0.8717327117919922, "eval_runtime": 5.9972, "eval_samples_per_second": 358.502, "eval_steps_per_second": 0.834, "step": 160},
        {"epoch": 16.95, "learning_rate": 3.6666666666666666e-05, "loss": 1.3129, "step": 170},
        {"epoch": 16.95, "eval_accuracy": 0.7902325581395349, "eval_loss": 0.8438239097595215, "eval_runtime": 5.9855, "eval_samples_per_second": 359.203, "eval_steps_per_second": 0.835, "step": 170},
        {"epoch": 17.95, "learning_rate": 3.555555555555556e-05, "loss": 1.2843, "step": 180},
        {"epoch": 17.95, "eval_accuracy": 0.7986046511627907, "eval_loss": 0.8182681798934937, "eval_runtime": 5.985, "eval_samples_per_second": 359.23, "eval_steps_per_second": 0.835, "step": 180},
        {"epoch": 18.95, "learning_rate": 3.444444444444445e-05, "loss": 1.2527, "step": 190},
        {"epoch": 18.95, "eval_accuracy": 0.804186046511628, "eval_loss": 0.8000912070274353, "eval_runtime": 5.9829, "eval_samples_per_second": 359.357, "eval_steps_per_second": 0.836, "step": 190},
        {"epoch": 19.95, "learning_rate": 3.3333333333333335e-05, "loss": 1.2127, "step": 200},
        {"epoch": 19.95, "eval_accuracy": 0.804186046511628, "eval_loss": 0.7860467433929443, "eval_runtime": 5.9859, "eval_samples_per_second": 359.177, "eval_steps_per_second": 0.835, "step": 200},
        {"epoch": 20.95, "learning_rate": 3.222222222222223e-05, "loss": 1.1854, "step": 210},
        {"epoch": 20.95, "eval_accuracy": 0.8093023255813954, "eval_loss": 0.7675830125808716, "eval_runtime": 6.0054, "eval_samples_per_second": 358.008, "eval_steps_per_second": 0.833, "step": 210},
        {"epoch": 21.95, "learning_rate": 3.111111111111111e-05, "loss": 1.1574, "step": 220},
        {"epoch": 21.95, "eval_accuracy": 0.8079069767441861, "eval_loss": 0.7555623650550842, "eval_runtime": 5.9751, "eval_samples_per_second": 359.829, "eval_steps_per_second": 0.837, "step": 220},
        {"epoch": 22.95, "learning_rate": 3e-05, "loss": 1.1283, "step": 230},
        {"epoch": 22.95, "eval_accuracy": 0.8130232558139535, "eval_loss": 0.7396910190582275, "eval_runtime": 5.9779, "eval_samples_per_second": 359.658, "eval_steps_per_second": 0.836, "step": 230},
        {"epoch": 23.95, "learning_rate": 2.8888888888888888e-05, "loss": 1.1302, "step": 240},
        {"epoch": 23.95, "eval_accuracy": 0.8111627906976744, "eval_loss": 0.7319375276565552, "eval_runtime": 5.9855, "eval_samples_per_second": 359.199, "eval_steps_per_second": 0.835, "step": 240},
        {"epoch": 24.95, "learning_rate": 2.777777777777778e-05, "loss": 1.1032, "step": 250},
        {"epoch": 24.95, "eval_accuracy": 0.8176744186046512, "eval_loss": 0.718932569026947, "eval_runtime": 5.977, "eval_samples_per_second": 359.71, "eval_steps_per_second": 0.837, "step": 250},
        {"epoch": 25.95, "learning_rate": 2.6666666666666667e-05, "loss": 1.0891, "step": 260},
        {"epoch": 25.95, "eval_accuracy": 0.82, "eval_loss": 0.7135240435600281, "eval_runtime": 5.9814, "eval_samples_per_second": 359.449, "eval_steps_per_second": 0.836, "step": 260},
        {"epoch": 26.95, "learning_rate": 2.5555555555555554e-05, "loss": 1.0738, "step": 270},
        {"epoch": 26.95, "eval_accuracy": 0.8195348837209302, "eval_loss": 0.7008457779884338, "eval_runtime": 5.9894, "eval_samples_per_second": 358.966, "eval_steps_per_second": 0.835, "step": 270},
        {"epoch": 27.95, "learning_rate": 2.4444444444444445e-05, "loss": 1.0665, "step": 280},
        {"epoch": 27.95, "eval_accuracy": 0.8218604651162791, "eval_loss": 0.6941251754760742, "eval_runtime": 5.9997, "eval_samples_per_second": 358.352, "eval_steps_per_second": 0.833, "step": 280},
        {"epoch": 28.95, "learning_rate": 2.3333333333333336e-05, "loss": 1.0354, "step": 290},
        {"epoch": 28.95, "eval_accuracy": 0.8223255813953488, "eval_loss": 0.690199613571167, "eval_runtime": 5.992, "eval_samples_per_second": 358.814, "eval_steps_per_second": 0.834, "step": 290},
        {"epoch": 29.95, "learning_rate": 2.2222222222222223e-05, "loss": 1.0404, "step": 300},
        {"epoch": 29.95, "eval_accuracy": 0.8237209302325581, "eval_loss": 0.684846043586731, "eval_runtime": 5.9904, "eval_samples_per_second": 358.908, "eval_steps_per_second": 0.835, "step": 300},
        {"epoch": 30.95, "learning_rate": 2.111111111111111e-05, "loss": 1.0251, "step": 310},
        {"epoch": 30.95, "eval_accuracy": 0.8218604651162791, "eval_loss": 0.678667426109314, "eval_runtime": 5.9867, "eval_samples_per_second": 359.129, "eval_steps_per_second": 0.835, "step": 310},
        {"epoch": 31.95, "learning_rate": 2e-05, "loss": 1.0127, "step": 320},
        {"epoch": 31.95, "eval_accuracy": 0.8246511627906977, "eval_loss": 0.673900306224823, "eval_runtime": 5.9929, "eval_samples_per_second": 358.756, "eval_steps_per_second": 0.834, "step": 320},
        {"epoch": 32.95, "learning_rate": 1.888888888888889e-05, "loss": 1.0023, "step": 330},
        {"epoch": 32.95, "eval_accuracy": 0.8255813953488372, "eval_loss": 0.6712960004806519, "eval_runtime": 5.9994, "eval_samples_per_second": 358.368, "eval_steps_per_second": 0.833, "step": 330},
        {"epoch": 33.95, "learning_rate": 1.777777777777778e-05, "loss": 1.0012, "step": 340},
        {"epoch": 33.95, "eval_accuracy": 0.8246511627906977, "eval_loss": 0.6670580506324768, "eval_runtime": 6.0038, "eval_samples_per_second": 358.108, "eval_steps_per_second": 0.833, "step": 340},
        {"epoch": 34.95, "learning_rate": 1.6666666666666667e-05, "loss": 0.9835, "step": 350},
        {"epoch": 34.95, "eval_accuracy": 0.8251162790697675, "eval_loss": 0.6612110733985901, "eval_runtime": 5.9862, "eval_samples_per_second": 359.158, "eval_steps_per_second": 0.835, "step": 350},
        {"epoch": 35.95, "learning_rate": 1.5555555555555555e-05, "loss": 0.982, "step": 360},
        {"epoch": 35.95, "eval_accuracy": 0.8251162790697675, "eval_loss": 0.6587132215499878, "eval_runtime": 6.0051, "eval_samples_per_second": 358.031, "eval_steps_per_second": 0.833, "step": 360},
        {"epoch": 36.95, "learning_rate": 1.4444444444444444e-05, "loss": 0.9849, "step": 370},
        {"epoch": 36.95, "eval_accuracy": 0.8251162790697675, "eval_loss": 0.6563166975975037, "eval_runtime": 6.0073, "eval_samples_per_second": 357.899, "eval_steps_per_second": 0.832, "step": 370},
        {"epoch": 37.95, "learning_rate": 1.3333333333333333e-05, "loss": 0.9645, "step": 380},
        {"epoch": 37.95, "eval_accuracy": 0.8232558139534883, "eval_loss": 0.652894914150238, "eval_runtime": 6.0024, "eval_samples_per_second": 358.193, "eval_steps_per_second": 0.833, "step": 380},
        {"epoch": 38.95, "learning_rate": 1.2222222222222222e-05, "loss": 0.947, "step": 390},
        {"epoch": 38.95, "eval_accuracy": 0.8283720930232558, "eval_loss": 0.6512119770050049, "eval_runtime": 5.9854, "eval_samples_per_second": 359.208, "eval_steps_per_second": 0.835, "step": 390},
        {"epoch": 39.95, "learning_rate": 1.1111111111111112e-05, "loss": 0.9563, "step": 400},
        {"epoch": 39.95, "eval_accuracy": 0.8265116279069767, "eval_loss": 0.6485108733177185, "eval_runtime": 5.9994, "eval_samples_per_second": 358.371, "eval_steps_per_second": 0.833, "step": 400},
        {"epoch": 40.95, "learning_rate": 1e-05, "loss": 0.9619, "step": 410},
        {"epoch": 40.95, "eval_accuracy": 0.826046511627907, "eval_loss": 0.6457317471504211, "eval_runtime": 5.9978, "eval_samples_per_second": 358.466, "eval_steps_per_second": 0.834, "step": 410},
        {"epoch": 41.95, "learning_rate": 8.88888888888889e-06, "loss": 0.9399, "step": 420},
        {"epoch": 41.95, "eval_accuracy": 0.8283720930232558, "eval_loss": 0.6446535587310791, "eval_runtime": 5.9698, "eval_samples_per_second": 360.144, "eval_steps_per_second": 0.838, "step": 420},
        {"epoch": 42.95, "learning_rate": 7.777777777777777e-06, "loss": 0.9423, "step": 430},
        {"epoch": 42.95, "eval_accuracy": 0.8288372093023256, "eval_loss": 0.6421455144882202, "eval_runtime": 5.9718, "eval_samples_per_second": 360.027, "eval_steps_per_second": 0.837, "step": 430},
        {"epoch": 43.95, "learning_rate": 6.666666666666667e-06, "loss": 0.9482, "step": 440},
        {"epoch": 43.95, "eval_accuracy": 0.8283720930232558, "eval_loss": 0.6426512002944946, "eval_runtime": 6.012, "eval_samples_per_second": 357.619, "eval_steps_per_second": 0.832, "step": 440},
        {"epoch": 44.95, "learning_rate": 5.555555555555556e-06, "loss": 0.9315, "step": 450},
        {"epoch": 44.95, "eval_accuracy": 0.8297674418604651, "eval_loss": 0.6420783400535583, "eval_runtime": 6.2892, "eval_samples_per_second": 341.856, "eval_steps_per_second": 0.795, "step": 450},
        {"epoch": 45.95, "learning_rate": 4.444444444444445e-06, "loss": 0.9411, "step": 460},
        {"epoch": 45.95, "eval_accuracy": 0.8293023255813954, "eval_loss": 0.6401400566101074, "eval_runtime": 5.9778, "eval_samples_per_second": 359.664, "eval_steps_per_second": 0.836, "step": 460},
        {"epoch": 46.95, "learning_rate": 3.3333333333333333e-06, "loss": 0.9249, "step": 470},
        {"epoch": 46.95, "eval_accuracy": 0.8297674418604651, "eval_loss": 0.639735758304596, "eval_runtime": 5.9937, "eval_samples_per_second": 358.711, "eval_steps_per_second": 0.834, "step": 470},
        {"epoch": 47.95, "learning_rate": 2.2222222222222225e-06, "loss": 0.9361, "step": 480},
        {"epoch": 47.95, "eval_accuracy": 0.8293023255813954, "eval_loss": 0.6406731605529785, "eval_runtime": 5.9924, "eval_samples_per_second": 358.789, "eval_steps_per_second": 0.834, "step": 480},
        {"epoch": 48.95, "learning_rate": 1.1111111111111112e-06, "loss": 0.952, "step": 490},
        {"epoch": 48.95, "eval_accuracy": 0.8297674418604651, "eval_loss": 0.6389557719230652, "eval_runtime": 5.9868, "eval_samples_per_second": 359.122, "eval_steps_per_second": 0.835, "step": 490},
        {"epoch": 49.95, "learning_rate": 0.0, "loss": 0.9358, "step": 500},
        {"epoch": 49.95, "eval_accuracy": 0.8297674418604651, "eval_loss": 0.6392757296562195, "eval_runtime": 6.0005, "eval_samples_per_second": 358.304, "eval_steps_per_second": 0.833, "step": 500},
        {"epoch": 49.95, "step": 500, "total_flos": 1.0940562332139848e+19, "train_loss": 1.65314315032959, "train_runtime": 3746.3389, "train_samples_per_second": 286.947, "train_steps_per_second": 0.133}
    ],
    "max_steps": 500,
    "num_train_epochs": 50,
    "total_flos": 1.0940562332139848e+19,
    "trial_name": null,
    "trial_params": null
}
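The learning_rate column in log_history traces a linear schedule with warmup: it climbs to a 5e-05 peak over the first 50 steps (10% of 500), then decays linearly to 0 at step 500, and the best checkpoint (82.98% eval accuracy) lands at step 450. A small sketch that reproduces the logged values; the peak and warmup figures are read off the log, not stated explicitly anywhere in the commit:

```python
# Reconstructs the LR schedule implied by trainer_state.json's log_history:
# linear warmup to 5e-05 over 50 steps, then linear decay to 0 at step 500.
def lr_at(step, peak=5e-05, warmup=50, total=500):
    if step < warmup:
        return peak * step / warmup
    return peak * (total - step) / (total - warmup)

# Matches the logged values, e.g. 1e-05 at step 10, the 5e-05 peak at step 50,
# 4.888...e-05 at step 60, 1e-05 again at step 410, and 0.0 at step 500.
for step in (10, 50, 60, 410, 500):
    print(step, lr_at(step))
```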
training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c48b7daf3d1583c8f860136303a6a248e4ac8d1083452c4af331e9f90a8477aa
size 3515