Commit d179cea
Parent(s): 0705032

Saving weights and logs of step 288938
events.out.tfevents.1657565171.t1v-n-f83092c9-w-0.2078193.0.v2 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:f9423226e9c4ba2d8a94fd6095a67126697361bd64a44c76a2d8cf2ac8416e92
+size 42777907
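
This events file is stored with Git LFS, so the diff above shows only its pointer being updated to a new object (42777907 bytes). Once the real file is fetched (e.g. via git lfs pull), it can be inspected with TensorBoard's event-processing API. A minimal Python sketch, assuming the tensorboard package is installed; the scalar tags this run logs are not visible in the diff, so none are hard-coded:

from tensorboard.backend.event_processing import event_accumulator

# Point the accumulator at the fetched events file (path as in this commit).
acc = event_accumulator.EventAccumulator(
    "events.out.tfevents.1657565171.t1v-n-f83092c9-w-0.2078193.0.v2"
)
acc.Reload()                  # parse all events from disk
print(acc.Tags()["scalars"])  # list whichever scalar tags the run logged
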
flax_model.msgpack CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:358d45320ccd11cca9046b8fde354bd95e232b215282c863f9bfa8eac5baad7c
 size 498589677
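
flax_model.msgpack is likewise tracked with Git LFS: the repository stores only this three-line pointer, whose oid line is rewritten each time new weights are saved (the size stayed at 498589677 bytes here). A minimal Python sketch of reading such a pointer from a local checkout; the helper name and path are illustrative, and the key/value layout follows the spec URL embedded in the pointer itself:

def parse_lfs_pointer(path: str) -> dict:
    """Map each pointer key ("version", "oid", "size") to its value."""
    fields = {}
    with open(path, encoding="utf-8") as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            if key:
                fields[key] = value
    return fields

pointer = parse_lfs_pointer("flax_model.msgpack")  # hypothetical checkout path
print(pointer["oid"])   # sha256:358d4532... after this commit
print(pointer["size"])  # 498589677
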
logs/2022-07-11-roberta-base.txt CHANGED
@@ -284,3 +284,289 @@ Step... (141500/437388 | Training Loss: -9.977737136068754e-06, Learning Rate: 1
 Step... (142000/437388 | Training Loss: -9.97265669866465e-06, Learning Rate: 1.3506954928743653e-05)
 Step... (142500/437388 | Training Loss: -9.97040478978306e-06, Learning Rate: 1.3484091141435783e-05)
 Step... (143000/437388 | Training Loss: -9.97698589344509e-06, Learning Rate: 1.3461228263622615e-05)
+Step... (146000/437388 | Training Loss: -9.630007298255805e-06, Learning Rate: 1.3445407603285275e-05)
+Step... (146500/437388 | Training Loss: -9.965676326828543e-06, Learning Rate: 1.3422543815977406e-05)
+Step... (147000/437388 | Training Loss: -9.72058842307888e-06, Learning Rate: 1.339968184765894e-05)
+Step... (147500/437388 | Training Loss: -9.968893209588714e-06, Learning Rate: 1.3376818969845772e-05)
+Step... (148000/437388 | Training Loss: -9.978568414226174e-06, Learning Rate: 1.3353955182537902e-05)
+Step... (148500/437388 | Training Loss: -9.978695743484423e-06, Learning Rate: 1.3331092304724734e-05)
+Step... (149000/437388 | Training Loss: -9.98615360003896e-06, Learning Rate: 1.3308229426911566e-05)
+Step... (149500/437388 | Training Loss: -9.963588126993272e-06, Learning Rate: 1.32853674585931e-05)
+Step... (150000/437388 | Training Loss: -9.972392945201136e-06, Learning Rate: 1.3262502761790529e-05)
+Step... (150500/437388 | Training Loss: -9.998041605285835e-06, Learning Rate: 1.3239640793472063e-05)
+Step... (151000/437388 | Training Loss: -9.98711766442284e-06, Learning Rate: 1.3216776096669491e-05)
+Step... (151500/437388 | Training Loss: -9.854455129243433e-06, Learning Rate: 1.3193914128351025e-05)
+Step... (152000/437388 | Training Loss: -9.965891877072863e-06, Learning Rate: 1.3171051250537857e-05)
+Step... (152500/437388 | Training Loss: -9.975628927350044e-06, Learning Rate: 1.314818837272469e-05)
+Step... (153000/437388 | Training Loss: -9.9787357612513e-06, Learning Rate: 1.3125325494911522e-05)
+Step... (153500/437388 | Training Loss: -9.78644129645545e-06, Learning Rate: 1.3102461707603652e-05)
+Step... (154000/437388 | Training Loss: -9.984996722778305e-06, Learning Rate: 1.3079599739285186e-05)
+Step... (154500/437388 | Training Loss: -9.972067346097901e-06, Learning Rate: 1.3056735951977316e-05)
+Step... (155000/437388 | Training Loss: -9.980914910556749e-06, Learning Rate: 1.303387398365885e-05)
+Step... (155500/437388 | Training Loss: -9.970542123483028e-06, Learning Rate: 1.301101019635098e-05)
+Step... (156000/437388 | Training Loss: -9.967713594960514e-06, Learning Rate: 1.2988147318537813e-05)
+Step... (156500/437388 | Training Loss: -9.9806848083972e-06, Learning Rate: 1.2965284440724645e-05)
+Step... (157000/437388 | Training Loss: -9.98432187770959e-06, Learning Rate: 1.2942421562911477e-05)
+Step... (157500/437388 | Training Loss: -9.975950888474472e-06, Learning Rate: 1.291955959459301e-05)
+Step... (158000/437388 | Training Loss: -9.968489393941127e-06, Learning Rate: 1.289669489779044e-05)
+Step... (158500/437388 | Training Loss: -9.981140465242788e-06, Learning Rate: 1.2873832929471973e-05)
+Step... (159000/437388 | Training Loss: -9.988563760998659e-06, Learning Rate: 1.2850970051658805e-05)
+Step... (159500/437388 | Training Loss: -9.971276085707359e-06, Learning Rate: 1.2828106264350936e-05)
+Step... (160000/437388 | Training Loss: -9.995190339395776e-06, Learning Rate: 1.2805243386537768e-05)
+Step... (160500/437388 | Training Loss: -9.97090683085844e-06, Learning Rate: 1.27823805087246e-05)
+Step... (161000/437388 | Training Loss: -9.982477422454394e-06, Learning Rate: 1.2759518540406134e-05)
+Step... (161500/437388 | Training Loss: -9.980572940548882e-06, Learning Rate: 1.2736653843603563e-05)
+Step... (162000/437388 | Training Loss: -9.981999028241262e-06, Learning Rate: 1.2713791875285096e-05)
+Step... (162500/437388 | Training Loss: -9.979303285945207e-06, Learning Rate: 1.2690928087977227e-05)
+Step... (163000/437388 | Training Loss: -9.984481948777102e-06, Learning Rate: 1.266806611965876e-05)
+Step... (163500/437388 | Training Loss: -9.98444011202082e-06, Learning Rate: 1.2645202332350891e-05)
+Step... (164000/437388 | Training Loss: -9.993951607611962e-06, Learning Rate: 1.2622339454537723e-05)
+Step... (164500/437388 | Training Loss: -9.982698429666925e-06, Learning Rate: 1.2599476576724555e-05)
+Step... (165000/437388 | Training Loss: -9.805340596358292e-06, Learning Rate: 1.2576612789416686e-05)
+Step... (165500/437388 | Training Loss: -9.72814996202942e-06, Learning Rate: 1.255375082109822e-05)
+Step... (166000/437388 | Training Loss: -9.966535799321719e-06, Learning Rate: 1.253088703379035e-05)
+Step... (166500/437388 | Training Loss: -9.917591341945808e-06, Learning Rate: 1.2508025065471884e-05)
+Step... (167000/437388 | Training Loss: -9.977657100534998e-06, Learning Rate: 1.2485161278164014e-05)
+Step... (167500/437388 | Training Loss: -9.982955816667527e-06, Learning Rate: 1.2462298400350846e-05)
+Step... (168000/437388 | Training Loss: -1.0105099136126228e-05, Learning Rate: 1.2439435522537678e-05)
+Step... (168500/437388 | Training Loss: -9.98173163679894e-06, Learning Rate: 1.241657264472451e-05)
+Step... (169000/437388 | Training Loss: -9.995868822443299e-06, Learning Rate: 1.2393710676406045e-05)
+Step... (169500/437388 | Training Loss: -9.98168525256915e-06, Learning Rate: 1.2370845979603473e-05)
+Step... (170000/437388 | Training Loss: -9.965764547814615e-06, Learning Rate: 1.2347984011285007e-05)
+Step... (170500/437388 | Training Loss: -9.795734513318166e-06, Learning Rate: 1.2325119314482436e-05)
+Step... (171000/437388 | Training Loss: -9.98737959889695e-06, Learning Rate: 1.230225734616397e-05)
+Step... (171500/437388 | Training Loss: -9.990029866457917e-06, Learning Rate: 1.2279394468350802e-05)
+Step... (172000/437388 | Training Loss: -9.971741746994667e-06, Learning Rate: 1.2256531590537634e-05)
+Step... (172500/437388 | Training Loss: -9.986046279664151e-06, Learning Rate: 1.2233669622219168e-05)
+Step... (173000/437388 | Training Loss: -9.985391443478875e-06, Learning Rate: 1.2210804925416596e-05)
+Step... (173500/437388 | Training Loss: -1.0004563591792248e-05, Learning Rate: 1.218794295709813e-05)
+Step... (174000/437388 | Training Loss: -9.979454262065701e-06, Learning Rate: 1.216507916979026e-05)
+Step... (174500/437388 | Training Loss: -1.0027236385212746e-05, Learning Rate: 1.2142217201471794e-05)
+Step... (175000/437388 | Training Loss: -9.978477464755997e-06, Learning Rate: 1.2119353414163925e-05)
+Step... (175500/437388 | Training Loss: -9.809462426346727e-06, Learning Rate: 1.2096490536350757e-05)
+Step... (176000/437388 | Training Loss: -9.970652172341943e-06, Learning Rate: 1.2073627658537589e-05)
+Step... (176500/437388 | Training Loss: -9.984391908801626e-06, Learning Rate: 1.205076387122972e-05)
+Step... (177000/437388 | Training Loss: -9.953098924597725e-06, Learning Rate: 1.2027901902911253e-05)
+Step... (177500/437388 | Training Loss: -1.0114017641171813e-05, Learning Rate: 1.2005038115603384e-05)
+Step... (178000/437388 | Training Loss: -9.99200165097136e-06, Learning Rate: 1.1982176147284918e-05)
+Step... (178500/437388 | Training Loss: -9.968525773729198e-06, Learning Rate: 1.195931326947175e-05)
+Step... (179000/437388 | Training Loss: -9.986906661652029e-06, Learning Rate: 1.193644948216388e-05)
+Step... (179500/437388 | Training Loss: -9.977569789043628e-06, Learning Rate: 1.1913586604350712e-05)
+Step... (180000/437388 | Training Loss: -9.949051673174836e-06, Learning Rate: 1.1890723726537544e-05)
+Step... (180500/437388 | Training Loss: -9.964109267457388e-06, Learning Rate: 1.1867861758219078e-05)
+Step... (181000/437388 | Training Loss: -9.971396139007993e-06, Learning Rate: 1.1844997061416507e-05)
+Step... (181500/437388 | Training Loss: -9.980771210393868e-06, Learning Rate: 1.182213509309804e-05)
+Step... (182000/437388 | Training Loss: -9.988624697143678e-06, Learning Rate: 1.179927039629547e-05)
+Step... (182500/437388 | Training Loss: -9.988372767111287e-06, Learning Rate: 1.1776408427977003e-05)
+Step... (183000/437388 | Training Loss: -9.979199603549205e-06, Learning Rate: 1.1753545550163835e-05)
+Step... (183500/437388 | Training Loss: -9.967914593289606e-06, Learning Rate: 1.1730682672350667e-05)
+Step... (184000/437388 | Training Loss: -9.89844193099998e-06, Learning Rate: 1.17078197945375e-05)
+Step... (184500/437388 | Training Loss: -1.0040076631412376e-05, Learning Rate: 1.168495600722963e-05)
+Step... (185000/437388 | Training Loss: -9.980107279261574e-06, Learning Rate: 1.1662094038911164e-05)
+Step... (185500/437388 | Training Loss: -9.925603080773726e-06, Learning Rate: 1.1639230251603294e-05)
+Step... (186000/437388 | Training Loss: -9.989335012505762e-06, Learning Rate: 1.1616368283284828e-05)
+Step... (186500/437388 | Training Loss: -9.844949090620503e-06, Learning Rate: 1.1593504495976958e-05)
+Step... (187000/437388 | Training Loss: -9.827876965573523e-06, Learning Rate: 1.157064161816379e-05)
+Step... (187500/437388 | Training Loss: -9.985111319110729e-06, Learning Rate: 1.1547778740350623e-05)
+Step... (188000/437388 | Training Loss: -9.98419909592485e-06, Learning Rate: 1.1524914953042753e-05)
+Step... (188500/437388 | Training Loss: -9.822408173931763e-06, Learning Rate: 1.1502053894218989e-05)
+Step... (189000/437388 | Training Loss: -9.985793440137058e-06, Learning Rate: 1.1479189197416417e-05)
+Step... (189500/437388 | Training Loss: -9.969493476091884e-06, Learning Rate: 1.1456327229097951e-05)
+Step... (190000/437388 | Training Loss: -9.972950465453323e-06, Learning Rate: 1.1433464351284783e-05)
+Step... (190500/437388 | Training Loss: -9.980501090467442e-06, Learning Rate: 1.1410600563976914e-05)
+Step... (191000/437388 | Training Loss: -9.985233191400766e-06, Learning Rate: 1.1387737686163746e-05)
+Step... (191500/437388 | Training Loss: -9.974917702493258e-06, Learning Rate: 1.1364874808350578e-05)
+Step... (192000/437388 | Training Loss: -9.982271876651794e-06, Learning Rate: 1.1342012840032112e-05)
+Step... (192500/437388 | Training Loss: -9.983750715036876e-06, Learning Rate: 1.131914814322954e-05)
+Step... (193000/437388 | Training Loss: -9.986609256884549e-06, Learning Rate: 1.1296286174911074e-05)
+Step... (193500/437388 | Training Loss: -9.981191396946087e-06, Learning Rate: 1.1273422387603205e-05)
+Step... (194000/437388 | Training Loss: -9.990637408918701e-06, Learning Rate: 1.1250560419284739e-05)
+Step... (194500/437388 | Training Loss: -9.983663403545506e-06, Learning Rate: 1.1227696631976869e-05)
+Step... (195000/437388 | Training Loss: -9.875216164800804e-06, Learning Rate: 1.1204833754163701e-05)
+Step... (195500/437388 | Training Loss: -9.98624091153033e-06, Learning Rate: 1.1181970876350533e-05)
+Step... (196000/437388 | Training Loss: -9.985752512875479e-06, Learning Rate: 1.1159107089042664e-05)
+Step... (196500/437388 | Training Loss: -9.983903510146774e-06, Learning Rate: 1.1136245120724197e-05)
+Step... (197000/437388 | Training Loss: -9.985045835492201e-06, Learning Rate: 1.1113381333416328e-05)
+Step... (197500/437388 | Training Loss: -9.98608174995752e-06, Learning Rate: 1.1090519365097862e-05)
+Step... (198000/437388 | Training Loss: -9.988999408960808e-06, Learning Rate: 1.1067655577789992e-05)
+Step... (198500/437388 | Training Loss: -9.985302312998101e-06, Learning Rate: 1.1044792699976824e-05)
+Step... (199000/437388 | Training Loss: -9.98492214421276e-06, Learning Rate: 1.1021929822163656e-05)
+Step... (199500/437388 | Training Loss: -9.997691449825652e-06, Learning Rate: 1.0999066944350488e-05)
+Step... (200000/437388 | Training Loss: -9.995275831897743e-06, Learning Rate: 1.0976204976032022e-05)
+Step... (200500/437388 | Training Loss: -9.980434697354212e-06, Learning Rate: 1.0953340279229451e-05)
+Step... (201000/437388 | Training Loss: -9.986708391807042e-06, Learning Rate: 1.0930478310910985e-05)
+Step... (201500/437388 | Training Loss: -9.995856089517474e-06, Learning Rate: 1.0907613614108413e-05)
+Step... (202000/437388 | Training Loss: -9.988008969230577e-06, Learning Rate: 1.0884751645789947e-05)
+Step... (202500/437388 | Training Loss: -9.97873030428309e-06, Learning Rate: 1.086188876797678e-05)
+Step... (203000/437388 | Training Loss: -9.833105650614016e-06, Learning Rate: 1.0839025890163612e-05)
+Step... (203500/437388 | Training Loss: -9.990989383368287e-06, Learning Rate: 1.0816162102855742e-05)
+Step... (204000/437388 | Training Loss: -9.993271305575036e-06, Learning Rate: 1.0793299225042574e-05)
+Step... (204500/437388 | Training Loss: -9.977482477552257e-06, Learning Rate: 1.0770437256724108e-05)
+Step... (205000/437388 | Training Loss: -9.986313671106473e-06, Learning Rate: 1.0747573469416238e-05)
+Step... (205500/437388 | Training Loss: -9.995596883527469e-06, Learning Rate: 1.0724711501097772e-05)
+Step... (206000/437388 | Training Loss: -9.83860172709683e-06, Learning Rate: 1.0701847713789903e-05)
+Step... (206500/437388 | Training Loss: -9.98814903141465e-06, Learning Rate: 1.0678984835976735e-05)
+Step... (207000/437388 | Training Loss: -9.978743037208915e-06, Learning Rate: 1.0656121958163567e-05)
+Step... (207500/437388 | Training Loss: -9.858611520030536e-06, Learning Rate: 1.0633258170855697e-05)
+Step... (208000/437388 | Training Loss: -9.97613460640423e-06, Learning Rate: 1.0610396202537231e-05)
+Step... (208500/437388 | Training Loss: -9.987576049752533e-06, Learning Rate: 1.0587532415229362e-05)
+Step... (209000/437388 | Training Loss: -9.988930287363473e-06, Learning Rate: 1.0564670446910895e-05)
+Step... (209500/437388 | Training Loss: -9.988874808186665e-06, Learning Rate: 1.0541807569097728e-05)
+Step... (210000/437388 | Training Loss: -9.970885002985597e-06, Learning Rate: 1.0518943781789858e-05)
+Step... (210500/437388 | Training Loss: -9.983324162021745e-06, Learning Rate: 1.049608090397669e-05)
+Step... (211000/437388 | Training Loss: -9.991470506065525e-06, Learning Rate: 1.0473218026163522e-05)
+Step... (211500/437388 | Training Loss: -9.98959058051696e-06, Learning Rate: 1.0450356057845056e-05)
+Step... (212000/437388 | Training Loss: -9.996974768000655e-06, Learning Rate: 1.0427491361042485e-05)
+Step... (212500/437388 | Training Loss: -9.991736078518443e-06, Learning Rate: 1.0404629392724019e-05)
+Step... (213000/437388 | Training Loss: -9.993411367759109e-06, Learning Rate: 1.0381764695921447e-05)
+Step... (213500/437388 | Training Loss: -9.990038051910233e-06, Learning Rate: 1.0358902727602981e-05)
+Step... (214000/437388 | Training Loss: -9.994332685892005e-06, Learning Rate: 1.0336039849789813e-05)
+Step... (214500/437388 | Training Loss: -9.982057235902175e-06, Learning Rate: 1.0313176971976645e-05)
+Step... (215000/437388 | Training Loss: -9.99447911453899e-06, Learning Rate: 1.0290314094163477e-05)
+Step... (215500/437388 | Training Loss: -9.990168109652586e-06, Learning Rate: 1.0267450306855608e-05)
+Step... (216000/437388 | Training Loss: -9.987637895392254e-06, Learning Rate: 1.0244588338537142e-05)
+Step... (216500/437388 | Training Loss: -9.981984476326033e-06, Learning Rate: 1.0221724551229272e-05)
+Step... (217000/437388 | Training Loss: -9.984448297473136e-06, Learning Rate: 1.0198862582910806e-05)
+Step... (217500/437388 | Training Loss: -9.993505045713391e-06, Learning Rate: 1.0175998795602936e-05)
+Step... (218000/437388 | Training Loss: -9.99086751107825e-06, Learning Rate: 1.0153135917789768e-05)
+Step... (218500/437388 | Training Loss: -9.909585060086101e-06, Learning Rate: 1.01302730399766e-05)
+Step... (219000/437388 | Training Loss: -1.0126263987331185e-05, Learning Rate: 1.0107409252668731e-05)
+Step... (219500/437388 | Training Loss: -9.997625966207124e-06, Learning Rate: 1.0084548193844967e-05)
+Step... (220000/437388 | Training Loss: -9.99296025838703e-06, Learning Rate: 1.0061683497042395e-05)
+Step... (220500/437388 | Training Loss: -9.985401447920594e-06, Learning Rate: 1.0038821528723929e-05)
+Step... (221000/437388 | Training Loss: -9.983653399103787e-06, Learning Rate: 1.0015958650910761e-05)
+Step... (221500/437388 | Training Loss: -9.855179087026045e-06, Learning Rate: 9.993094863602892e-06)
+Step... (222000/437388 | Training Loss: -9.978019079426304e-06, Learning Rate: 9.970231985789724e-06)
+Step... (222500/437388 | Training Loss: -9.991362276196014e-06, Learning Rate: 9.947369107976556e-06)
+Step... (223000/437388 | Training Loss: -9.984462849388365e-06, Learning Rate: 9.924505320668686e-06)
+Step... (223500/437388 | Training Loss: -9.991919796448201e-06, Learning Rate: 9.901642442855518e-06)
+Step... (224000/437388 | Training Loss: -9.852660696196835e-06, Learning Rate: 9.878780474537052e-06)
+Step... (224500/437388 | Training Loss: -9.99262192635797e-06, Learning Rate: 9.855917596723884e-06)
+Step... (225000/437388 | Training Loss: -9.992707418859936e-06, Learning Rate: 9.833054718910716e-06)
+Step... (225500/437388 | Training Loss: -9.999449503084179e-06, Learning Rate: 9.810190931602847e-06)
+Step... (226000/437388 | Training Loss: -9.58389227889711e-06, Learning Rate: 9.787328053789679e-06)
+Step... (226500/437388 | Training Loss: -9.977780791814439e-06, Learning Rate: 9.764465175976511e-06)
+Step... (227000/437388 | Training Loss: -9.989915270125493e-06, Learning Rate: 9.741601388668641e-06)
+Step... (227500/437388 | Training Loss: -9.993409548769705e-06, Learning Rate: 9.718738510855474e-06)
+Step... (228000/437388 | Training Loss: -9.988070814870298e-06, Learning Rate: 9.695876542537007e-06)
+Step... (228500/437388 | Training Loss: -9.985919859900605e-06, Learning Rate: 9.67301366472384e-06)
+Step... (229000/437388 | Training Loss: -9.696464985609055e-06, Learning Rate: 9.65014987741597e-06)
+Step... (229500/437388 | Training Loss: -9.992392733693123e-06, Learning Rate: 9.627286999602802e-06)
+Step... (230000/437388 | Training Loss: -9.97125061985571e-06, Learning Rate: 9.604424121789634e-06)
+Step... (230500/437388 | Training Loss: -9.985516044253018e-06, Learning Rate: 9.581561243976466e-06)
+Step... (231000/437388 | Training Loss: -9.988828423956875e-06, Learning Rate: 9.558697456668597e-06)
+Step... (231500/437388 | Training Loss: -9.853993105934933e-06, Learning Rate: 9.535834578855429e-06)
+Step... (232000/437388 | Training Loss: -9.98645555227995e-06, Learning Rate: 9.512972610536963e-06)
+Step... (232500/437388 | Training Loss: -9.985138603951782e-06, Learning Rate: 9.490109732723795e-06)
+Step... (233000/437388 | Training Loss: -9.986102668335661e-06, Learning Rate: 9.467245945415925e-06)
+Step... (233500/437388 | Training Loss: -9.983748896047473e-06, Learning Rate: 9.444383067602757e-06)
+Step... (234000/437388 | Training Loss: -9.987346857087687e-06, Learning Rate: 9.42152018978959e-06)
+Step... (234500/437388 | Training Loss: -9.971370673156343e-06, Learning Rate: 9.39865640248172e-06)
+Step... (235000/437388 | Training Loss: -9.881186997517943e-06, Learning Rate: 9.375793524668552e-06)
+Step... (235500/437388 | Training Loss: -9.987215889850631e-06, Learning Rate: 9.352930646855384e-06)
+Step... (236000/437388 | Training Loss: -9.983035852201283e-06, Learning Rate: 9.330068678536918e-06)
+Step... (236500/437388 | Training Loss: -9.984838470700197e-06, Learning Rate: 9.30720580072375e-06)
+Step... (237000/437388 | Training Loss: -9.986998520616908e-06, Learning Rate: 9.28434201341588e-06)
+Step... (237500/437388 | Training Loss: -9.974377462640405e-06, Learning Rate: 9.261479135602713e-06)
+Step... (238000/437388 | Training Loss: -9.987925295718014e-06, Learning Rate: 9.238616257789545e-06)
+Step... (238500/437388 | Training Loss: -9.986457371269353e-06, Learning Rate: 9.215752470481675e-06)
+Step... (239000/437388 | Training Loss: -9.99090843833983e-06, Learning Rate: 9.192889592668507e-06)
+Step... (239500/437388 | Training Loss: -9.988229066948406e-06, Learning Rate: 9.170027624350041e-06)
+Step... (240000/437388 | Training Loss: -9.994514584832359e-06, Learning Rate: 9.147164746536873e-06)
+Step... (240500/437388 | Training Loss: -9.993158528232016e-06, Learning Rate: 9.124301868723705e-06)
+Step... (241000/437388 | Training Loss: -9.996389962907415e-06, Learning Rate: 9.101438081415836e-06)
+Step... (241500/437388 | Training Loss: -9.979153219319414e-06, Learning Rate: 9.078575203602668e-06)
+Step... (242000/437388 | Training Loss: -9.989636055252049e-06, Learning Rate: 9.0557123257895e-06)
+Step... (242500/437388 | Training Loss: -9.993711501010694e-06, Learning Rate: 9.03284853848163e-06)
+Step... (243000/437388 | Training Loss: -9.888614840747323e-06, Learning Rate: 9.009985660668463e-06)
+Step... (243500/437388 | Training Loss: -9.989265890908428e-06, Learning Rate: 8.987123692349996e-06)
+Step... (244000/437388 | Training Loss: -9.879224307951517e-06, Learning Rate: 8.964260814536829e-06)
+Step... (244500/437388 | Training Loss: -9.99042094917968e-06, Learning Rate: 8.941397027228959e-06)
+Step... (245000/437388 | Training Loss: -9.990229045797605e-06, Learning Rate: 8.918534149415791e-06)
+Step... (245500/437388 | Training Loss: -9.98977520794142e-06, Learning Rate: 8.895671271602623e-06)
+Step... (246000/437388 | Training Loss: -9.997487723012455e-06, Learning Rate: 8.872808393789455e-06)
+Step... (246500/437388 | Training Loss: -9.986724762711674e-06, Learning Rate: 8.849944606481586e-06)
+Step... (247000/437388 | Training Loss: -9.988412784878165e-06, Learning Rate: 8.827081728668418e-06)
+Step... (247500/437388 | Training Loss: -9.990839316742495e-06, Learning Rate: 8.804219760349952e-06)
+Step... (248000/437388 | Training Loss: -9.991884326154832e-06, Learning Rate: 8.781356882536784e-06)
+Step... (248500/437388 | Training Loss: -9.986639270209707e-06, Learning Rate: 8.758493095228914e-06)
+Step... (249000/437388 | Training Loss: -9.890503861242905e-06, Learning Rate: 8.735630217415746e-06)
+Step... (249500/437388 | Training Loss: -9.99127769318875e-06, Learning Rate: 8.712767339602578e-06)
+Step... (250000/437388 | Training Loss: -9.987336852645967e-06, Learning Rate: 8.689903552294709e-06)
+Step... (250500/437388 | Training Loss: -9.989980753744021e-06, Learning Rate: 8.667040674481541e-06)
+Step... (251000/437388 | Training Loss: -9.989371392293833e-06, Learning Rate: 8.644177796668373e-06)
+Step... (251500/437388 | Training Loss: -9.898749340209179e-06, Learning Rate: 8.621315828349907e-06)
+Step... (252000/437388 | Training Loss: -9.987928933696821e-06, Learning Rate: 8.598452950536739e-06)
+Step... (252500/437388 | Training Loss: -9.976372894016095e-06, Learning Rate: 8.57558916322887e-06)
+Step... (253000/437388 | Training Loss: -9.986349141399842e-06, Learning Rate: 8.552726285415702e-06)
+Step... (253500/437388 | Training Loss: -9.896906703943387e-06, Learning Rate: 8.529863407602534e-06)
+Step... (254000/437388 | Training Loss: -9.98828727460932e-06, Learning Rate: 8.506999620294664e-06)
+Step... (254500/437388 | Training Loss: -9.989612408389803e-06, Learning Rate: 8.484136742481496e-06)
+Step... (255000/437388 | Training Loss: -9.949071682058275e-06, Learning Rate: 8.46127477416303e-06)
+Step... (255500/437388 | Training Loss: -9.882474842015654e-06, Learning Rate: 8.438411896349862e-06)
+Step... (256000/437388 | Training Loss: -9.836573553911876e-06, Learning Rate: 8.415549018536694e-06)
+Step... (256500/437388 | Training Loss: -9.986971235775854e-06, Learning Rate: 8.392685231228825e-06)
+Step... (257000/437388 | Training Loss: -9.986095392378047e-06, Learning Rate: 8.369822353415657e-06)
+Step... (257500/437388 | Training Loss: -9.99188887362834e-06, Learning Rate: 8.346959475602489e-06)
+Step... (258000/437388 | Training Loss: -9.967543519451283e-06, Learning Rate: 8.32409568829462e-06)
+Step... (258500/437388 | Training Loss: -9.990917533286847e-06, Learning Rate: 8.301232810481451e-06)
+Step... (259000/437388 | Training Loss: -9.987032171920873e-06, Learning Rate: 8.278370842162985e-06)
+Step... (259500/437388 | Training Loss: -9.992443665396422e-06, Learning Rate: 8.255507964349817e-06)
+Step... (260000/437388 | Training Loss: -9.987326848204248e-06, Learning Rate: 8.232644177041948e-06)
+Step... (260500/437388 | Training Loss: -9.988285455619916e-06, Learning Rate: 8.20978129922878e-06)
+Step... (261000/437388 | Training Loss: -9.995150321628898e-06, Learning Rate: 8.186918421415612e-06)
+Step... (261500/437388 | Training Loss: -9.988363672164269e-06, Learning Rate: 8.164055543602444e-06)
+Step... (262000/437388 | Training Loss: -9.995621439884417e-06, Learning Rate: 8.141191756294575e-06)
+Step... (262500/437388 | Training Loss: -9.986416444007773e-06, Learning Rate: 8.118328878481407e-06)
+Step... (263000/437388 | Training Loss: -9.989635145757347e-06, Learning Rate: 8.09546691016294e-06)
+Step... (263500/437388 | Training Loss: -9.992758350563236e-06, Learning Rate: 8.072604032349773e-06)
+Step... (264000/437388 | Training Loss: -9.988560123019852e-06, Learning Rate: 8.049740245041903e-06)
+Step... (264500/437388 | Training Loss: -9.983908967114985e-06, Learning Rate: 8.026877367228735e-06)
+Step... (265000/437388 | Training Loss: -9.989878890337422e-06, Learning Rate: 8.004014489415567e-06)
+Step... (265500/437388 | Training Loss: -9.982097253669053e-06, Learning Rate: 7.981150702107698e-06)
+Step... (266000/437388 | Training Loss: -9.986391887650825e-06, Learning Rate: 7.95828782429453e-06)
+Step... (266500/437388 | Training Loss: -9.987573321268428e-06, Learning Rate: 7.935424946481362e-06)
+Step... (267000/437388 | Training Loss: -9.99004168988904e-06, Learning Rate: 7.912562978162896e-06)
+Step... (267500/437388 | Training Loss: -9.990327271225397e-06, Learning Rate: 7.889700100349728e-06)
+Step... (268000/437388 | Training Loss: -9.991719707613811e-06, Learning Rate: 7.866836313041858e-06)
+Step... (268500/437388 | Training Loss: -9.99336589302402e-06, Learning Rate: 7.84397343522869e-06)
+Step... (269000/437388 | Training Loss: -9.927234714268707e-06, Learning Rate: 7.821110557415523e-06)
+Step... (269500/437388 | Training Loss: -9.992361810873263e-06, Learning Rate: 7.798246770107653e-06)
+Step... (270000/437388 | Training Loss: -9.868877896224149e-06, Learning Rate: 7.775383892294485e-06)
+Step... (270500/437388 | Training Loss: -9.974767635867465e-06, Learning Rate: 7.752521923976019e-06)
+Step... (271000/437388 | Training Loss: -9.993795174523257e-06, Learning Rate: 7.729659046162851e-06)
+Step... (271500/437388 | Training Loss: -9.992438208428212e-06, Learning Rate: 7.706796168349683e-06)
+Step... (272000/437388 | Training Loss: -9.864586900221184e-06, Learning Rate: 7.683932381041814e-06)
+Step... (272500/437388 | Training Loss: -9.992800187319517e-06, Learning Rate: 7.661069503228646e-06)
+Step... (273000/437388 | Training Loss: -9.991427759814542e-06, Learning Rate: 7.638206625415478e-06)
+Step... (273500/437388 | Training Loss: -9.99362782749813e-06, Learning Rate: 7.615343292854959e-06)
+Step... (274000/437388 | Training Loss: -9.998650966736022e-06, Learning Rate: 7.59247996029444e-06)
+Step... (274500/437388 | Training Loss: -9.986426448449492e-06, Learning Rate: 7.569617991975974e-06)
+Step... (275000/437388 | Training Loss: -9.892217349261045e-06, Learning Rate: 7.5467546594154555e-06)
+Step... (275500/437388 | Training Loss: -9.984280040953308e-06, Learning Rate: 7.523891781602288e-06)
+Step... (276000/437388 | Training Loss: -9.993502317229286e-06, Learning Rate: 7.501028449041769e-06)
+Step... (276500/437388 | Training Loss: -9.994282663683407e-06, Learning Rate: 7.478165571228601e-06)
+Step... (277000/437388 | Training Loss: -9.989940735977143e-06, Learning Rate: 7.455302238668082e-06)
+Step... (277500/437388 | Training Loss: -9.871511792880483e-06, Learning Rate: 7.432439360854914e-06)
+Step... (278000/437388 | Training Loss: -9.991127626562957e-06, Learning Rate: 7.409576028294396e-06)
+Step... (278500/437388 | Training Loss: -9.99518670141697e-06, Learning Rate: 7.3867140599759296e-06)
+Step... (279000/437388 | Training Loss: -9.990813850890845e-06, Learning Rate: 7.363850727415411e-06)
+Step... (279500/437388 | Training Loss: -9.992672858061269e-06, Learning Rate: 7.340987849602243e-06)
+Step... (280000/437388 | Training Loss: -9.99051371763926e-06, Learning Rate: 7.318124517041724e-06)
+Step... (280500/437388 | Training Loss: -9.989868885895703e-06, Learning Rate: 7.295261639228556e-06)
+Step... (281000/437388 | Training Loss: -9.992634659283794e-06, Learning Rate: 7.2723983066680375e-06)
+Step... (281500/437388 | Training Loss: -9.985859833250288e-06, Learning Rate: 7.249534974107519e-06)
+Step... (282000/437388 | Training Loss: -9.987268640543334e-06, Learning Rate: 7.226672096294351e-06)
+Step... (282500/437388 | Training Loss: -9.997535016736947e-06, Learning Rate: 7.203810127975885e-06)
+Step... (283000/437388 | Training Loss: -9.988288184104022e-06, Learning Rate: 7.180946795415366e-06)
+Step... (283500/437388 | Training Loss: -9.813671567826532e-06, Learning Rate: 7.158083917602198e-06)
+Step... (284000/437388 | Training Loss: -9.99168332782574e-06, Learning Rate: 7.1352205850416794e-06)
+Step... (284500/437388 | Training Loss: -9.990868420572951e-06, Learning Rate: 7.112357252481161e-06)
+Step... (285000/437388 | Training Loss: -9.990955732064322e-06, Learning Rate: 7.089494374667993e-06)
+Step... (285500/437388 | Training Loss: -9.994293577619828e-06, Learning Rate: 7.066631042107474e-06)
+Step... (286000/437388 | Training Loss: -9.994688298320398e-06, Learning Rate: 7.043769073789008e-06)
+Step... (286500/437388 | Training Loss: -9.88380634225905e-06, Learning Rate: 7.02090619597584e-06)
+Step... (287000/437388 | Training Loss: -9.993795174523257e-06, Learning Rate: 6.998042863415321e-06)
+Step... (287500/437388 | Training Loss: -9.992005288950168e-06, Learning Rate: 6.975179530854803e-06)
+Step... (288000/437388 | Training Loss: -9.986464647226967e-06, Learning Rate: 6.952316653041635e-06)
+Step... (288500/437388 | Training Loss: -9.993902494898066e-06, Learning Rate: 6.929453320481116e-06)
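
Every line appended above follows one fixed pattern, and the learning rate falls by roughly 2.29e-08 per 500-step interval, consistent with a linear decay schedule that reaches zero near the final step count of 437388. A minimal Python sketch for parsing this log into (step, total, loss, learning-rate) records; the regex simply mirrors the format shown here and is not part of the training code in this repo:

import re

# Matches e.g. "Step... (288500/437388 | Training Loss: -9.99e-06, Learning Rate: 6.93e-06)"
LINE = re.compile(
    r"Step\.\.\. \((\d+)/(\d+) \| Training Loss: ([-+0-9.e]+), "
    r"Learning Rate: ([-+0-9.e]+)\)"
)

def parse_log(path):
    """Yield (step, total_steps, loss, learning_rate) for each matching line."""
    with open(path, encoding="utf-8") as f:
        for line in f:
            m = LINE.search(line)
            if m:
                yield (int(m.group(1)), int(m.group(2)),
                       float(m.group(3)), float(m.group(4)))

records = list(parse_log("logs/2022-07-11-roberta-base.txt"))
# Check the per-step learning-rate slope between consecutive entries:
for (s0, _, _, lr0), (s1, _, _, lr1) in zip(records, records[1:]):
    print(f"steps {s0}->{s1}: dLR/step = {(lr1 - lr0) / (s1 - s0):.3e}")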