{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "global_step": 960,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.03, "learning_rate": 4.999665344773905e-05, "loss": 2.2777, "step": 5 },
    { "epoch": 0.05, "learning_rate": 4.9986614686909146e-05, "loss": 2.1303, "step": 10 },
    { "epoch": 0.08, "learning_rate": 4.996988640512931e-05, "loss": 2.143, "step": 15 },
    { "epoch": 0.1, "learning_rate": 4.994647308096509e-05, "loss": 2.2047, "step": 20 },
    { "epoch": 0.13, "learning_rate": 4.991638098272951e-05, "loss": 1.9869, "step": 25 },
    { "epoch": 0.16, "learning_rate": 4.987961816680492e-05, "loss": 2.1792, "step": 30 },
    { "epoch": 0.18, "learning_rate": 4.983619447548611e-05, "loss": 2.0121, "step": 35 },
    { "epoch": 0.21, "learning_rate": 4.9786121534345265e-05, "loss": 1.9919, "step": 40 },
    { "epoch": 0.23, "learning_rate": 4.972941274911953e-05, "loss": 1.9453, "step": 45 },
    { "epoch": 0.26, "learning_rate": 4.966608330212198e-05, "loss": 2.0629, "step": 50 },
    { "epoch": 0.29, "learning_rate": 4.9596150148176936e-05, "loss": 1.8407, "step": 55 },
    { "epoch": 0.31, "learning_rate": 4.951963201008076e-05, "loss": 1.9516, "step": 60 },
    { "epoch": 0.34, "learning_rate": 4.94365493735893e-05, "loss": 1.8641, "step": 65 },
    { "epoch": 0.36, "learning_rate": 4.934692448193334e-05, "loss": 1.8404, "step": 70 },
    { "epoch": 0.39, "learning_rate": 4.9250781329863606e-05, "loss": 1.9995, "step": 75 },
    { "epoch": 0.42, "learning_rate": 4.914814565722671e-05, "loss": 1.822, "step": 80 },
    { "epoch": 0.44, "learning_rate": 4.903904494207405e-05, "loss": 1.9633, "step": 85 },
    { "epoch": 0.47, "learning_rate": 4.892350839330522e-05, "loss": 1.8818, "step": 90 },
    { "epoch": 0.49, "learning_rate": 4.880156694284811e-05, "loss": 1.7518, "step": 95 },
    { "epoch": 0.52, "learning_rate": 4.867325323737765e-05, "loss": 1.8622, "step": 100 },
    { "epoch": 0.55, "learning_rate": 4.853860162957552e-05, "loss": 1.8353, "step": 105 },
    { "epoch": 0.57, "learning_rate": 4.839764816893315e-05, "loss": 1.9588, "step": 110 },
    { "epoch": 0.6, "learning_rate": 4.8250430592100306e-05, "loss": 1.9896, "step": 115 },
    { "epoch": 0.62, "learning_rate": 4.8096988312782174e-05, "loss": 1.8516, "step": 120 },
    { "epoch": 0.65, "learning_rate": 4.793736241118728e-05, "loss": 1.7615, "step": 125 },
    { "epoch": 0.68, "learning_rate": 4.7771595623029394e-05, "loss": 1.7356, "step": 130 },
    { "epoch": 0.7, "learning_rate": 4.759973232808609e-05, "loss": 1.9084, "step": 135 },
    { "epoch": 0.73, "learning_rate": 4.742181853831721e-05, "loss": 1.8052, "step": 140 },
    { "epoch": 0.76, "learning_rate": 4.7237901885546404e-05, "loss": 1.9753, "step": 145 },
    { "epoch": 0.78, "learning_rate": 4.7048031608708876e-05, "loss": 1.8283, "step": 150 },
    { "epoch": 0.81, "learning_rate": 4.6852258540668974e-05, "loss": 1.8031, "step": 155 },
    { "epoch": 0.83, "learning_rate": 4.665063509461097e-05, "loss": 1.842, "step": 160 },
    { "epoch": 0.86, "learning_rate": 4.6443215250006806e-05, "loss": 1.8729, "step": 165 },
    { "epoch": 0.89, "learning_rate": 4.6230054538164474e-05, "loss": 1.824, "step": 170 },
    { "epoch": 0.91, "learning_rate": 4.601121002736095e-05, "loss": 1.5677, "step": 175 },
    { "epoch": 0.94, "learning_rate": 4.5786740307563636e-05, "loss": 1.8157, "step": 180 },
    { "epoch": 0.96, "learning_rate": 4.555670547474438e-05, "loss": 1.7408, "step": 185 },
    { "epoch": 0.99, "learning_rate": 4.5321167114790385e-05, "loss": 1.8033, "step": 190 },
    { "epoch": 1.02, "learning_rate": 4.508018828701612e-05, "loss": 1.3534, "step": 195 },
    { "epoch": 1.04, "learning_rate": 4.4833833507280884e-05, "loss": 1.167, "step": 200 },
    { "epoch": 1.07, "learning_rate": 4.458216873071626e-05, "loss": 1.0725, "step": 205 },
    { "epoch": 1.09, "learning_rate": 4.4325261334068426e-05, "loss": 1.0958, "step": 210 },
    { "epoch": 1.12, "learning_rate": 4.406318009765971e-05, "loss": 1.223, "step": 215 },
    { "epoch": 1.15, "learning_rate": 4.379599518697444e-05, "loss": 1.1112, "step": 220 },
    { "epoch": 1.17, "learning_rate": 4.352377813387398e-05, "loss": 1.0701, "step": 225 },
    { "epoch": 1.2, "learning_rate": 4.324660181744589e-05, "loss": 1.1594, "step": 230 },
    { "epoch": 1.22, "learning_rate": 4.296454044449245e-05, "loss": 1.1602, "step": 235 },
    { "epoch": 1.25, "learning_rate": 4.267766952966369e-05, "loss": 1.1109, "step": 240 },
    { "epoch": 1.28, "learning_rate": 4.238606587524029e-05, "loss": 1.0719, "step": 245 },
    { "epoch": 1.3, "learning_rate": 4.208980755057178e-05, "loss": 1.1497, "step": 250 },
    { "epoch": 1.33, "learning_rate": 4.178897387117546e-05, "loss": 1.2594, "step": 255 },
    { "epoch": 1.35, "learning_rate": 4.148364537750172e-05, "loss": 1.1576, "step": 260 },
    { "epoch": 1.38, "learning_rate": 4.117390381337144e-05, "loss": 1.1553, "step": 265 },
    { "epoch": 1.41, "learning_rate": 4.085983210409114e-05, "loss": 1.1414, "step": 270 },
    { "epoch": 1.43, "learning_rate": 4.054151433425194e-05, "loss": 1.241, "step": 275 },
    { "epoch": 1.46, "learning_rate": 4.021903572521802e-05, "loss": 1.1757, "step": 280 },
    { "epoch": 1.48, "learning_rate": 3.9892482612310836e-05, "loss": 1.1875, "step": 285 },
    { "epoch": 1.51, "learning_rate": 3.956194242169506e-05, "loss": 1.0741, "step": 290 },
    { "epoch": 1.54, "learning_rate": 3.922750364697246e-05, "loss": 1.1855, "step": 295 },
    { "epoch": 1.56, "learning_rate": 3.888925582549006e-05, "loss": 1.1681, "step": 300 },
    { "epoch": 1.59, "learning_rate": 3.854728951436879e-05, "loss": 1.1153, "step": 305 },
    { "epoch": 1.61, "learning_rate": 3.82016962662592e-05, "loss": 1.2223, "step": 310 },
    { "epoch": 1.64, "learning_rate": 3.785256860483054e-05, "loss": 1.1862, "step": 315 },
    { "epoch": 1.67, "learning_rate": 3.7500000000000003e-05, "loss": 1.2807, "step": 320 },
    { "epoch": 1.69, "learning_rate": 3.7144084842908505e-05, "loss": 1.1803, "step": 325 },
    { "epoch": 1.72, "learning_rate": 3.678491842064995e-05, "loss": 1.2763, "step": 330 },
    { "epoch": 1.74, "learning_rate": 3.6422596890760517e-05, "loss": 1.2246, "step": 335 },
    { "epoch": 1.77, "learning_rate": 3.6057217255475034e-05, "loss": 1.0887, "step": 340 },
    { "epoch": 1.8, "learning_rate": 3.568887733575706e-05, "loss": 1.1806, "step": 345 },
    { "epoch": 1.82, "learning_rate": 3.5317675745109866e-05, "loss": 1.0842, "step": 350 },
    { "epoch": 1.85, "learning_rate": 3.494371186317528e-05, "loss": 1.0559, "step": 355 },
    { "epoch": 1.88, "learning_rate": 3.456708580912725e-05, "loss": 1.0297, "step": 360 },
    { "epoch": 1.9, "learning_rate": 3.418789841486759e-05, "loss": 1.0726, "step": 365 },
    { "epoch": 1.93, "learning_rate": 3.380625119803084e-05, "loss": 1.1097, "step": 370 },
    { "epoch": 1.95, "learning_rate": 3.34222463348055e-05, "loss": 1.0985, "step": 375 },
    { "epoch": 1.98, "learning_rate": 3.303598663257904e-05, "loss": 1.182, "step": 380 },
    { "epoch": 2.01, "learning_rate": 3.264757550241384e-05, "loss": 1.0829, "step": 385 },
    { "epoch": 2.03, "learning_rate": 3.225711693136156e-05, "loss": 0.6117, "step": 390 },
    { "epoch": 2.06, "learning_rate": 3.186471545462331e-05, "loss": 0.53, "step": 395 },
    { "epoch": 2.08, "learning_rate": 3.147047612756302e-05, "loss": 0.6219, "step": 400 },
    { "epoch": 2.11, "learning_rate": 3.10745044975816e-05, "loss": 0.528, "step": 405 },
    { "epoch": 2.14, "learning_rate": 3.0676906575859334e-05, "loss": 0.5044, "step": 410 },
    { "epoch": 2.16, "learning_rate": 3.0277788808974128e-05, "loss": 0.6492, "step": 415 },
    { "epoch": 2.19, "learning_rate": 2.9877258050403212e-05, "loss": 0.5539, "step": 420 },
    { "epoch": 2.21, "learning_rate": 2.9475421531915827e-05, "loss": 0.6158, "step": 425 },
    { "epoch": 2.24, "learning_rate": 2.9072386834864724e-05, "loss": 0.5606, "step": 430 },
    { "epoch": 2.27, "learning_rate": 2.8668261861384045e-05, "loss": 0.4933, "step": 435 },
    { "epoch": 2.29, "learning_rate": 2.8263154805501297e-05, "loss": 0.5449, "step": 440 },
    { "epoch": 2.32, "learning_rate": 2.7857174124171165e-05, "loss": 0.5829, "step": 445 },
    { "epoch": 2.34, "learning_rate": 2.7450428508239024e-05, "loss": 0.5175, "step": 450 },
    { "epoch": 2.37, "learning_rate": 2.704302685334171e-05, "loss": 0.5473, "step": 455 },
    { "epoch": 2.4, "learning_rate": 2.663507823075358e-05, "loss": 0.6025, "step": 460 },
    { "epoch": 2.42, "learning_rate": 2.6226691858185454e-05, "loss": 0.5818, "step": 465 },
    { "epoch": 2.45, "learning_rate": 2.5817977070544407e-05, "loss": 0.5363, "step": 470 },
    { "epoch": 2.47, "learning_rate": 2.5409043290662173e-05, "loss": 0.5645, "step": 475 },
    { "epoch": 2.5, "learning_rate": 2.5e-05, "loss": 0.5195, "step": 480 },
    { "epoch": 2.53, "learning_rate": 2.459095670933783e-05, "loss": 0.5496, "step": 485 },
    { "epoch": 2.55, "learning_rate": 2.41820229294556e-05, "loss": 0.5588, "step": 490 },
    { "epoch": 2.58, "learning_rate": 2.3773308141814552e-05, "loss": 0.5454, "step": 495 },
    { "epoch": 2.6, "learning_rate": 2.3364921769246423e-05, "loss": 0.5319, "step": 500 },
    { "epoch": 2.63, "learning_rate": 2.2956973146658303e-05, "loss": 0.5125, "step": 505 },
    { "epoch": 2.66, "learning_rate": 2.2549571491760986e-05, "loss": 0.5035, "step": 510 },
    { "epoch": 2.68, "learning_rate": 2.2142825875828838e-05, "loss": 0.5413, "step": 515 },
    { "epoch": 2.71, "learning_rate": 2.173684519449872e-05, "loss": 0.5031, "step": 520 },
    { "epoch": 2.73, "learning_rate": 2.1331738138615958e-05, "loss": 0.6249, "step": 525 },
    { "epoch": 2.76, "learning_rate": 2.0927613165135285e-05, "loss": 0.5193, "step": 530 },
    { "epoch": 2.79, "learning_rate": 2.052457846808419e-05, "loss": 0.5967, "step": 535 },
    { "epoch": 2.81, "learning_rate": 2.0122741949596797e-05, "loss": 0.5391, "step": 540 },
    { "epoch": 2.84, "learning_rate": 1.972221119102587e-05, "loss": 0.5601, "step": 545 },
    { "epoch": 2.86, "learning_rate": 1.932309342414067e-05, "loss": 0.4991, "step": 550 },
    { "epoch": 2.89, "learning_rate": 1.8925495502418406e-05, "loss": 0.6078, "step": 555 },
    { "epoch": 2.92, "learning_rate": 1.852952387243698e-05, "loss": 0.5349, "step": 560 },
    { "epoch": 2.94, "learning_rate": 1.81352845453767e-05, "loss": 0.4897, "step": 565 },
    { "epoch": 2.97, "learning_rate": 1.7742883068638447e-05, "loss": 0.5732, "step": 570 },
    { "epoch": 2.99, "learning_rate": 1.7352424497586163e-05, "loss": 0.5019, "step": 575 },
    { "epoch": 3.02, "learning_rate": 1.6964013367420966e-05, "loss": 0.3509, "step": 580 },
    { "epoch": 3.05, "learning_rate": 1.65777536651945e-05, "loss": 0.2495, "step": 585 },
    { "epoch": 3.07, "learning_rate": 1.6193748801969163e-05, "loss": 0.265, "step": 590 },
    { "epoch": 3.1, "learning_rate": 1.5812101585132417e-05, "loss": 0.2412, "step": 595 },
    { "epoch": 3.12, "learning_rate": 1.5432914190872757e-05, "loss": 0.2662, "step": 600 },
    { "epoch": 3.15, "learning_rate": 1.5056288136824726e-05, "loss": 0.241, "step": 605 },
    { "epoch": 3.18, "learning_rate": 1.4682324254890134e-05, "loss": 0.2568, "step": 610 },
    { "epoch": 3.2, "learning_rate": 1.4311122664242954e-05, "loss": 0.2462, "step": 615 },
    { "epoch": 3.23, "learning_rate": 1.3942782744524973e-05, "loss": 0.2399, "step": 620 },
    { "epoch": 3.26, "learning_rate": 1.3577403109239484e-05, "loss": 0.2486, "step": 625 },
    { "epoch": 3.28, "learning_rate": 1.3215081579350058e-05, "loss": 0.2343, "step": 630 },
    { "epoch": 3.31, "learning_rate": 1.2855915157091497e-05, "loss": 0.2531, "step": 635 },
    { "epoch": 3.33, "learning_rate": 1.2500000000000006e-05, "loss": 0.2529, "step": 640 },
    { "epoch": 3.36, "learning_rate": 1.2147431395169459e-05, "loss": 0.2451, "step": 645 },
    { "epoch": 3.39, "learning_rate": 1.1798303733740802e-05, "loss": 0.2397, "step": 650 },
    { "epoch": 3.41, "learning_rate": 1.1452710485631216e-05, "loss": 0.2397, "step": 655 },
    { "epoch": 3.44, "learning_rate": 1.1110744174509952e-05, "loss": 0.2515, "step": 660 },
    { "epoch": 3.46, "learning_rate": 1.0772496353027538e-05, "loss": 0.2551, "step": 665 },
    { "epoch": 3.49, "learning_rate": 1.043805757830495e-05, "loss": 0.293, "step": 670 },
    { "epoch": 3.52, "learning_rate": 1.0107517387689166e-05, "loss": 0.2861, "step": 675 },
    { "epoch": 3.54, "learning_rate": 9.780964274781984e-06, "loss": 0.2599, "step": 680 },
    { "epoch": 3.57, "learning_rate": 9.45848566574807e-06, "loss": 0.2521, "step": 685 },
    { "epoch": 3.59, "learning_rate": 9.140167895908867e-06, "loss": 0.264, "step": 690 },
    { "epoch": 3.62, "learning_rate": 8.82609618662857e-06, "loss": 0.257, "step": 695 },
    { "epoch": 3.65, "learning_rate": 8.51635462249828e-06, "loss": 0.2621, "step": 700 },
    { "epoch": 3.67, "learning_rate": 8.211026128824539e-06, "loss": 0.2529, "step": 705 },
    { "epoch": 3.7, "learning_rate": 7.910192449428217e-06, "loss": 0.25, "step": 710 },
    { "epoch": 3.72, "learning_rate": 7.613934124759711e-06, "loss": 0.2652, "step": 715 },
    { "epoch": 3.75, "learning_rate": 7.3223304703363135e-06, "loss": 0.2448, "step": 720 },
    { "epoch": 3.78, "learning_rate": 7.035459555507548e-06, "loss": 0.238, "step": 725 },
    { "epoch": 3.8, "learning_rate": 6.753398182554116e-06, "loss": 0.2335, "step": 730 },
    { "epoch": 3.83, "learning_rate": 6.476221866126029e-06, "loss": 0.2498, "step": 735 },
    { "epoch": 3.85, "learning_rate": 6.204004813025568e-06, "loss": 0.2657, "step": 740 },
    { "epoch": 3.88, "learning_rate": 5.936819902340299e-06, "loss": 0.2439, "step": 745 },
    { "epoch": 3.91, "learning_rate": 5.674738665931575e-06, "loss": 0.2455, "step": 750 },
    { "epoch": 3.93, "learning_rate": 5.417831269283741e-06, "loss": 0.2569, "step": 755 },
    { "epoch": 3.96, "learning_rate": 5.166166492719124e-06, "loss": 0.2566, "step": 760 },
    { "epoch": 3.98, "learning_rate": 4.91981171298388e-06, "loss": 0.2502, "step": 765 },
    { "epoch": 4.01, "learning_rate": 4.678832885209622e-06, "loss": 0.2081, "step": 770 },
    { "epoch": 4.04, "learning_rate": 4.4432945252556284e-06, "loss": 0.1486, "step": 775 },
    { "epoch": 4.06, "learning_rate": 4.213259692436367e-06, "loss": 0.1647, "step": 780 },
    { "epoch": 4.09, "learning_rate": 3.988789972639045e-06, "loss": 0.152, "step": 785 },
    { "epoch": 4.11, "learning_rate": 3.769945461835531e-06, "loss": 0.1594, "step": 790 },
    { "epoch": 4.14, "learning_rate": 3.5567847499932e-06, "loss": 0.1576, "step": 795 },
    { "epoch": 4.17, "learning_rate": 3.3493649053890326e-06, "loss": 0.1622, "step": 800 },
    { "epoch": 4.19, "learning_rate": 3.147741459331033e-06, "loss": 0.1564, "step": 805 },
    { "epoch": 4.22, "learning_rate": 2.9519683912911266e-06, "loss": 0.1668, "step": 810 },
    { "epoch": 4.24, "learning_rate": 2.7620981144536013e-06, "loss": 0.1479, "step": 815 },
    { "epoch": 4.27, "learning_rate": 2.578181461682794e-06, "loss": 0.1558, "step": 820 },
    { "epoch": 4.3, "learning_rate": 2.4002676719139166e-06, "loss": 0.1483, "step": 825 },
    { "epoch": 4.32, "learning_rate": 2.2284043769706027e-06, "loss": 0.1556, "step": 830 },
    { "epoch": 4.35, "learning_rate": 2.0626375888127187e-06, "loss": 0.1634, "step": 835 },
    { "epoch": 4.38, "learning_rate": 1.9030116872178316e-06, "loss": 0.1428, "step": 840 },
    { "epoch": 4.4, "learning_rate": 1.7495694078996983e-06, "loss": 0.1491, "step": 845 },
    { "epoch": 4.43, "learning_rate": 1.6023518310668618e-06, "loss": 0.1633, "step": 850 },
    { "epoch": 4.45, "learning_rate": 1.4613983704244826e-06, "loss": 0.1471, "step": 855 },
    { "epoch": 4.48, "learning_rate": 1.3267467626223606e-06, "loss": 0.1481, "step": 860 },
    { "epoch": 4.51, "learning_rate": 1.1984330571518931e-06, "loss": 0.1464, "step": 865 },
    { "epoch": 4.53, "learning_rate": 1.0764916066947794e-06, "loss": 0.1658, "step": 870 },
    { "epoch": 4.56, "learning_rate": 9.609550579259496e-07, "loss": 0.1535, "step": 875 },
    { "epoch": 4.58, "learning_rate": 8.51854342773295e-07, "loss": 0.1548, "step": 880 },
    { "epoch": 4.61, "learning_rate": 7.492186701364007e-07, "loss": 0.1452, "step": 885 },
    { "epoch": 4.64, "learning_rate": 6.530755180666592e-07, "loss": 0.1518, "step": 890 },
    { "epoch": 4.66, "learning_rate": 5.634506264107053e-07, "loss": 0.153, "step": 895 },
    { "epoch": 4.69, "learning_rate": 4.803679899192392e-07, "loss": 0.1568, "step": 900 },
    { "epoch": 4.71, "learning_rate": 4.0384985182306265e-07, "loss": 0.1527, "step": 905 },
    { "epoch": 4.74, "learning_rate": 3.339166978780256e-07, "loss": 0.144, "step": 910 },
    { "epoch": 4.77, "learning_rate": 2.705872508804747e-07, "loss": 0.1509, "step": 915 },
    { "epoch": 4.79, "learning_rate": 2.1387846565474045e-07, "loss": 0.1418, "step": 920 },
    { "epoch": 4.82, "learning_rate": 1.6380552451389086e-07, "loss": 0.168, "step": 925 },
    { "epoch": 4.84, "learning_rate": 1.2038183319507955e-07, "loss": 0.156, "step": 930 },
    { "epoch": 4.87, "learning_rate": 8.361901727049204e-08, "loss": 0.1364, "step": 935 },
    { "epoch": 4.9, "learning_rate": 5.352691903491303e-08, "loss": 0.1482, "step": 940 },
    { "epoch": 4.92, "learning_rate": 3.011359487068987e-08, "loss": 0.1574, "step": 945 },
    { "epoch": 4.95, "learning_rate": 1.3385313090857887e-08, "loss": 0.1496, "step": 950 },
    { "epoch": 4.97, "learning_rate": 3.346552260954927e-09, "loss": 0.1475, "step": 955 },
    { "epoch": 5.0, "learning_rate": 0.0, "loss": 0.1461, "step": 960 },
    {
      "epoch": 5.0,
      "step": 960,
      "total_flos": 1778473878159360.0,
      "train_loss": 0.8035268472507596,
      "train_runtime": 399.0968,
      "train_samples_per_second": 4.798,
      "train_steps_per_second": 2.405
    }
  ],
  "max_steps": 960,
  "num_train_epochs": 5,
  "total_flos": 1778473878159360.0,
  "trial_name": null,
  "trial_params": null
}