{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 956,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0010460251046025104,
      "grad_norm": 4.85831464931585,
      "learning_rate": 2.0833333333333333e-07,
      "loss": 1.3092,
      "step": 1
    },
    {
      "epoch": 0.005230125523012552,
      "grad_norm": 4.698008970388053,
      "learning_rate": 1.0416666666666667e-06,
      "loss": 1.2901,
      "step": 5
    },
    {
      "epoch": 0.010460251046025104,
      "grad_norm": 4.048984012433902,
      "learning_rate": 2.0833333333333334e-06,
      "loss": 1.2488,
      "step": 10
    },
    {
      "epoch": 0.015690376569037656,
      "grad_norm": 1.2617332917985404,
      "learning_rate": 3.125e-06,
      "loss": 1.1403,
      "step": 15
    },
    {
      "epoch": 0.02092050209205021,
      "grad_norm": 0.7560157485938099,
      "learning_rate": 4.166666666666667e-06,
      "loss": 1.1238,
      "step": 20
    },
    {
      "epoch": 0.02615062761506276,
      "grad_norm": 0.6324506842569716,
      "learning_rate": 5.208333333333334e-06,
      "loss": 1.0882,
      "step": 25
    },
    {
      "epoch": 0.03138075313807531,
      "grad_norm": 0.548469635609847,
      "learning_rate": 6.25e-06,
      "loss": 1.0972,
      "step": 30
    },
    {
      "epoch": 0.036610878661087864,
      "grad_norm": 0.5552996818029777,
      "learning_rate": 7.291666666666667e-06,
      "loss": 1.0967,
      "step": 35
    },
    {
      "epoch": 0.04184100418410042,
      "grad_norm": 0.5800275526403403,
      "learning_rate": 8.333333333333334e-06,
      "loss": 1.0806,
      "step": 40
    },
    {
      "epoch": 0.04707112970711297,
      "grad_norm": 0.6526746495693823,
      "learning_rate": 9.375000000000001e-06,
      "loss": 1.0799,
      "step": 45
    },
    {
      "epoch": 0.05230125523012552,
      "grad_norm": 0.5908325566513732,
      "learning_rate": 1.0416666666666668e-05,
      "loss": 1.0776,
      "step": 50
    },
    {
      "epoch": 0.05753138075313807,
      "grad_norm": 0.5391654191606475,
      "learning_rate": 1.1458333333333333e-05,
      "loss": 1.0899,
      "step": 55
    },
    {
      "epoch": 0.06276150627615062,
      "grad_norm": 0.5595510219538938,
      "learning_rate": 1.25e-05,
      "loss": 1.0764,
      "step": 60
    },
    {
      "epoch": 0.06799163179916318,
      "grad_norm": 0.6189231853205227,
      "learning_rate": 1.3541666666666668e-05,
      "loss": 1.0822,
      "step": 65
    },
    {
      "epoch": 0.07322175732217573,
      "grad_norm": 0.6003927282619645,
      "learning_rate": 1.4583333333333333e-05,
      "loss": 1.0677,
      "step": 70
    },
    {
      "epoch": 0.07845188284518828,
      "grad_norm": 0.587656104310443,
      "learning_rate": 1.5625e-05,
      "loss": 1.094,
      "step": 75
    },
    {
      "epoch": 0.08368200836820083,
      "grad_norm": 0.6752900460018412,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 1.0929,
      "step": 80
    },
    {
      "epoch": 0.08891213389121339,
      "grad_norm": 0.584358866439279,
      "learning_rate": 1.7708333333333335e-05,
      "loss": 1.1021,
      "step": 85
    },
    {
      "epoch": 0.09414225941422594,
      "grad_norm": 0.6938741353728844,
      "learning_rate": 1.8750000000000002e-05,
      "loss": 1.0856,
      "step": 90
    },
    {
      "epoch": 0.09937238493723849,
      "grad_norm": 0.5937738979114767,
      "learning_rate": 1.979166666666667e-05,
      "loss": 1.0862,
      "step": 95
    },
    {
      "epoch": 0.10460251046025104,
      "grad_norm": 0.6710102145171454,
      "learning_rate": 1.9998932457674904e-05,
      "loss": 1.0933,
      "step": 100
    },
    {
      "epoch": 0.1098326359832636,
      "grad_norm": 0.5541556578263152,
      "learning_rate": 1.999459595761682e-05,
      "loss": 1.1147,
      "step": 105
    },
    {
      "epoch": 0.11506276150627615,
      "grad_norm": 0.7295308103628182,
      "learning_rate": 1.9986925223989665e-05,
      "loss": 1.093,
      "step": 110
    },
    {
      "epoch": 0.1202928870292887,
      "grad_norm": 0.5550755397232149,
      "learning_rate": 1.9975922815778082e-05,
      "loss": 1.0831,
      "step": 115
    },
    {
      "epoch": 0.12552301255230125,
      "grad_norm": 0.6906078000000908,
      "learning_rate": 1.996159240342547e-05,
      "loss": 1.0825,
      "step": 120
    },
    {
      "epoch": 0.1307531380753138,
      "grad_norm": 0.6064322930924732,
      "learning_rate": 1.9943938767609515e-05,
      "loss": 1.0831,
      "step": 125
    },
    {
      "epoch": 0.13598326359832635,
      "grad_norm": 0.5759088665056421,
      "learning_rate": 1.9922967797647357e-05,
      "loss": 1.0905,
      "step": 130
    },
    {
      "epoch": 0.1412133891213389,
      "grad_norm": 0.5922048567597843,
      "learning_rate": 1.9898686489530852e-05,
      "loss": 1.0974,
      "step": 135
    },
    {
      "epoch": 0.14644351464435146,
      "grad_norm": 0.582760629154959,
      "learning_rate": 1.9871102943592717e-05,
      "loss": 1.0731,
      "step": 140
    },
    {
      "epoch": 0.151673640167364,
      "grad_norm": 0.5685111043758668,
      "learning_rate": 1.9840226361804223e-05,
      "loss": 1.1055,
      "step": 145
    },
    {
      "epoch": 0.15690376569037656,
      "grad_norm": 0.5443024454296611,
      "learning_rate": 1.9806067044705375e-05,
      "loss": 1.076,
      "step": 150
    },
    {
      "epoch": 0.16213389121338911,
      "grad_norm": 0.624292985384766,
      "learning_rate": 1.9768636387968616e-05,
      "loss": 1.1064,
      "step": 155
    },
    {
      "epoch": 0.16736401673640167,
      "grad_norm": 0.6240829821198746,
      "learning_rate": 1.9727946878597193e-05,
      "loss": 1.0791,
      "step": 160
    },
    {
      "epoch": 0.17259414225941422,
      "grad_norm": 0.5276766638385988,
      "learning_rate": 1.968401209075946e-05,
      "loss": 1.0839,
      "step": 165
    },
    {
      "epoch": 0.17782426778242677,
      "grad_norm": 0.5836785578453231,
      "learning_rate": 1.963684668126046e-05,
      "loss": 1.1019,
      "step": 170
    },
    {
      "epoch": 0.18305439330543932,
      "grad_norm": 0.635087703249428,
      "learning_rate": 1.9586466384652366e-05,
      "loss": 1.0847,
      "step": 175
    },
    {
      "epoch": 0.18828451882845187,
      "grad_norm": 0.6401066654821815,
      "learning_rate": 1.9532888007985408e-05,
      "loss": 1.0956,
      "step": 180
    },
    {
      "epoch": 0.19351464435146443,
      "grad_norm": 0.6003213599436353,
      "learning_rate": 1.9476129425200935e-05,
      "loss": 1.0937,
      "step": 185
    },
    {
      "epoch": 0.19874476987447698,
      "grad_norm": 0.6459057767600639,
      "learning_rate": 1.9416209571168648e-05,
      "loss": 1.089,
      "step": 190
    },
    {
      "epoch": 0.20397489539748953,
      "grad_norm": 0.5793932824335415,
      "learning_rate": 1.9353148435369816e-05,
      "loss": 1.0906,
      "step": 195
    },
    {
      "epoch": 0.20920502092050208,
      "grad_norm": 0.5669130898905272,
      "learning_rate": 1.9286967055228744e-05,
      "loss": 1.093,
      "step": 200
    },
    {
      "epoch": 0.21443514644351463,
      "grad_norm": 0.6665999257578469,
      "learning_rate": 1.9217687509094596e-05,
      "loss": 1.0759,
      "step": 205
    },
    {
      "epoch": 0.2196652719665272,
      "grad_norm": 0.6045661696203776,
      "learning_rate": 1.9145332908875984e-05,
      "loss": 1.0911,
      "step": 210
    },
    {
      "epoch": 0.22489539748953974,
      "grad_norm": 0.603921012093451,
      "learning_rate": 1.9069927392330742e-05,
      "loss": 1.1017,
      "step": 215
    },
    {
      "epoch": 0.2301255230125523,
      "grad_norm": 0.6981633005799726,
      "learning_rate": 1.89914961150135e-05,
      "loss": 1.082,
      "step": 220
    },
    {
      "epoch": 0.23535564853556484,
      "grad_norm": 0.5312778797944372,
      "learning_rate": 1.891006524188368e-05,
      "loss": 1.0797,
      "step": 225
    },
    {
      "epoch": 0.2405857740585774,
      "grad_norm": 0.6185885471490489,
      "learning_rate": 1.8825661938576784e-05,
      "loss": 1.085,
      "step": 230
    },
    {
      "epoch": 0.24581589958158995,
      "grad_norm": 0.6020254222541368,
      "learning_rate": 1.8738314362341828e-05,
      "loss": 1.0849,
      "step": 235
    },
    {
      "epoch": 0.2510460251046025,
      "grad_norm": 0.5477699753630808,
      "learning_rate": 1.864805165264799e-05,
      "loss": 1.0953,
      "step": 240
    },
    {
      "epoch": 0.25627615062761505,
      "grad_norm": 0.6850363564163309,
      "learning_rate": 1.8554903921463578e-05,
      "loss": 1.0889,
      "step": 245
    },
    {
      "epoch": 0.2615062761506276,
      "grad_norm": 0.5908540938126509,
      "learning_rate": 1.8458902243210558e-05,
      "loss": 1.0822,
      "step": 250
    },
    {
      "epoch": 0.26673640167364016,
      "grad_norm": 0.6783907257016317,
      "learning_rate": 1.8360078644398028e-05,
      "loss": 1.0788,
      "step": 255
    },
    {
      "epoch": 0.2719665271966527,
      "grad_norm": 0.5917684668235483,
      "learning_rate": 1.8258466092938042e-05,
      "loss": 1.101,
      "step": 260
    },
    {
      "epoch": 0.27719665271966526,
      "grad_norm": 0.5760642704541414,
      "learning_rate": 1.8154098487147405e-05,
      "loss": 1.0776,
      "step": 265
    },
    {
      "epoch": 0.2824267782426778,
      "grad_norm": 0.7830659265090936,
      "learning_rate": 1.8047010644439074e-05,
      "loss": 1.0838,
      "step": 270
    },
    {
      "epoch": 0.28765690376569036,
      "grad_norm": 0.5624324029328165,
      "learning_rate": 1.7937238289706927e-05,
      "loss": 1.0821,
      "step": 275
    },
    {
      "epoch": 0.2928870292887029,
      "grad_norm": 0.6142016239039022,
      "learning_rate": 1.7824818043407828e-05,
      "loss": 1.064,
      "step": 280
    },
    {
      "epoch": 0.29811715481171547,
      "grad_norm": 0.5924790097587224,
      "learning_rate": 1.7709787409344893e-05,
      "loss": 1.0749,
      "step": 285
    },
    {
      "epoch": 0.303347280334728,
      "grad_norm": 0.6228874527190459,
      "learning_rate": 1.75921847621561e-05,
      "loss": 1.0863,
      "step": 290
    },
    {
      "epoch": 0.30857740585774057,
      "grad_norm": 0.6526952399454782,
      "learning_rate": 1.7472049334512354e-05,
      "loss": 1.0752,
      "step": 295
    },
    {
      "epoch": 0.3138075313807531,
      "grad_norm": 0.5460673752687526,
      "learning_rate": 1.7349421204029343e-05,
      "loss": 1.0838,
      "step": 300
    },
    {
      "epoch": 0.3190376569037657,
      "grad_norm": 0.5952113239646497,
      "learning_rate": 1.7224341279897473e-05,
      "loss": 1.0937,
      "step": 305
    },
    {
      "epoch": 0.32426778242677823,
      "grad_norm": 0.6159873510239029,
      "learning_rate": 1.7096851289234448e-05,
      "loss": 1.0861,
      "step": 310
    },
    {
      "epoch": 0.3294979079497908,
      "grad_norm": 0.6664129454325257,
      "learning_rate": 1.6966993763164928e-05,
      "loss": 1.0863,
      "step": 315
    },
    {
      "epoch": 0.33472803347280333,
      "grad_norm": 0.5500624646942581,
      "learning_rate": 1.6834812022632e-05,
      "loss": 1.0915,
      "step": 320
    },
    {
      "epoch": 0.3399581589958159,
      "grad_norm": 0.5592633657451478,
      "learning_rate": 1.670035016394513e-05,
      "loss": 1.0875,
      "step": 325
    },
    {
      "epoch": 0.34518828451882844,
      "grad_norm": 0.5766618518095256,
      "learning_rate": 1.656365304406953e-05,
      "loss": 1.0813,
      "step": 330
    },
    {
      "epoch": 0.350418410041841,
      "grad_norm": 0.5879084875352091,
      "learning_rate": 1.6424766265661648e-05,
      "loss": 1.084,
      "step": 335
    },
    {
      "epoch": 0.35564853556485354,
      "grad_norm": 0.6329467200832875,
      "learning_rate": 1.6283736161855995e-05,
      "loss": 1.0772,
      "step": 340
    },
    {
      "epoch": 0.3608786610878661,
      "grad_norm": 0.5327051525622571,
      "learning_rate": 1.614060978080823e-05,
      "loss": 1.0927,
      "step": 345
    },
    {
      "epoch": 0.36610878661087864,
      "grad_norm": 0.5576373339458268,
      "learning_rate": 1.5995434869999723e-05,
      "loss": 1.079,
      "step": 350
    },
    {
      "epoch": 0.3713389121338912,
      "grad_norm": 0.5657380130894529,
      "learning_rate": 1.584825986030883e-05,
      "loss": 1.0876,
      "step": 355
    },
    {
      "epoch": 0.37656903765690375,
      "grad_norm": 0.5680127150496128,
      "learning_rate": 1.5699133849854164e-05,
      "loss": 1.0888,
      "step": 360
    },
    {
      "epoch": 0.3817991631799163,
      "grad_norm": 0.5721025801861312,
      "learning_rate": 1.5548106587615295e-05,
      "loss": 1.0669,
      "step": 365
    },
    {
      "epoch": 0.38702928870292885,
      "grad_norm": 0.5558402829959199,
      "learning_rate": 1.5395228456836298e-05,
      "loss": 1.084,
      "step": 370
    },
    {
      "epoch": 0.3922594142259414,
      "grad_norm": 0.6799222607741628,
      "learning_rate": 1.524055045821773e-05,
      "loss": 1.0915,
      "step": 375
    },
    {
      "epoch": 0.39748953974895396,
      "grad_norm": 0.6425506459433847,
      "learning_rate": 1.5084124192902612e-05,
      "loss": 1.0788,
      "step": 380
    },
    {
      "epoch": 0.4027196652719665,
      "grad_norm": 0.5897131592885358,
      "learning_rate": 1.4926001845262081e-05,
      "loss": 1.0831,
      "step": 385
    },
    {
      "epoch": 0.40794979079497906,
      "grad_norm": 0.5398425974996183,
      "learning_rate": 1.4766236165486526e-05,
      "loss": 1.0798,
      "step": 390
    },
    {
      "epoch": 0.4131799163179916,
      "grad_norm": 0.562078925489584,
      "learning_rate": 1.4604880451987895e-05,
      "loss": 1.0763,
      "step": 395
    },
    {
      "epoch": 0.41841004184100417,
      "grad_norm": 0.5164729530296905,
      "learning_rate": 1.4441988533619182e-05,
      "loss": 1.0735,
      "step": 400
    },
    {
      "epoch": 0.4236401673640167,
      "grad_norm": 0.5832567739541322,
      "learning_rate": 1.4277614751716911e-05,
      "loss": 1.0821,
      "step": 405
    },
    {
      "epoch": 0.42887029288702927,
      "grad_norm": 0.5652212930411926,
      "learning_rate": 1.4111813941972672e-05,
      "loss": 1.0961,
      "step": 410
    },
    {
      "epoch": 0.4341004184100418,
      "grad_norm": 0.6010203451282251,
      "learning_rate": 1.3944641416139737e-05,
      "loss": 1.0816,
      "step": 415
    },
    {
      "epoch": 0.4393305439330544,
      "grad_norm": 0.5630993268496297,
      "learning_rate": 1.3776152943580846e-05,
      "loss": 1.0914,
      "step": 420
    },
    {
      "epoch": 0.4445606694560669,
      "grad_norm": 0.5502230295962955,
      "learning_rate": 1.3606404732663365e-05,
      "loss": 1.0697,
      "step": 425
    },
    {
      "epoch": 0.4497907949790795,
      "grad_norm": 0.6076500784198701,
      "learning_rate": 1.3435453412007949e-05,
      "loss": 1.068,
      "step": 430
    },
    {
      "epoch": 0.45502092050209203,
      "grad_norm": 0.5106137967418161,
      "learning_rate": 1.3263356011597045e-05,
      "loss": 1.0758,
      "step": 435
    },
    {
      "epoch": 0.4602510460251046,
      "grad_norm": 0.5461351720790467,
      "learning_rate": 1.3090169943749475e-05,
      "loss": 1.0788,
      "step": 440
    },
    {
      "epoch": 0.46548117154811713,
      "grad_norm": 0.5139632832447684,
      "learning_rate": 1.2915952983967477e-05,
      "loss": 1.0876,
      "step": 445
    },
    {
      "epoch": 0.4707112970711297,
      "grad_norm": 0.551036425577886,
      "learning_rate": 1.2740763251662585e-05,
      "loss": 1.1003,
      "step": 450
    },
    {
      "epoch": 0.47594142259414224,
      "grad_norm": 0.6151223670580791,
      "learning_rate": 1.2564659190766788e-05,
      "loss": 1.0764,
      "step": 455
    },
    {
      "epoch": 0.4811715481171548,
      "grad_norm": 0.5552173831481524,
      "learning_rate": 1.2387699550235419e-05,
      "loss": 1.0858,
      "step": 460
    },
    {
      "epoch": 0.48640167364016734,
      "grad_norm": 0.4951589610620449,
      "learning_rate": 1.2209943364448298e-05,
      "loss": 1.0584,
      "step": 465
    },
    {
      "epoch": 0.4916317991631799,
      "grad_norm": 0.5475474226252068,
      "learning_rate": 1.2031449933515625e-05,
      "loss": 1.0687,
      "step": 470
    },
    {
      "epoch": 0.49686192468619245,
      "grad_norm": 0.5486935313880071,
      "learning_rate": 1.1852278803495267e-05,
      "loss": 1.0573,
      "step": 475
    },
    {
      "epoch": 0.502092050209205,
      "grad_norm": 0.6521150028759091,
      "learning_rate": 1.1672489746527979e-05,
      "loss": 1.0758,
      "step": 480
    },
    {
      "epoch": 0.5073221757322176,
      "grad_norm": 0.6367797364958308,
      "learning_rate": 1.1492142740897188e-05,
      "loss": 1.0677,
      "step": 485
    },
    {
      "epoch": 0.5125523012552301,
      "grad_norm": 0.5492169886313175,
      "learning_rate": 1.1311297951020028e-05,
      "loss": 1.0652,
      "step": 490
    },
    {
      "epoch": 0.5177824267782427,
      "grad_norm": 0.7238966917771047,
      "learning_rate": 1.1130015707376262e-05,
      "loss": 1.0715,
      "step": 495
    },
    {
      "epoch": 0.5230125523012552,
      "grad_norm": 0.5393341980142385,
      "learning_rate": 1.0948356486381829e-05,
      "loss": 1.0769,
      "step": 500
    },
    {
      "epoch": 0.5282426778242678,
      "grad_norm": 0.5559622163553696,
      "learning_rate": 1.0766380890213675e-05,
      "loss": 1.0806,
      "step": 505
    },
    {
      "epoch": 0.5334728033472803,
      "grad_norm": 0.5765233869935052,
      "learning_rate": 1.0584149626592662e-05,
      "loss": 1.056,
      "step": 510
    },
    {
      "epoch": 0.5387029288702929,
      "grad_norm": 0.6006558028355857,
      "learning_rate": 1.0401723488531238e-05,
      "loss": 1.0682,
      "step": 515
    },
    {
      "epoch": 0.5439330543933054,
      "grad_norm": 0.5396974095022056,
      "learning_rate": 1.0219163334052682e-05,
      "loss": 1.0817,
      "step": 520
    },
    {
      "epoch": 0.549163179916318,
      "grad_norm": 0.6140554510099683,
      "learning_rate": 1.003653006588863e-05,
      "loss": 1.0757,
      "step": 525
    },
    {
      "epoch": 0.5543933054393305,
      "grad_norm": 0.5087617662738954,
      "learning_rate": 9.853884611161709e-06,
      "loss": 1.047,
      "step": 530
    },
    {
      "epoch": 0.5596234309623431,
      "grad_norm": 0.5112466701463289,
      "learning_rate": 9.671287901060035e-06,
      "loss": 1.064,
      "step": 535
    },
    {
      "epoch": 0.5648535564853556,
      "grad_norm": 0.5633974067279639,
      "learning_rate": 9.48880085051033e-06,
      "loss": 1.0734,
      "step": 540
    },
    {
      "epoch": 0.5700836820083682,
      "grad_norm": 0.5557468531733452,
      "learning_rate": 9.306484337856497e-06,
      "loss": 1.0777,
      "step": 545
    },
    {
      "epoch": 0.5753138075313807,
      "grad_norm": 0.5271151067730627,
      "learning_rate": 9.124399184550377e-06,
      "loss": 1.0817,
      "step": 550
    },
    {
      "epoch": 0.5805439330543933,
      "grad_norm": 0.5448740669834791,
      "learning_rate": 8.94260613486149e-06,
      "loss": 1.0757,
      "step": 555
    },
    {
      "epoch": 0.5857740585774058,
      "grad_norm": 0.5155325719909827,
      "learning_rate": 8.76116583561252e-06,
      "loss": 1.0589,
      "step": 560
    },
    {
      "epoch": 0.5910041841004184,
      "grad_norm": 0.5378614772721012,
      "learning_rate": 8.580138815947336e-06,
      "loss": 1.0651,
      "step": 565
    },
    {
      "epoch": 0.5962343096234309,
      "grad_norm": 0.6326045695053654,
      "learning_rate": 8.399585467138215e-06,
      "loss": 1.0763,
      "step": 570
    },
    {
      "epoch": 0.6014644351464435,
      "grad_norm": 0.6213989271242717,
      "learning_rate": 8.219566022439128e-06,
      "loss": 1.0792,
      "step": 575
    },
    {
      "epoch": 0.606694560669456,
      "grad_norm": 0.5547238276934333,
      "learning_rate": 8.040140536991688e-06,
      "loss": 1.0767,
      "step": 580
    },
    {
      "epoch": 0.6119246861924686,
      "grad_norm": 0.6071589445644577,
      "learning_rate": 7.861368867790541e-06,
      "loss": 1.0792,
      "step": 585
    },
    {
      "epoch": 0.6171548117154811,
      "grad_norm": 0.5293708637565184,
      "learning_rate": 7.683310653714857e-06,
      "loss": 1.0723,
      "step": 590
    },
    {
      "epoch": 0.6223849372384938,
      "grad_norm": 0.6845571370634647,
      "learning_rate": 7.506025295632577e-06,
      "loss": 1.0646,
      "step": 595
    },
    {
      "epoch": 0.6276150627615062,
      "grad_norm": 0.5398870120099946,
      "learning_rate": 7.329571936584072e-06,
      "loss": 1.0566,
      "step": 600
    },
    {
      "epoch": 0.6328451882845189,
      "grad_norm": 0.5549729687939188,
      "learning_rate": 7.154009442051813e-06,
      "loss": 1.0562,
      "step": 605
    },
    {
      "epoch": 0.6380753138075314,
      "grad_norm": 0.5739399359103224,
      "learning_rate": 6.979396380322621e-06,
      "loss": 1.0744,
      "step": 610
    },
    {
      "epoch": 0.643305439330544,
      "grad_norm": 0.5432529949624418,
      "learning_rate": 6.805791002949085e-06,
      "loss": 1.0688,
      "step": 615
    },
    {
      "epoch": 0.6485355648535565,
      "grad_norm": 0.5014458185904755,
      "learning_rate": 6.63325122531663e-06,
      "loss": 1.0541,
      "step": 620
    },
    {
      "epoch": 0.6537656903765691,
      "grad_norm": 0.5197776266160257,
      "learning_rate": 6.461834607322719e-06,
      "loss": 1.0547,
      "step": 625
    },
    {
      "epoch": 0.6589958158995816,
      "grad_norm": 0.5327187159562262,
      "learning_rate": 6.291598334174685e-06,
      "loss": 1.0572,
      "step": 630
    },
    {
      "epoch": 0.6642259414225942,
      "grad_norm": 0.4979473409535948,
      "learning_rate": 6.122599197312525e-06,
      "loss": 1.0858,
      "step": 635
    },
    {
      "epoch": 0.6694560669456067,
      "grad_norm": 0.5036476989509128,
      "learning_rate": 5.954893575463064e-06,
      "loss": 1.0637,
      "step": 640
    },
    {
      "epoch": 0.6746861924686193,
      "grad_norm": 0.5228587745934291,
      "learning_rate": 5.788537415831825e-06,
      "loss": 1.055,
      "step": 645
    },
    {
      "epoch": 0.6799163179916318,
      "grad_norm": 0.501253029293146,
      "learning_rate": 5.623586215438813e-06,
      "loss": 1.0641,
      "step": 650
    },
    {
      "epoch": 0.6851464435146444,
      "grad_norm": 0.5050457318096031,
      "learning_rate": 5.460095002604533e-06,
      "loss": 1.0477,
      "step": 655
    },
    {
      "epoch": 0.6903765690376569,
      "grad_norm": 0.5360440296456983,
      "learning_rate": 5.298118318592316e-06,
      "loss": 1.0654,
      "step": 660
    },
    {
      "epoch": 0.6956066945606695,
      "grad_norm": 0.50207454781218,
      "learning_rate": 5.137710199413182e-06,
      "loss": 1.0685,
      "step": 665
    },
    {
      "epoch": 0.700836820083682,
      "grad_norm": 0.558550005312336,
      "learning_rate": 4.978924157799208e-06,
      "loss": 1.06,
      "step": 670
    },
    {
      "epoch": 0.7060669456066946,
      "grad_norm": 0.48483916829543067,
      "learning_rate": 4.821813165351495e-06,
      "loss": 1.0581,
      "step": 675
    },
    {
      "epoch": 0.7112970711297071,
      "grad_norm": 0.49927440612478785,
      "learning_rate": 4.666429634868651e-06,
      "loss": 1.0851,
      "step": 680
    },
    {
      "epoch": 0.7165271966527197,
      "grad_norm": 0.48707052941667256,
      "learning_rate": 4.512825402861686e-06,
      "loss": 1.0552,
      "step": 685
    },
    {
      "epoch": 0.7217573221757322,
      "grad_norm": 0.5683679089994613,
      "learning_rate": 4.361051712261173e-06,
      "loss": 1.0596,
      "step": 690
    },
    {
      "epoch": 0.7269874476987448,
      "grad_norm": 0.5340603274742174,
      "learning_rate": 4.211159195322404e-06,
      "loss": 1.0501,
      "step": 695
    },
    {
      "epoch": 0.7322175732217573,
      "grad_norm": 0.5206965463042575,
      "learning_rate": 4.063197856734295e-06,
      "loss": 1.0783,
      "step": 700
    },
    {
      "epoch": 0.7374476987447699,
      "grad_norm": 0.5029185927153098,
      "learning_rate": 3.917217056937649e-06,
      "loss": 1.0636,
      "step": 705
    },
    {
      "epoch": 0.7426778242677824,
      "grad_norm": 0.5154613274025447,
      "learning_rate": 3.773265495658309e-06,
      "loss": 1.0655,
      "step": 710
    },
    {
      "epoch": 0.747907949790795,
      "grad_norm": 0.4955231762184273,
      "learning_rate": 3.6313911956607827e-06,
      "loss": 1.0853,
      "step": 715
    },
    {
      "epoch": 0.7531380753138075,
      "grad_norm": 0.513151209360854,
      "learning_rate": 3.491641486727645e-06,
      "loss": 1.0684,
      "step": 720
    },
    {
      "epoch": 0.7583682008368201,
      "grad_norm": 0.5327297461286351,
      "learning_rate": 3.354062989870166e-06,
      "loss": 1.0504,
      "step": 725
    },
    {
      "epoch": 0.7635983263598326,
      "grad_norm": 0.5121990852058834,
      "learning_rate": 3.2187016017753714e-06,
      "loss": 1.066,
      "step": 730
    },
    {
      "epoch": 0.7688284518828452,
      "grad_norm": 0.5203341394376972,
      "learning_rate": 3.085602479494735e-06,
      "loss": 1.0602,
      "step": 735
    },
    {
      "epoch": 0.7740585774058577,
      "grad_norm": 0.5005436050566201,
      "learning_rate": 2.954810025379633e-06,
      "loss": 1.071,
      "step": 740
    },
    {
      "epoch": 0.7792887029288703,
      "grad_norm": 0.502346137505219,
      "learning_rate": 2.826367872268543e-06,
      "loss": 1.0517,
      "step": 745
    },
    {
      "epoch": 0.7845188284518828,
      "grad_norm": 0.5272314894368499,
      "learning_rate": 2.700318868930977e-06,
      "loss": 1.0683,
      "step": 750
    },
    {
      "epoch": 0.7897489539748954,
      "grad_norm": 0.5292107880965233,
      "learning_rate": 2.576705065772985e-06,
      "loss": 1.0543,
      "step": 755
    },
    {
      "epoch": 0.7949790794979079,
      "grad_norm": 0.4869311193186611,
      "learning_rate": 2.455567700808974e-06,
      "loss": 1.0437,
      "step": 760
    },
    {
      "epoch": 0.8002092050209205,
      "grad_norm": 0.5199956099665775,
      "learning_rate": 2.3369471859045558e-06,
      "loss": 1.0637,
      "step": 765
    },
    {
      "epoch": 0.805439330543933,
      "grad_norm": 0.5041918826572614,
      "learning_rate": 2.2208830932950175e-06,
      "loss": 1.0557,
      "step": 770
    },
    {
      "epoch": 0.8106694560669456,
      "grad_norm": 0.4992808348206519,
      "learning_rate": 2.107414142383857e-06,
      "loss": 1.0636,
      "step": 775
    },
    {
      "epoch": 0.8158995815899581,
      "grad_norm": 0.5351058053191133,
      "learning_rate": 1.996578186825876e-06,
      "loss": 1.0652,
      "step": 780
    },
    {
      "epoch": 0.8211297071129707,
      "grad_norm": 0.5071325680716784,
      "learning_rate": 1.8884122018990292e-06,
      "loss": 1.0575,
      "step": 785
    },
    {
      "epoch": 0.8263598326359832,
      "grad_norm": 0.5070573073765244,
      "learning_rate": 1.7829522721693738e-06,
      "loss": 1.0476,
      "step": 790
    },
    {
      "epoch": 0.8315899581589958,
      "grad_norm": 0.5283232962970443,
      "learning_rate": 1.680233579453101e-06,
      "loss": 1.0604,
      "step": 795
    },
    {
      "epoch": 0.8368200836820083,
      "grad_norm": 0.500454736041983,
      "learning_rate": 1.5802903910797584e-06,
      "loss": 1.0623,
      "step": 800
    },
    {
      "epoch": 0.8420502092050209,
      "grad_norm": 0.4991364513451987,
      "learning_rate": 1.483156048460561e-06,
      "loss": 1.0702,
      "step": 805
    },
    {
      "epoch": 0.8472803347280334,
      "grad_norm": 0.5156696669086099,
      "learning_rate": 1.3888629559655497e-06,
      "loss": 1.065,
      "step": 810
    },
    {
      "epoch": 0.852510460251046,
      "grad_norm": 0.495859957636518,
      "learning_rate": 1.2974425701133941e-06,
      "loss": 1.0519,
      "step": 815
    },
    {
      "epoch": 0.8577405857740585,
      "grad_norm": 0.506274711437886,
      "learning_rate": 1.2089253890773789e-06,
      "loss": 1.0592,
      "step": 820
    },
    {
      "epoch": 0.8629707112970711,
      "grad_norm": 0.49088086705348327,
      "learning_rate": 1.1233409425110952e-06,
      "loss": 1.0562,
      "step": 825
    },
    {
      "epoch": 0.8682008368200836,
      "grad_norm": 0.4880905900442435,
      "learning_rate": 1.0407177816972558e-06,
      "loss": 1.0469,
      "step": 830
    },
    {
      "epoch": 0.8734309623430963,
      "grad_norm": 0.5013767019673636,
      "learning_rate": 9.610834700228654e-07,
      "loss": 1.0776,
      "step": 835
    },
    {
      "epoch": 0.8786610878661087,
      "grad_norm": 0.5040841468024664,
      "learning_rate": 8.844645737839874e-07,
      "loss": 1.0568,
      "step": 840
    },
    {
      "epoch": 0.8838912133891214,
      "grad_norm": 0.4758757276308106,
      "learning_rate": 8.108866533231252e-07,
      "loss": 1.0427,
      "step": 845
    },
    {
      "epoch": 0.8891213389121339,
      "grad_norm": 0.5126814725002861,
      "learning_rate": 7.403742545021986e-07,
      "loss": 1.0559,
      "step": 850
    },
    {
      "epoch": 0.8943514644351465,
      "grad_norm": 0.4987765646224313,
      "learning_rate": 6.729509005139634e-07,
      "loss": 1.0415,
      "step": 855
    },
    {
      "epoch": 0.899581589958159,
      "grad_norm": 0.5014454129425115,
      "learning_rate": 6.086390840345758e-07,
      "loss": 1.05,
      "step": 860
    },
    {
      "epoch": 0.9048117154811716,
      "grad_norm": 0.48975109580877996,
      "learning_rate": 5.474602597199607e-07,
      "loss": 1.0704,
      "step": 865
    },
    {
      "epoch": 0.9100418410041841,
      "grad_norm": 0.4994701501642807,
      "learning_rate": 4.894348370484648e-07,
      "loss": 1.0691,
      "step": 870
    },
    {
      "epoch": 0.9152719665271967,
      "grad_norm": 0.521731055642247,
      "learning_rate": 4.34582173512168e-07,
      "loss": 1.0677,
      "step": 875
    },
    {
      "epoch": 0.9205020920502092,
      "grad_norm": 0.4885441287488048,
      "learning_rate": 3.8292056815916965e-07,
      "loss": 1.059,
      "step": 880
    },
    {
      "epoch": 0.9257322175732218,
      "grad_norm": 0.5068844610447545,
      "learning_rate": 3.344672554889428e-07,
      "loss": 1.0691,
      "step": 885
    },
    {
      "epoch": 0.9309623430962343,
      "grad_norm": 0.49577008576644055,
      "learning_rate": 2.8923839970285473e-07,
      "loss": 1.0592,
      "step": 890
    },
    {
      "epoch": 0.9361924686192469,
      "grad_norm": 0.4907561902253791,
      "learning_rate": 2.472490893117274e-07,
      "loss": 1.0458,
      "step": 895
    },
    {
      "epoch": 0.9414225941422594,
      "grad_norm": 0.5000871009012277,
      "learning_rate": 2.0851333210225032e-07,
      "loss": 1.0608,
      "step": 900
    },
    {
      "epoch": 0.946652719665272,
      "grad_norm": 0.48247289512404495,
      "learning_rate": 1.730440504639408e-07,
      "loss": 1.058,
      "step": 905
    },
    {
      "epoch": 0.9518828451882845,
      "grad_norm": 0.48826128008832265,
      "learning_rate": 1.408530770781813e-07,
      "loss": 1.0526,
      "step": 910
    },
    {
      "epoch": 0.9571129707112971,
      "grad_norm": 0.4969532853804826,
      "learning_rate": 1.1195115097079268e-07,
      "loss": 1.0685,
      "step": 915
    },
    {
      "epoch": 0.9623430962343096,
      "grad_norm": 0.48920579432423383,
      "learning_rate": 8.634791392946429e-08,
      "loss": 1.0676,
      "step": 920
    },
    {
      "epoch": 0.9675732217573222,
      "grad_norm": 0.49070804064035406,
      "learning_rate": 6.405190728721033e-08,
      "loss": 1.0455,
      "step": 925
    },
    {
      "epoch": 0.9728033472803347,
      "grad_norm": 0.47721070672447036,
      "learning_rate": 4.5070569072952485e-08,
      "loss": 1.061,
      "step": 930
    },
    {
      "epoch": 0.9780334728033473,
      "grad_norm": 0.4960618241854113,
      "learning_rate": 2.9410231530168087e-08,
      "loss": 1.0499,
      "step": 935
    },
    {
      "epoch": 0.9832635983263598,
      "grad_norm": 0.4727473862892527,
      "learning_rate": 1.7076119004429958e-08,
      "loss": 1.0763,
      "step": 940
    },
    {
      "epoch": 0.9884937238493724,
      "grad_norm": 0.4932911494908516,
      "learning_rate": 8.072346200544979e-09,
      "loss": 1.0672,
      "step": 945
    },
    {
      "epoch": 0.9937238493723849,
      "grad_norm": 0.48937555835484964,
      "learning_rate": 2.401916809872118e-09,
      "loss": 1.0629,
      "step": 950
    },
    {
      "epoch": 0.9989539748953975,
      "grad_norm": 0.47678060880513334,
      "learning_rate": 6.672250828620996e-11,
      "loss": 1.0626,
      "step": 955
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.0645579099655151,
      "eval_runtime": 287.2685,
      "eval_samples_per_second": 47.13,
      "eval_steps_per_second": 0.738,
      "step": 956
    },
    {
      "epoch": 1.0,
      "step": 956,
      "total_flos": 500925122740224.0,
      "train_loss": 1.0766646904426638,
      "train_runtime": 10839.8443,
      "train_samples_per_second": 11.286,
      "train_steps_per_second": 0.088
    }
  ],
  "logging_steps": 5,
  "max_steps": 956,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "total_flos": 500925122740224.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}