{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.9993805492463349,
"eval_steps": 500,
"global_step": 1210,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0008259343382201115,
"grad_norm": 23.24616747351659,
"learning_rate": 8.264462809917357e-08,
"loss": 1.4263,
"step": 1
},
{
"epoch": 0.0041296716911005575,
"grad_norm": 24.213699866017716,
"learning_rate": 4.132231404958678e-07,
"loss": 1.4129,
"step": 5
},
{
"epoch": 0.008259343382201115,
"grad_norm": 8.707304164936993,
"learning_rate": 8.264462809917356e-07,
"loss": 1.3289,
"step": 10
},
{
"epoch": 0.012389015073301672,
"grad_norm": 10.918691302163335,
"learning_rate": 1.2396694214876035e-06,
"loss": 1.1721,
"step": 15
},
{
"epoch": 0.01651868676440223,
"grad_norm": 2.87174085951705,
"learning_rate": 1.6528925619834712e-06,
"loss": 1.0219,
"step": 20
},
{
"epoch": 0.020648358455502787,
"grad_norm": 3.071874166303048,
"learning_rate": 2.066115702479339e-06,
"loss": 0.9724,
"step": 25
},
{
"epoch": 0.024778030146603344,
"grad_norm": 2.4133173682706213,
"learning_rate": 2.479338842975207e-06,
"loss": 0.9426,
"step": 30
},
{
"epoch": 0.028907701837703904,
"grad_norm": 2.236585592513117,
"learning_rate": 2.8925619834710743e-06,
"loss": 0.921,
"step": 35
},
{
"epoch": 0.03303737352880446,
"grad_norm": 2.385522667127509,
"learning_rate": 3.3057851239669424e-06,
"loss": 0.9121,
"step": 40
},
{
"epoch": 0.03716704521990502,
"grad_norm": 2.380746269584164,
"learning_rate": 3.71900826446281e-06,
"loss": 0.9001,
"step": 45
},
{
"epoch": 0.041296716911005574,
"grad_norm": 2.569197332815026,
"learning_rate": 4.132231404958678e-06,
"loss": 0.8867,
"step": 50
},
{
"epoch": 0.045426388602106134,
"grad_norm": 2.3723293989848213,
"learning_rate": 4.5454545454545455e-06,
"loss": 0.8707,
"step": 55
},
{
"epoch": 0.04955606029320669,
"grad_norm": 2.3355520193379142,
"learning_rate": 4.958677685950414e-06,
"loss": 0.8698,
"step": 60
},
{
"epoch": 0.05368573198430725,
"grad_norm": 2.375730929200025,
"learning_rate": 5.371900826446281e-06,
"loss": 0.859,
"step": 65
},
{
"epoch": 0.05781540367540781,
"grad_norm": 2.3481213761418838,
"learning_rate": 5.785123966942149e-06,
"loss": 0.8632,
"step": 70
},
{
"epoch": 0.06194507536650836,
"grad_norm": 2.3161171161194254,
"learning_rate": 6.198347107438017e-06,
"loss": 0.8462,
"step": 75
},
{
"epoch": 0.06607474705760892,
"grad_norm": 2.273903126512892,
"learning_rate": 6.611570247933885e-06,
"loss": 0.8593,
"step": 80
},
{
"epoch": 0.07020441874870947,
"grad_norm": 2.3651082830117787,
"learning_rate": 7.0247933884297525e-06,
"loss": 0.8388,
"step": 85
},
{
"epoch": 0.07433409043981004,
"grad_norm": 2.5780580351650584,
"learning_rate": 7.43801652892562e-06,
"loss": 0.8354,
"step": 90
},
{
"epoch": 0.0784637621309106,
"grad_norm": 2.5881003929304165,
"learning_rate": 7.851239669421489e-06,
"loss": 0.8177,
"step": 95
},
{
"epoch": 0.08259343382201115,
"grad_norm": 2.3662580429191262,
"learning_rate": 8.264462809917356e-06,
"loss": 0.8397,
"step": 100
},
{
"epoch": 0.0867231055131117,
"grad_norm": 2.4549245207596173,
"learning_rate": 8.677685950413224e-06,
"loss": 0.8209,
"step": 105
},
{
"epoch": 0.09085277720421227,
"grad_norm": 2.6986718605229454,
"learning_rate": 9.090909090909091e-06,
"loss": 0.8076,
"step": 110
},
{
"epoch": 0.09498244889531282,
"grad_norm": 2.6597400001383136,
"learning_rate": 9.50413223140496e-06,
"loss": 0.812,
"step": 115
},
{
"epoch": 0.09911212058641337,
"grad_norm": 2.6969489551453703,
"learning_rate": 9.917355371900828e-06,
"loss": 0.8064,
"step": 120
},
{
"epoch": 0.10324179227751394,
"grad_norm": 2.5801588887536204,
"learning_rate": 9.999667111219573e-06,
"loss": 0.7993,
"step": 125
},
{
"epoch": 0.1073714639686145,
"grad_norm": 2.5177823470227314,
"learning_rate": 9.998314826517564e-06,
"loss": 0.8123,
"step": 130
},
{
"epoch": 0.11150113565971505,
"grad_norm": 2.3846369879591767,
"learning_rate": 9.995922621477252e-06,
"loss": 0.8064,
"step": 135
},
{
"epoch": 0.11563080735081561,
"grad_norm": 2.37259551970217,
"learning_rate": 9.99249099380692e-06,
"loss": 0.7961,
"step": 140
},
{
"epoch": 0.11976047904191617,
"grad_norm": 2.1621998588898346,
"learning_rate": 9.988020657471078e-06,
"loss": 0.7924,
"step": 145
},
{
"epoch": 0.12389015073301672,
"grad_norm": 2.3342111944519623,
"learning_rate": 9.98251254254193e-06,
"loss": 0.7785,
"step": 150
},
{
"epoch": 0.12801982242411727,
"grad_norm": 2.3550905722953823,
"learning_rate": 9.97596779500586e-06,
"loss": 0.7695,
"step": 155
},
{
"epoch": 0.13214949411521784,
"grad_norm": 2.4721774789219344,
"learning_rate": 9.968387776525009e-06,
"loss": 0.7777,
"step": 160
},
{
"epoch": 0.1362791658063184,
"grad_norm": 2.390765566890305,
"learning_rate": 9.959774064153977e-06,
"loss": 0.7865,
"step": 165
},
{
"epoch": 0.14040883749741895,
"grad_norm": 2.392413139550164,
"learning_rate": 9.950128450011706e-06,
"loss": 0.7723,
"step": 170
},
{
"epoch": 0.14453850918851952,
"grad_norm": 2.24515023866806,
"learning_rate": 9.939452940908627e-06,
"loss": 0.755,
"step": 175
},
{
"epoch": 0.14866818087962008,
"grad_norm": 2.546933963480071,
"learning_rate": 9.927749757929125e-06,
"loss": 0.7538,
"step": 180
},
{
"epoch": 0.15279785257072062,
"grad_norm": 2.5390977331021136,
"learning_rate": 9.915021335969452e-06,
"loss": 0.7436,
"step": 185
},
{
"epoch": 0.1569275242618212,
"grad_norm": 2.435644472976208,
"learning_rate": 9.901270323231114e-06,
"loss": 0.7423,
"step": 190
},
{
"epoch": 0.16105719595292176,
"grad_norm": 2.3962427953474794,
"learning_rate": 9.886499580669917e-06,
"loss": 0.7402,
"step": 195
},
{
"epoch": 0.1651868676440223,
"grad_norm": 2.293192651317426,
"learning_rate": 9.870712181400726e-06,
"loss": 0.7328,
"step": 200
},
{
"epoch": 0.16931653933512286,
"grad_norm": 2.3625939065789523,
"learning_rate": 9.853911410058097e-06,
"loss": 0.7414,
"step": 205
},
{
"epoch": 0.1734462110262234,
"grad_norm": 2.1188762141799633,
"learning_rate": 9.836100762112887e-06,
"loss": 0.7324,
"step": 210
},
{
"epoch": 0.17757588271732397,
"grad_norm": 2.3459257676868885,
"learning_rate": 9.817283943145014e-06,
"loss": 0.7216,
"step": 215
},
{
"epoch": 0.18170555440842454,
"grad_norm": 2.353468899573234,
"learning_rate": 9.797464868072489e-06,
"loss": 0.7244,
"step": 220
},
{
"epoch": 0.18583522609952507,
"grad_norm": 2.204966096546786,
"learning_rate": 9.776647660336905e-06,
"loss": 0.7193,
"step": 225
},
{
"epoch": 0.18996489779062564,
"grad_norm": 2.310006498983248,
"learning_rate": 9.754836651045538e-06,
"loss": 0.7161,
"step": 230
},
{
"epoch": 0.1940945694817262,
"grad_norm": 2.5104871936969277,
"learning_rate": 9.732036378070243e-06,
"loss": 0.7044,
"step": 235
},
{
"epoch": 0.19822424117282675,
"grad_norm": 2.366571213288955,
"learning_rate": 9.708251585103324e-06,
"loss": 0.7004,
"step": 240
},
{
"epoch": 0.20235391286392732,
"grad_norm": 2.570331809825131,
"learning_rate": 9.683487220670595e-06,
"loss": 0.7158,
"step": 245
},
{
"epoch": 0.20648358455502788,
"grad_norm": 2.4620380694723147,
"learning_rate": 9.657748437101819e-06,
"loss": 0.7019,
"step": 250
},
{
"epoch": 0.21061325624612842,
"grad_norm": 2.3314886422942513,
"learning_rate": 9.631040589458742e-06,
"loss": 0.6989,
"step": 255
},
{
"epoch": 0.214742927937229,
"grad_norm": 2.2564618217324743,
"learning_rate": 9.603369234420944e-06,
"loss": 0.6947,
"step": 260
},
{
"epoch": 0.21887259962832956,
"grad_norm": 2.2447803221031903,
"learning_rate": 9.574740129129767e-06,
"loss": 0.6756,
"step": 265
},
{
"epoch": 0.2230022713194301,
"grad_norm": 2.590402933040932,
"learning_rate": 9.545159229990493e-06,
"loss": 0.6795,
"step": 270
},
{
"epoch": 0.22713194301053066,
"grad_norm": 2.3017458472761634,
"learning_rate": 9.514632691433108e-06,
"loss": 0.6905,
"step": 275
},
{
"epoch": 0.23126161470163123,
"grad_norm": 2.363184146884425,
"learning_rate": 9.483166864631837e-06,
"loss": 0.6572,
"step": 280
},
{
"epoch": 0.23539128639273177,
"grad_norm": 2.531694092570608,
"learning_rate": 9.450768296183764e-06,
"loss": 0.6694,
"step": 285
},
{
"epoch": 0.23952095808383234,
"grad_norm": 2.3202550588633017,
"learning_rate": 9.417443726746776e-06,
"loss": 0.6704,
"step": 290
},
{
"epoch": 0.2436506297749329,
"grad_norm": 2.3120309970976685,
"learning_rate": 9.383200089637143e-06,
"loss": 0.6572,
"step": 295
},
{
"epoch": 0.24778030146603344,
"grad_norm": 2.3498499961336687,
"learning_rate": 9.348044509387022e-06,
"loss": 0.6655,
"step": 300
},
{
"epoch": 0.251909973157134,
"grad_norm": 2.5440021629079665,
"learning_rate": 9.311984300262151e-06,
"loss": 0.6612,
"step": 305
},
{
"epoch": 0.25603964484823455,
"grad_norm": 2.358901339481344,
"learning_rate": 9.275026964740101e-06,
"loss": 0.6444,
"step": 310
},
{
"epoch": 0.2601693165393351,
"grad_norm": 2.206937338630286,
"learning_rate": 9.237180191949347e-06,
"loss": 0.6445,
"step": 315
},
{
"epoch": 0.2642989882304357,
"grad_norm": 2.226748345548228,
"learning_rate": 9.198451856069514e-06,
"loss": 0.6474,
"step": 320
},
{
"epoch": 0.26842865992153625,
"grad_norm": 2.366603978699467,
"learning_rate": 9.158850014693123e-06,
"loss": 0.6361,
"step": 325
},
{
"epoch": 0.2725583316126368,
"grad_norm": 2.2830136404441657,
"learning_rate": 9.118382907149164e-06,
"loss": 0.6395,
"step": 330
},
{
"epoch": 0.27668800330373733,
"grad_norm": 2.4029340921426727,
"learning_rate": 9.077058952788888e-06,
"loss": 0.6379,
"step": 335
},
{
"epoch": 0.2808176749948379,
"grad_norm": 2.4477206203283512,
"learning_rate": 9.034886749234112e-06,
"loss": 0.6272,
"step": 340
},
{
"epoch": 0.28494734668593846,
"grad_norm": 2.3054439086007243,
"learning_rate": 8.991875070588449e-06,
"loss": 0.6279,
"step": 345
},
{
"epoch": 0.28907701837703903,
"grad_norm": 2.2372770463589196,
"learning_rate": 8.948032865611823e-06,
"loss": 0.6276,
"step": 350
},
{
"epoch": 0.2932066900681396,
"grad_norm": 2.2567729960946803,
"learning_rate": 8.90336925585864e-06,
"loss": 0.6107,
"step": 355
},
{
"epoch": 0.29733636175924016,
"grad_norm": 2.2634696566360697,
"learning_rate": 8.857893533780015e-06,
"loss": 0.6222,
"step": 360
},
{
"epoch": 0.3014660334503407,
"grad_norm": 2.2158775605445573,
"learning_rate": 8.811615160790428e-06,
"loss": 0.6099,
"step": 365
},
{
"epoch": 0.30559570514144124,
"grad_norm": 2.359375453911521,
"learning_rate": 8.764543765299245e-06,
"loss": 0.6088,
"step": 370
},
{
"epoch": 0.3097253768325418,
"grad_norm": 2.2574003131115017,
"learning_rate": 8.716689140707488e-06,
"loss": 0.5978,
"step": 375
},
{
"epoch": 0.3138550485236424,
"grad_norm": 2.305475140301284,
"learning_rate": 8.668061243370273e-06,
"loss": 0.5955,
"step": 380
},
{
"epoch": 0.31798472021474294,
"grad_norm": 2.512743372685998,
"learning_rate": 8.61867019052535e-06,
"loss": 0.6035,
"step": 385
},
{
"epoch": 0.3221143919058435,
"grad_norm": 2.407216931553165,
"learning_rate": 8.568526258188172e-06,
"loss": 0.5836,
"step": 390
},
{
"epoch": 0.326244063596944,
"grad_norm": 2.2192688797123066,
"learning_rate": 8.517639879013918e-06,
"loss": 0.5888,
"step": 395
},
{
"epoch": 0.3303737352880446,
"grad_norm": 2.1358480481396267,
"learning_rate": 8.466021640126946e-06,
"loss": 0.5916,
"step": 400
},
{
"epoch": 0.33450340697914516,
"grad_norm": 2.1925271239115034,
"learning_rate": 8.413682280918094e-06,
"loss": 0.577,
"step": 405
},
{
"epoch": 0.3386330786702457,
"grad_norm": 2.2104260296514613,
"learning_rate": 8.3606326908103e-06,
"loss": 0.5921,
"step": 410
},
{
"epoch": 0.3427627503613463,
"grad_norm": 2.3351708112925946,
"learning_rate": 8.306883906993022e-06,
"loss": 0.5664,
"step": 415
},
{
"epoch": 0.3468924220524468,
"grad_norm": 2.129763886062084,
"learning_rate": 8.25244711212589e-06,
"loss": 0.575,
"step": 420
},
{
"epoch": 0.35102209374354737,
"grad_norm": 2.210580574947652,
"learning_rate": 8.197333632012123e-06,
"loss": 0.5797,
"step": 425
},
{
"epoch": 0.35515176543464794,
"grad_norm": 2.3295885927377387,
"learning_rate": 8.141554933242135e-06,
"loss": 0.569,
"step": 430
},
{
"epoch": 0.3592814371257485,
"grad_norm": 2.332203260771466,
"learning_rate": 8.08512262080787e-06,
"loss": 0.573,
"step": 435
},
{
"epoch": 0.36341110881684907,
"grad_norm": 2.13902847391916,
"learning_rate": 8.028048435688333e-06,
"loss": 0.5788,
"step": 440
},
{
"epoch": 0.36754078050794964,
"grad_norm": 2.3592904701751145,
"learning_rate": 7.970344252406832e-06,
"loss": 0.569,
"step": 445
},
{
"epoch": 0.37167045219905015,
"grad_norm": 2.4900138472719617,
"learning_rate": 7.912022076560426e-06,
"loss": 0.5697,
"step": 450
},
{
"epoch": 0.3758001238901507,
"grad_norm": 2.1966947004630826,
"learning_rate": 7.853094042322121e-06,
"loss": 0.5577,
"step": 455
},
{
"epoch": 0.3799297955812513,
"grad_norm": 2.4924992867127,
"learning_rate": 7.7935724099163e-06,
"loss": 0.5564,
"step": 460
},
{
"epoch": 0.38405946727235185,
"grad_norm": 2.3168755091791775,
"learning_rate": 7.733469563067928e-06,
"loss": 0.558,
"step": 465
},
{
"epoch": 0.3881891389634524,
"grad_norm": 2.090377992450065,
"learning_rate": 7.67279800642607e-06,
"loss": 0.5472,
"step": 470
},
{
"epoch": 0.392318810654553,
"grad_norm": 2.19832848896829,
"learning_rate": 7.611570362962247e-06,
"loss": 0.547,
"step": 475
},
{
"epoch": 0.3964484823456535,
"grad_norm": 2.139436532548822,
"learning_rate": 7.549799371344175e-06,
"loss": 0.5376,
"step": 480
},
{
"epoch": 0.40057815403675406,
"grad_norm": 2.1648898845336304,
"learning_rate": 7.487497883285428e-06,
"loss": 0.5435,
"step": 485
},
{
"epoch": 0.40470782572785463,
"grad_norm": 2.1434615200482403,
"learning_rate": 7.424678860871584e-06,
"loss": 0.5347,
"step": 490
},
{
"epoch": 0.4088374974189552,
"grad_norm": 2.028970353253071,
"learning_rate": 7.361355373863415e-06,
"loss": 0.5349,
"step": 495
},
{
"epoch": 0.41296716911005577,
"grad_norm": 2.1772432202945624,
"learning_rate": 7.297540596977663e-06,
"loss": 0.532,
"step": 500
},
{
"epoch": 0.41709684080115633,
"grad_norm": 2.2430530701967832,
"learning_rate": 7.233247807145989e-06,
"loss": 0.5385,
"step": 505
},
{
"epoch": 0.42122651249225684,
"grad_norm": 2.0619754497698035,
"learning_rate": 7.168490380752648e-06,
"loss": 0.5349,
"step": 510
},
{
"epoch": 0.4253561841833574,
"grad_norm": 2.1855473382543993,
"learning_rate": 7.103281790851483e-06,
"loss": 0.5233,
"step": 515
},
{
"epoch": 0.429485855874458,
"grad_norm": 2.2538669231669712,
"learning_rate": 7.037635604362786e-06,
"loss": 0.5283,
"step": 520
},
{
"epoch": 0.43361552756555855,
"grad_norm": 2.2135151111234794,
"learning_rate": 6.971565479250659e-06,
"loss": 0.5167,
"step": 525
},
{
"epoch": 0.4377451992566591,
"grad_norm": 2.383590881907924,
"learning_rate": 6.905085161681408e-06,
"loss": 0.5219,
"step": 530
},
{
"epoch": 0.4418748709477597,
"grad_norm": 2.3721647738464258,
"learning_rate": 6.838208483163601e-06,
"loss": 0.5174,
"step": 535
},
{
"epoch": 0.4460045426388602,
"grad_norm": 2.338909227744761,
"learning_rate": 6.770949357670358e-06,
"loss": 0.5273,
"step": 540
},
{
"epoch": 0.45013421432996076,
"grad_norm": 2.1495131685935163,
"learning_rate": 6.703321778744495e-06,
"loss": 0.5186,
"step": 545
},
{
"epoch": 0.4542638860210613,
"grad_norm": 2.010655198436141,
"learning_rate": 6.635339816587109e-06,
"loss": 0.5164,
"step": 550
},
{
"epoch": 0.4583935577121619,
"grad_norm": 2.1095925263837385,
"learning_rate": 6.5670176151302136e-06,
"loss": 0.4991,
"step": 555
},
{
"epoch": 0.46252322940326246,
"grad_norm": 2.2264319027764383,
"learning_rate": 6.4983693890940335e-06,
"loss": 0.5195,
"step": 560
},
{
"epoch": 0.46665290109436297,
"grad_norm": 2.1349760132456246,
"learning_rate": 6.4294094210295725e-06,
"loss": 0.504,
"step": 565
},
{
"epoch": 0.47078257278546354,
"grad_norm": 2.2896775214795033,
"learning_rate": 6.360152058347068e-06,
"loss": 0.5078,
"step": 570
},
{
"epoch": 0.4749122444765641,
"grad_norm": 2.1221923536777747,
"learning_rate": 6.290611710330957e-06,
"loss": 0.5059,
"step": 575
},
{
"epoch": 0.47904191616766467,
"grad_norm": 2.1068836258621406,
"learning_rate": 6.2208028451419575e-06,
"loss": 0.5083,
"step": 580
},
{
"epoch": 0.48317158785876524,
"grad_norm": 2.0972952063729475,
"learning_rate": 6.150739986806911e-06,
"loss": 0.4923,
"step": 585
},
{
"epoch": 0.4873012595498658,
"grad_norm": 2.233487162462348,
"learning_rate": 6.0804377121969985e-06,
"loss": 0.4953,
"step": 590
},
{
"epoch": 0.4914309312409663,
"grad_norm": 2.108166108174955,
"learning_rate": 6.009910647994956e-06,
"loss": 0.4918,
"step": 595
},
{
"epoch": 0.4955606029320669,
"grad_norm": 2.055777194538188,
"learning_rate": 5.939173467651942e-06,
"loss": 0.483,
"step": 600
},
{
"epoch": 0.49969027462316745,
"grad_norm": 2.1437965393063374,
"learning_rate": 5.8682408883346535e-06,
"loss": 0.4847,
"step": 605
},
{
"epoch": 0.503819946314268,
"grad_norm": 2.019446515641709,
"learning_rate": 5.7971276678633625e-06,
"loss": 0.4935,
"step": 610
},
{
"epoch": 0.5079496180053685,
"grad_norm": 2.1084072043678894,
"learning_rate": 5.725848601641492e-06,
"loss": 0.4736,
"step": 615
},
{
"epoch": 0.5120792896964691,
"grad_norm": 2.185992748799692,
"learning_rate": 5.654418519577369e-06,
"loss": 0.4743,
"step": 620
},
{
"epoch": 0.5162089613875697,
"grad_norm": 2.0909394304010416,
"learning_rate": 5.5828522829987965e-06,
"loss": 0.4808,
"step": 625
},
{
"epoch": 0.5203386330786702,
"grad_norm": 2.058129072039577,
"learning_rate": 5.511164781561096e-06,
"loss": 0.4727,
"step": 630
},
{
"epoch": 0.5244683047697708,
"grad_norm": 2.1781635883341837,
"learning_rate": 5.439370930149252e-06,
"loss": 0.4665,
"step": 635
},
{
"epoch": 0.5285979764608714,
"grad_norm": 2.132119418871688,
"learning_rate": 5.367485665774802e-06,
"loss": 0.4647,
"step": 640
},
{
"epoch": 0.5327276481519719,
"grad_norm": 2.0923424452918953,
"learning_rate": 5.295523944468137e-06,
"loss": 0.4752,
"step": 645
},
{
"epoch": 0.5368573198430725,
"grad_norm": 2.1500817775070704,
"learning_rate": 5.223500738166837e-06,
"loss": 0.4653,
"step": 650
},
{
"epoch": 0.5409869915341731,
"grad_norm": 2.3057702591417764,
"learning_rate": 5.1514310316006835e-06,
"loss": 0.459,
"step": 655
},
{
"epoch": 0.5451166632252736,
"grad_norm": 2.027442727638895,
"learning_rate": 5.07932981917404e-06,
"loss": 0.467,
"step": 660
},
{
"epoch": 0.5492463349163742,
"grad_norm": 2.003781400613107,
"learning_rate": 5.007212101846194e-06,
"loss": 0.4534,
"step": 665
},
{
"epoch": 0.5533760066074747,
"grad_norm": 2.1173198648186236,
"learning_rate": 4.935092884010347e-06,
"loss": 0.4582,
"step": 670
},
{
"epoch": 0.5575056782985752,
"grad_norm": 2.1197258530702303,
"learning_rate": 4.8629871703718844e-06,
"loss": 0.4484,
"step": 675
},
{
"epoch": 0.5616353499896758,
"grad_norm": 2.112938054969618,
"learning_rate": 4.7909099628265946e-06,
"loss": 0.4504,
"step": 680
},
{
"epoch": 0.5657650216807764,
"grad_norm": 2.1008936035245056,
"learning_rate": 4.718876257339444e-06,
"loss": 0.4485,
"step": 685
},
{
"epoch": 0.5698946933718769,
"grad_norm": 2.925269693235509,
"learning_rate": 4.646901040824622e-06,
"loss": 0.4523,
"step": 690
},
{
"epoch": 0.5740243650629775,
"grad_norm": 2.013226878250545,
"learning_rate": 4.574999288027437e-06,
"loss": 0.4544,
"step": 695
},
{
"epoch": 0.5781540367540781,
"grad_norm": 2.059493099572322,
"learning_rate": 4.503185958408767e-06,
"loss": 0.4467,
"step": 700
},
{
"epoch": 0.5822837084451786,
"grad_norm": 2.0049953659179716,
"learning_rate": 4.431475993032673e-06,
"loss": 0.441,
"step": 705
},
{
"epoch": 0.5864133801362792,
"grad_norm": 2.1512212806706255,
"learning_rate": 4.359884311457857e-06,
"loss": 0.4337,
"step": 710
},
{
"epoch": 0.5905430518273798,
"grad_norm": 2.1177615430048755,
"learning_rate": 4.2884258086335755e-06,
"loss": 0.4458,
"step": 715
},
{
"epoch": 0.5946727235184803,
"grad_norm": 2.008192166381139,
"learning_rate": 4.217115351800693e-06,
"loss": 0.4413,
"step": 720
},
{
"epoch": 0.5988023952095808,
"grad_norm": 2.063297374873059,
"learning_rate": 4.145967777398481e-06,
"loss": 0.4344,
"step": 725
},
{
"epoch": 0.6029320669006814,
"grad_norm": 2.004714733933455,
"learning_rate": 4.074997887977843e-06,
"loss": 0.4358,
"step": 730
},
{
"epoch": 0.6070617385917819,
"grad_norm": 2.025598614826924,
"learning_rate": 4.004220449121574e-06,
"loss": 0.4482,
"step": 735
},
{
"epoch": 0.6111914102828825,
"grad_norm": 1.938185383839704,
"learning_rate": 3.933650186372329e-06,
"loss": 0.4337,
"step": 740
},
{
"epoch": 0.615321081973983,
"grad_norm": 2.0495762719124104,
"learning_rate": 3.863301782168896e-06,
"loss": 0.4412,
"step": 745
},
{
"epoch": 0.6194507536650836,
"grad_norm": 2.028634933360134,
"learning_rate": 3.7931898727914723e-06,
"loss": 0.4293,
"step": 750
},
{
"epoch": 0.6235804253561842,
"grad_norm": 2.0029138326822404,
"learning_rate": 3.7233290453165127e-06,
"loss": 0.4304,
"step": 755
},
{
"epoch": 0.6277100970472848,
"grad_norm": 2.048375432552439,
"learning_rate": 3.6537338345818273e-06,
"loss": 0.4213,
"step": 760
},
{
"epoch": 0.6318397687383853,
"grad_norm": 2.0326809490611852,
"learning_rate": 3.5844187201625567e-06,
"loss": 0.424,
"step": 765
},
{
"epoch": 0.6359694404294859,
"grad_norm": 2.019159008104501,
"learning_rate": 3.5153981233586277e-06,
"loss": 0.4228,
"step": 770
},
{
"epoch": 0.6400991121205865,
"grad_norm": 2.09985714316485,
"learning_rate": 3.446686404194337e-06,
"loss": 0.4206,
"step": 775
},
{
"epoch": 0.644228783811687,
"grad_norm": 1.9414886074513862,
"learning_rate": 3.3782978584307035e-06,
"loss": 0.4173,
"step": 780
},
{
"epoch": 0.6483584555027875,
"grad_norm": 2.3066100655713755,
"learning_rate": 3.310246714591162e-06,
"loss": 0.4248,
"step": 785
},
{
"epoch": 0.652488127193888,
"grad_norm": 2.106953928933072,
"learning_rate": 3.2425471310012645e-06,
"loss": 0.4159,
"step": 790
},
{
"epoch": 0.6566177988849886,
"grad_norm": 2.197357084281967,
"learning_rate": 3.1752131928429787e-06,
"loss": 0.425,
"step": 795
},
{
"epoch": 0.6607474705760892,
"grad_norm": 1.9729477591043592,
"learning_rate": 3.1082589092242116e-06,
"loss": 0.4241,
"step": 800
},
{
"epoch": 0.6648771422671897,
"grad_norm": 2.0442009757093875,
"learning_rate": 3.041698210264149e-06,
"loss": 0.4238,
"step": 805
},
{
"epoch": 0.6690068139582903,
"grad_norm": 2.031799210389413,
"learning_rate": 2.9755449441950434e-06,
"loss": 0.4131,
"step": 810
},
{
"epoch": 0.6731364856493909,
"grad_norm": 2.070029206474213,
"learning_rate": 2.9098128744810245e-06,
"loss": 0.4159,
"step": 815
},
{
"epoch": 0.6772661573404914,
"grad_norm": 2.0394445933724414,
"learning_rate": 2.84451567695456e-06,
"loss": 0.42,
"step": 820
},
{
"epoch": 0.681395829031592,
"grad_norm": 2.0377377910207657,
"learning_rate": 2.7796669369711294e-06,
"loss": 0.4095,
"step": 825
},
{
"epoch": 0.6855255007226926,
"grad_norm": 2.0626337564803965,
"learning_rate": 2.715280146582752e-06,
"loss": 0.4069,
"step": 830
},
{
"epoch": 0.6896551724137931,
"grad_norm": 2.0040834447649454,
"learning_rate": 2.651368701730889e-06,
"loss": 0.41,
"step": 835
},
{
"epoch": 0.6937848441048936,
"grad_norm": 1.917041220777535,
"learning_rate": 2.5879458994593786e-06,
"loss": 0.4074,
"step": 840
},
{
"epoch": 0.6979145157959942,
"grad_norm": 2.0269668889490515,
"learning_rate": 2.5250249351479206e-06,
"loss": 0.4081,
"step": 845
},
{
"epoch": 0.7020441874870947,
"grad_norm": 1.9753046952791828,
"learning_rate": 2.4626188997667224e-06,
"loss": 0.4018,
"step": 850
},
{
"epoch": 0.7061738591781953,
"grad_norm": 2.014280374102551,
"learning_rate": 2.400740777152874e-06,
"loss": 0.4063,
"step": 855
},
{
"epoch": 0.7103035308692959,
"grad_norm": 2.026632120733398,
"learning_rate": 2.3394034413090015e-06,
"loss": 0.3955,
"step": 860
},
{
"epoch": 0.7144332025603964,
"grad_norm": 2.088872002071019,
"learning_rate": 2.278619653724781e-06,
"loss": 0.4038,
"step": 865
},
{
"epoch": 0.718562874251497,
"grad_norm": 2.096103572784026,
"learning_rate": 2.218402060721845e-06,
"loss": 0.3921,
"step": 870
},
{
"epoch": 0.7226925459425976,
"grad_norm": 1.9266065409655262,
"learning_rate": 2.1587631908226812e-06,
"loss": 0.4085,
"step": 875
},
{
"epoch": 0.7268222176336981,
"grad_norm": 1.9536655952796587,
"learning_rate": 2.09971545214401e-06,
"loss": 0.3965,
"step": 880
},
{
"epoch": 0.7309518893247987,
"grad_norm": 2.061222200669705,
"learning_rate": 2.04127112981522e-06,
"loss": 0.3941,
"step": 885
},
{
"epoch": 0.7350815610158993,
"grad_norm": 2.010203957849096,
"learning_rate": 1.9834423834224014e-06,
"loss": 0.3982,
"step": 890
},
{
"epoch": 0.7392112327069998,
"grad_norm": 1.9380026785117144,
"learning_rate": 1.926241244478496e-06,
"loss": 0.3965,
"step": 895
},
{
"epoch": 0.7433409043981003,
"grad_norm": 2.0416911208044244,
"learning_rate": 1.8696796139200852e-06,
"loss": 0.4015,
"step": 900
},
{
"epoch": 0.7474705760892009,
"grad_norm": 2.040772561659114,
"learning_rate": 1.8137692596313528e-06,
"loss": 0.3827,
"step": 905
},
{
"epoch": 0.7516002477803014,
"grad_norm": 1.9118935892662992,
"learning_rate": 1.7585218139957205e-06,
"loss": 0.3895,
"step": 910
},
{
"epoch": 0.755729919471402,
"grad_norm": 1.927575457582737,
"learning_rate": 1.7039487714756953e-06,
"loss": 0.3965,
"step": 915
},
{
"epoch": 0.7598595911625026,
"grad_norm": 1.9797748596618283,
"learning_rate": 1.6500614862213866e-06,
"loss": 0.3937,
"step": 920
},
{
"epoch": 0.7639892628536031,
"grad_norm": 2.05432833461262,
"learning_rate": 1.596871169708235e-06,
"loss": 0.3891,
"step": 925
},
{
"epoch": 0.7681189345447037,
"grad_norm": 1.9501310111794177,
"learning_rate": 1.54438888840441e-06,
"loss": 0.3801,
"step": 930
},
{
"epoch": 0.7722486062358043,
"grad_norm": 1.970421056222239,
"learning_rate": 1.4926255614683931e-06,
"loss": 0.3806,
"step": 935
},
{
"epoch": 0.7763782779269048,
"grad_norm": 2.1277167725190598,
"learning_rate": 1.4415919584771999e-06,
"loss": 0.3843,
"step": 940
},
{
"epoch": 0.7805079496180054,
"grad_norm": 1.9784981959186678,
"learning_rate": 1.3912986971857168e-06,
"loss": 0.3828,
"step": 945
},
{
"epoch": 0.784637621309106,
"grad_norm": 2.0003697611347975,
"learning_rate": 1.3417562413176405e-06,
"loss": 0.3859,
"step": 950
},
{
"epoch": 0.7887672930002065,
"grad_norm": 2.0785112893490485,
"learning_rate": 1.292974898388456e-06,
"loss": 0.3812,
"step": 955
},
{
"epoch": 0.792896964691307,
"grad_norm": 2.045681460940008,
"learning_rate": 1.2449648175609115e-06,
"loss": 0.3794,
"step": 960
},
{
"epoch": 0.7970266363824076,
"grad_norm": 2.006461021136274,
"learning_rate": 1.1977359875334454e-06,
"loss": 0.3867,
"step": 965
},
{
"epoch": 0.8011563080735081,
"grad_norm": 2.0404335899757187,
"learning_rate": 1.1512982344619904e-06,
"loss": 0.3845,
"step": 970
},
{
"epoch": 0.8052859797646087,
"grad_norm": 2.1247685897806186,
"learning_rate": 1.1056612199156093e-06,
"loss": 0.3838,
"step": 975
},
{
"epoch": 0.8094156514557093,
"grad_norm": 2.059455987291636,
"learning_rate": 1.0608344388663576e-06,
"loss": 0.3769,
"step": 980
},
{
"epoch": 0.8135453231468098,
"grad_norm": 2.010190102670995,
"learning_rate": 1.01682721771382e-06,
"loss": 0.38,
"step": 985
},
{
"epoch": 0.8176749948379104,
"grad_norm": 2.1310463662484755,
"learning_rate": 9.73648712344707e-07,
"loss": 0.3727,
"step": 990
},
{
"epoch": 0.821804666529011,
"grad_norm": 2.026722500514339,
"learning_rate": 9.313079062279429e-07,
"loss": 0.3775,
"step": 995
},
{
"epoch": 0.8259343382201115,
"grad_norm": 1.98494077041973,
"learning_rate": 8.898136085456127e-07,
"loss": 0.3795,
"step": 1000
},
{
"epoch": 0.8300640099112121,
"grad_norm": 2.0710248547928267,
"learning_rate": 8.491744523601741e-07,
"loss": 0.3789,
"step": 1005
},
{
"epoch": 0.8341936816023127,
"grad_norm": 2.052056544033471,
"learning_rate": 8.093988928183127e-07,
"loss": 0.376,
"step": 1010
},
{
"epoch": 0.8383233532934131,
"grad_norm": 2.0064833870293612,
"learning_rate": 7.70495205391818e-07,
"loss": 0.3728,
"step": 1015
},
{
"epoch": 0.8424530249845137,
"grad_norm": 2.032309328318966,
"learning_rate": 7.324714841558322e-07,
"loss": 0.3701,
"step": 1020
},
{
"epoch": 0.8465826966756143,
"grad_norm": 1.976437679829882,
"learning_rate": 6.953356401048466e-07,
"loss": 0.3796,
"step": 1025
},
{
"epoch": 0.8507123683667148,
"grad_norm": 2.0095046825383927,
"learning_rate": 6.590953995067812e-07,
"loss": 0.3718,
"step": 1030
},
{
"epoch": 0.8548420400578154,
"grad_norm": 2.1362491401601598,
"learning_rate": 6.237583022955079e-07,
"loss": 0.3669,
"step": 1035
},
{
"epoch": 0.858971711748916,
"grad_norm": 2.030103317522303,
"learning_rate": 5.89331700502136e-07,
"loss": 0.3674,
"step": 1040
},
{
"epoch": 0.8631013834400165,
"grad_norm": 2.0320527842019316,
"learning_rate": 5.558227567253832e-07,
"loss": 0.3679,
"step": 1045
},
{
"epoch": 0.8672310551311171,
"grad_norm": 2.1203806417251214,
"learning_rate": 5.232384426413772e-07,
"loss": 0.3697,
"step": 1050
},
{
"epoch": 0.8713607268222177,
"grad_norm": 1.9029660119177207,
"learning_rate": 4.915855375531642e-07,
"loss": 0.3708,
"step": 1055
},
{
"epoch": 0.8754903985133182,
"grad_norm": 1.9598889356044642,
"learning_rate": 4.608706269802471e-07,
"loss": 0.3641,
"step": 1060
},
{
"epoch": 0.8796200702044188,
"grad_norm": 2.015130495239755,
"learning_rate": 4.3110010128843747e-07,
"loss": 0.3659,
"step": 1065
},
{
"epoch": 0.8837497418955194,
"grad_norm": 1.9290620704097432,
"learning_rate": 4.022801543603194e-07,
"loss": 0.3648,
"step": 1070
},
{
"epoch": 0.8878794135866198,
"grad_norm": 2.131063629467497,
"learning_rate": 3.744167823065814e-07,
"loss": 0.3619,
"step": 1075
},
{
"epoch": 0.8920090852777204,
"grad_norm": 2.007895538821572,
"learning_rate": 3.4751578221850377e-07,
"loss": 0.3729,
"step": 1080
},
{
"epoch": 0.896138756968821,
"grad_norm": 2.0860597695484113,
"learning_rate": 3.2158275096184744e-07,
"loss": 0.3674,
"step": 1085
},
{
"epoch": 0.9002684286599215,
"grad_norm": 2.0200760244926044,
"learning_rate": 2.966230840124007e-07,
"loss": 0.3677,
"step": 1090
},
{
"epoch": 0.9043981003510221,
"grad_norm": 2.0288061050580053,
"learning_rate": 2.72641974333433e-07,
"loss": 0.3698,
"step": 1095
},
{
"epoch": 0.9085277720421226,
"grad_norm": 2.0237726744767,
"learning_rate": 2.4964441129527337e-07,
"loss": 0.3622,
"step": 1100
},
{
"epoch": 0.9126574437332232,
"grad_norm": 1.9537324233727318,
"learning_rate": 2.2763517963725169e-07,
"loss": 0.3678,
"step": 1105
},
{
"epoch": 0.9167871154243238,
"grad_norm": 2.0620765585117837,
"learning_rate": 2.0661885847221398e-07,
"loss": 0.3698,
"step": 1110
},
{
"epoch": 0.9209167871154244,
"grad_norm": 1.9666263090601819,
"learning_rate": 1.8659982033381928e-07,
"loss": 0.3615,
"step": 1115
},
{
"epoch": 0.9250464588065249,
"grad_norm": 2.0277290304841142,
"learning_rate": 1.6758223026681507e-07,
"loss": 0.3613,
"step": 1120
},
{
"epoch": 0.9291761304976255,
"grad_norm": 2.106138721077295,
"learning_rate": 1.4957004496048256e-07,
"loss": 0.3621,
"step": 1125
},
{
"epoch": 0.9333058021887259,
"grad_norm": 1.9942405326271708,
"learning_rate": 1.3256701192542853e-07,
"loss": 0.3659,
"step": 1130
},
{
"epoch": 0.9374354738798265,
"grad_norm": 1.9945899501208246,
"learning_rate": 1.1657666871390471e-07,
"loss": 0.3671,
"step": 1135
},
{
"epoch": 0.9415651455709271,
"grad_norm": 1.8785796566381439,
"learning_rate": 1.0160234218380305e-07,
"loss": 0.3593,
"step": 1140
},
{
"epoch": 0.9456948172620276,
"grad_norm": 2.025291462525694,
"learning_rate": 8.76471478064872e-08,
"loss": 0.3723,
"step": 1145
},
{
"epoch": 0.9498244889531282,
"grad_norm": 1.9718773659356506,
"learning_rate": 7.471398901860772e-08,
"loss": 0.3665,
"step": 1150
},
{
"epoch": 0.9539541606442288,
"grad_norm": 1.9769770127109265,
"learning_rate": 6.280555661802857e-08,
"loss": 0.3646,
"step": 1155
},
{
"epoch": 0.9580838323353293,
"grad_norm": 2.0267147785284716,
"learning_rate": 5.192432820399718e-08,
"loss": 0.3657,
"step": 1160
},
{
"epoch": 0.9622135040264299,
"grad_norm": 2.07581128095638,
"learning_rate": 4.207256766166845e-08,
"loss": 0.3565,
"step": 1165
},
{
"epoch": 0.9663431757175305,
"grad_norm": 2.0255359776120065,
"learning_rate": 3.3252324691093185e-08,
"loss": 0.3616,
"step": 1170
},
{
"epoch": 0.970472847408631,
"grad_norm": 2.0868262303852396,
"learning_rate": 2.546543438077087e-08,
"loss": 0.3683,
"step": 1175
},
{
"epoch": 0.9746025190997316,
"grad_norm": 2.0003015972698504,
"learning_rate": 1.8713516825851207e-08,
"loss": 0.371,
"step": 1180
},
{
"epoch": 0.9787321907908322,
"grad_norm": 1.9827933568645475,
"learning_rate": 1.2997976791065403e-08,
"loss": 0.3549,
"step": 1185
},
{
"epoch": 0.9828618624819326,
"grad_norm": 2.020348792034948,
"learning_rate": 8.32000341846162e-09,
"loss": 0.3645,
"step": 1190
},
{
"epoch": 0.9869915341730332,
"grad_norm": 1.9998961679855551,
"learning_rate": 4.6805699799967744e-09,
"loss": 0.3624,
"step": 1195
},
{
"epoch": 0.9911212058641338,
"grad_norm": 1.952421429252492,
"learning_rate": 2.0804336750429588e-09,
"loss": 0.3733,
"step": 1200
},
{
"epoch": 0.9952508775552343,
"grad_norm": 2.101729255399416,
"learning_rate": 5.201354728517905e-10,
"loss": 0.3664,
"step": 1205
},
{
"epoch": 0.9993805492463349,
"grad_norm": 2.1247280097277037,
"learning_rate": 0.0,
"loss": 0.3624,
"step": 1210
},
{
"epoch": 0.9993805492463349,
"eval_loss": 0.3181818723678589,
"eval_runtime": 1.1842,
"eval_samples_per_second": 2.533,
"eval_steps_per_second": 0.844,
"step": 1210
},
{
"epoch": 0.9993805492463349,
"step": 1210,
"total_flos": 253297038458880.0,
"train_loss": 0.5432274522860188,
"train_runtime": 27235.9072,
"train_samples_per_second": 1.422,
"train_steps_per_second": 0.044
}
],
"logging_steps": 5,
"max_steps": 1210,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 100,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 253297038458880.0,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}