OpenR1-Qwen-7B-SFT-Instruct / trainer_state.json
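For reference, a minimal sketch of how a trainer state like the one below can be inspected: it assumes the JSON is saved locally as trainer_state.json and that matplotlib is installed (both assumptions, not part of the original file), and plots the logged training loss and learning rate from log_history.

```python
# Minimal sketch (assumed local path "trainer_state.json"): load the Trainer
# state and plot the logged loss and learning-rate schedule over global steps.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:  # path is an assumption
    state = json.load(f)

# Keep only training-log entries; each holds "step", "loss",
# "learning_rate", and "grad_norm".
logs = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in logs]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True)
ax_loss.plot(steps, [e["loss"] for e in logs])
ax_loss.set_ylabel("train loss")
ax_lr.plot(steps, [e["learning_rate"] for e in logs])
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("global step")
plt.tight_layout()
plt.show()
```

The raw trainer_state.json follows.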
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.9976711690731253,
"eval_steps": 500,
"global_step": 3219,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.004657661853749418,
"grad_norm": 6.8637231643996905,
"learning_rate": 7.763975155279503e-07,
"loss": 0.8093,
"step": 5
},
{
"epoch": 0.009315323707498836,
"grad_norm": 4.729727723722731,
"learning_rate": 1.5527950310559006e-06,
"loss": 0.7776,
"step": 10
},
{
"epoch": 0.013972985561248253,
"grad_norm": 1.8424610027102168,
"learning_rate": 2.329192546583851e-06,
"loss": 0.6952,
"step": 15
},
{
"epoch": 0.018630647414997672,
"grad_norm": 3.025250026849208,
"learning_rate": 3.1055900621118013e-06,
"loss": 0.6745,
"step": 20
},
{
"epoch": 0.02328830926874709,
"grad_norm": 1.2524031096906298,
"learning_rate": 3.881987577639752e-06,
"loss": 0.6287,
"step": 25
},
{
"epoch": 0.027945971122496506,
"grad_norm": 0.9007115948452182,
"learning_rate": 4.658385093167702e-06,
"loss": 0.6003,
"step": 30
},
{
"epoch": 0.032603632976245925,
"grad_norm": 0.73097263427958,
"learning_rate": 5.4347826086956525e-06,
"loss": 0.5803,
"step": 35
},
{
"epoch": 0.037261294829995344,
"grad_norm": 0.594517021000242,
"learning_rate": 6.2111801242236025e-06,
"loss": 0.56,
"step": 40
},
{
"epoch": 0.04191895668374476,
"grad_norm": 0.6204146287775623,
"learning_rate": 6.9875776397515525e-06,
"loss": 0.5224,
"step": 45
},
{
"epoch": 0.04657661853749418,
"grad_norm": 0.49713032953276337,
"learning_rate": 7.763975155279503e-06,
"loss": 0.524,
"step": 50
},
{
"epoch": 0.05123428039124359,
"grad_norm": 0.47323409551062934,
"learning_rate": 8.540372670807453e-06,
"loss": 0.5123,
"step": 55
},
{
"epoch": 0.05589194224499301,
"grad_norm": 0.5140578006747811,
"learning_rate": 9.316770186335403e-06,
"loss": 0.5081,
"step": 60
},
{
"epoch": 0.06054960409874243,
"grad_norm": 0.42753521836981706,
"learning_rate": 1.0093167701863353e-05,
"loss": 0.5013,
"step": 65
},
{
"epoch": 0.06520726595249185,
"grad_norm": 0.5311644721427882,
"learning_rate": 1.0869565217391305e-05,
"loss": 0.4812,
"step": 70
},
{
"epoch": 0.06986492780624126,
"grad_norm": 0.5470354496253793,
"learning_rate": 1.1645962732919255e-05,
"loss": 0.4836,
"step": 75
},
{
"epoch": 0.07452258965999069,
"grad_norm": 0.5113258877886584,
"learning_rate": 1.2422360248447205e-05,
"loss": 0.4952,
"step": 80
},
{
"epoch": 0.0791802515137401,
"grad_norm": 0.5057618561684237,
"learning_rate": 1.3198757763975155e-05,
"loss": 0.4745,
"step": 85
},
{
"epoch": 0.08383791336748952,
"grad_norm": 0.5001007612786222,
"learning_rate": 1.3975155279503105e-05,
"loss": 0.4816,
"step": 90
},
{
"epoch": 0.08849557522123894,
"grad_norm": 0.5243509105553624,
"learning_rate": 1.4751552795031057e-05,
"loss": 0.4866,
"step": 95
},
{
"epoch": 0.09315323707498836,
"grad_norm": 0.5439557087344651,
"learning_rate": 1.5527950310559007e-05,
"loss": 0.4777,
"step": 100
},
{
"epoch": 0.09781089892873777,
"grad_norm": 0.7178104364022548,
"learning_rate": 1.630434782608696e-05,
"loss": 0.4807,
"step": 105
},
{
"epoch": 0.10246856078248719,
"grad_norm": 0.546495949795031,
"learning_rate": 1.7080745341614907e-05,
"loss": 0.4702,
"step": 110
},
{
"epoch": 0.10712622263623661,
"grad_norm": 0.5702506336081349,
"learning_rate": 1.785714285714286e-05,
"loss": 0.4705,
"step": 115
},
{
"epoch": 0.11178388448998602,
"grad_norm": 0.6325662707265639,
"learning_rate": 1.8633540372670807e-05,
"loss": 0.482,
"step": 120
},
{
"epoch": 0.11644154634373545,
"grad_norm": 0.5823217978669636,
"learning_rate": 1.940993788819876e-05,
"loss": 0.4627,
"step": 125
},
{
"epoch": 0.12109920819748486,
"grad_norm": 0.5479236260881903,
"learning_rate": 2.0186335403726707e-05,
"loss": 0.4659,
"step": 130
},
{
"epoch": 0.1257568700512343,
"grad_norm": 0.5486635640187691,
"learning_rate": 2.096273291925466e-05,
"loss": 0.4731,
"step": 135
},
{
"epoch": 0.1304145319049837,
"grad_norm": 0.6809487670952536,
"learning_rate": 2.173913043478261e-05,
"loss": 0.4671,
"step": 140
},
{
"epoch": 0.1350721937587331,
"grad_norm": 0.8855677856193335,
"learning_rate": 2.2515527950310562e-05,
"loss": 0.4774,
"step": 145
},
{
"epoch": 0.13972985561248252,
"grad_norm": 0.7587714736025803,
"learning_rate": 2.329192546583851e-05,
"loss": 0.4727,
"step": 150
},
{
"epoch": 0.14438751746623196,
"grad_norm": 0.6005017329879899,
"learning_rate": 2.4068322981366462e-05,
"loss": 0.4553,
"step": 155
},
{
"epoch": 0.14904517931998137,
"grad_norm": 0.7164655541521237,
"learning_rate": 2.484472049689441e-05,
"loss": 0.4624,
"step": 160
},
{
"epoch": 0.1537028411737308,
"grad_norm": 0.8225497978940743,
"learning_rate": 2.5621118012422362e-05,
"loss": 0.4637,
"step": 165
},
{
"epoch": 0.1583605030274802,
"grad_norm": 0.7574940659782605,
"learning_rate": 2.639751552795031e-05,
"loss": 0.4622,
"step": 170
},
{
"epoch": 0.1630181648812296,
"grad_norm": 0.6043591001115144,
"learning_rate": 2.7173913043478262e-05,
"loss": 0.4736,
"step": 175
},
{
"epoch": 0.16767582673497905,
"grad_norm": 0.6123842566150755,
"learning_rate": 2.795031055900621e-05,
"loss": 0.4688,
"step": 180
},
{
"epoch": 0.17233348858872846,
"grad_norm": 0.6573572807933171,
"learning_rate": 2.8726708074534165e-05,
"loss": 0.4674,
"step": 185
},
{
"epoch": 0.17699115044247787,
"grad_norm": 0.6569054098256357,
"learning_rate": 2.9503105590062114e-05,
"loss": 0.4614,
"step": 190
},
{
"epoch": 0.18164881229622729,
"grad_norm": 0.5669164627540662,
"learning_rate": 3.0279503105590062e-05,
"loss": 0.4673,
"step": 195
},
{
"epoch": 0.18630647414997673,
"grad_norm": 0.4741303438237046,
"learning_rate": 3.1055900621118014e-05,
"loss": 0.4421,
"step": 200
},
{
"epoch": 0.19096413600372614,
"grad_norm": 0.8316092944810866,
"learning_rate": 3.183229813664597e-05,
"loss": 0.4635,
"step": 205
},
{
"epoch": 0.19562179785747555,
"grad_norm": 0.5588719774621962,
"learning_rate": 3.260869565217392e-05,
"loss": 0.4534,
"step": 210
},
{
"epoch": 0.20027945971122496,
"grad_norm": 0.5722614945495902,
"learning_rate": 3.3385093167701865e-05,
"loss": 0.4649,
"step": 215
},
{
"epoch": 0.20493712156497437,
"grad_norm": 0.47183393664254064,
"learning_rate": 3.4161490683229814e-05,
"loss": 0.461,
"step": 220
},
{
"epoch": 0.2095947834187238,
"grad_norm": 0.4713075366570546,
"learning_rate": 3.493788819875777e-05,
"loss": 0.4641,
"step": 225
},
{
"epoch": 0.21425244527247322,
"grad_norm": 0.465225134633574,
"learning_rate": 3.571428571428572e-05,
"loss": 0.4624,
"step": 230
},
{
"epoch": 0.21891010712622264,
"grad_norm": 0.5103756288362324,
"learning_rate": 3.6490683229813665e-05,
"loss": 0.458,
"step": 235
},
{
"epoch": 0.22356776897997205,
"grad_norm": 0.4948340330469265,
"learning_rate": 3.7267080745341614e-05,
"loss": 0.457,
"step": 240
},
{
"epoch": 0.22822543083372146,
"grad_norm": 0.5735125415902487,
"learning_rate": 3.804347826086957e-05,
"loss": 0.4489,
"step": 245
},
{
"epoch": 0.2328830926874709,
"grad_norm": 0.6082116394835448,
"learning_rate": 3.881987577639752e-05,
"loss": 0.4471,
"step": 250
},
{
"epoch": 0.2375407545412203,
"grad_norm": 0.544434229947204,
"learning_rate": 3.9596273291925465e-05,
"loss": 0.4577,
"step": 255
},
{
"epoch": 0.24219841639496972,
"grad_norm": 0.48166421763412753,
"learning_rate": 4.0372670807453414e-05,
"loss": 0.4621,
"step": 260
},
{
"epoch": 0.24685607824871914,
"grad_norm": 0.6714168210349829,
"learning_rate": 4.114906832298137e-05,
"loss": 0.4569,
"step": 265
},
{
"epoch": 0.2515137401024686,
"grad_norm": 0.4407546736840361,
"learning_rate": 4.192546583850932e-05,
"loss": 0.4468,
"step": 270
},
{
"epoch": 0.25617140195621796,
"grad_norm": 0.4764022562959415,
"learning_rate": 4.270186335403727e-05,
"loss": 0.4627,
"step": 275
},
{
"epoch": 0.2608290638099674,
"grad_norm": 0.698057577633327,
"learning_rate": 4.347826086956522e-05,
"loss": 0.4516,
"step": 280
},
{
"epoch": 0.26548672566371684,
"grad_norm": 0.5447358555717683,
"learning_rate": 4.425465838509317e-05,
"loss": 0.4512,
"step": 285
},
{
"epoch": 0.2701443875174662,
"grad_norm": 0.7771738299993547,
"learning_rate": 4.5031055900621124e-05,
"loss": 0.4513,
"step": 290
},
{
"epoch": 0.27480204937121566,
"grad_norm": 0.6245714550577889,
"learning_rate": 4.580745341614907e-05,
"loss": 0.4451,
"step": 295
},
{
"epoch": 0.27945971122496505,
"grad_norm": 0.43790665434878945,
"learning_rate": 4.658385093167702e-05,
"loss": 0.4567,
"step": 300
},
{
"epoch": 0.2841173730787145,
"grad_norm": 0.5618750399986439,
"learning_rate": 4.736024844720497e-05,
"loss": 0.4623,
"step": 305
},
{
"epoch": 0.2887750349324639,
"grad_norm": 0.5231305670185117,
"learning_rate": 4.8136645962732924e-05,
"loss": 0.4578,
"step": 310
},
{
"epoch": 0.2934326967862133,
"grad_norm": 0.5361127188910412,
"learning_rate": 4.891304347826087e-05,
"loss": 0.4489,
"step": 315
},
{
"epoch": 0.29809035863996275,
"grad_norm": 0.4732170257669621,
"learning_rate": 4.968944099378882e-05,
"loss": 0.4563,
"step": 320
},
{
"epoch": 0.30274802049371213,
"grad_norm": 0.4501399431086543,
"learning_rate": 4.994822229892993e-05,
"loss": 0.4592,
"step": 325
},
{
"epoch": 0.3074056823474616,
"grad_norm": 0.6834758906392285,
"learning_rate": 4.986192613047981e-05,
"loss": 0.4575,
"step": 330
},
{
"epoch": 0.312063344201211,
"grad_norm": 0.5362283122366314,
"learning_rate": 4.977562996202969e-05,
"loss": 0.4427,
"step": 335
},
{
"epoch": 0.3167210060549604,
"grad_norm": 0.5938350871835776,
"learning_rate": 4.968933379357957e-05,
"loss": 0.4559,
"step": 340
},
{
"epoch": 0.32137866790870984,
"grad_norm": 0.6179062918046965,
"learning_rate": 4.9603037625129445e-05,
"loss": 0.4452,
"step": 345
},
{
"epoch": 0.3260363297624592,
"grad_norm": 0.503244246267909,
"learning_rate": 4.951674145667933e-05,
"loss": 0.4478,
"step": 350
},
{
"epoch": 0.33069399161620866,
"grad_norm": 0.4864627905979029,
"learning_rate": 4.94304452882292e-05,
"loss": 0.4452,
"step": 355
},
{
"epoch": 0.3353516534699581,
"grad_norm": 0.42350634326603037,
"learning_rate": 4.934414911977908e-05,
"loss": 0.4536,
"step": 360
},
{
"epoch": 0.3400093153237075,
"grad_norm": 0.415079269533796,
"learning_rate": 4.9257852951328965e-05,
"loss": 0.4535,
"step": 365
},
{
"epoch": 0.3446669771774569,
"grad_norm": 0.4443162625377456,
"learning_rate": 4.917155678287884e-05,
"loss": 0.4632,
"step": 370
},
{
"epoch": 0.3493246390312063,
"grad_norm": 0.4272948200404277,
"learning_rate": 4.908526061442872e-05,
"loss": 0.4573,
"step": 375
},
{
"epoch": 0.35398230088495575,
"grad_norm": 0.44159526857296133,
"learning_rate": 4.89989644459786e-05,
"loss": 0.4439,
"step": 380
},
{
"epoch": 0.3586399627387052,
"grad_norm": 0.45615653427248887,
"learning_rate": 4.891266827752848e-05,
"loss": 0.4391,
"step": 385
},
{
"epoch": 0.36329762459245457,
"grad_norm": 0.4876335217281381,
"learning_rate": 4.882637210907836e-05,
"loss": 0.4442,
"step": 390
},
{
"epoch": 0.367955286446204,
"grad_norm": 0.4058309541849574,
"learning_rate": 4.874007594062824e-05,
"loss": 0.439,
"step": 395
},
{
"epoch": 0.37261294829995345,
"grad_norm": 481.39439612564684,
"learning_rate": 4.865377977217811e-05,
"loss": 0.541,
"step": 400
},
{
"epoch": 0.37727061015370283,
"grad_norm": 0.47735090439252476,
"learning_rate": 4.8567483603728e-05,
"loss": 0.4431,
"step": 405
},
{
"epoch": 0.3819282720074523,
"grad_norm": 0.491303661903526,
"learning_rate": 4.8481187435277875e-05,
"loss": 0.4474,
"step": 410
},
{
"epoch": 0.38658593386120166,
"grad_norm": 0.41603407985945356,
"learning_rate": 4.839489126682776e-05,
"loss": 0.4458,
"step": 415
},
{
"epoch": 0.3912435957149511,
"grad_norm": 0.45507287709983313,
"learning_rate": 4.830859509837763e-05,
"loss": 0.4362,
"step": 420
},
{
"epoch": 0.39590125756870054,
"grad_norm": 0.46531195249792473,
"learning_rate": 4.822229892992751e-05,
"loss": 0.4517,
"step": 425
},
{
"epoch": 0.4005589194224499,
"grad_norm": 0.4902178812723198,
"learning_rate": 4.8136002761477395e-05,
"loss": 0.4446,
"step": 430
},
{
"epoch": 0.40521658127619936,
"grad_norm": 0.4847211890046387,
"learning_rate": 4.804970659302727e-05,
"loss": 0.448,
"step": 435
},
{
"epoch": 0.40987424312994875,
"grad_norm": 0.4394393821073969,
"learning_rate": 4.796341042457715e-05,
"loss": 0.4561,
"step": 440
},
{
"epoch": 0.4145319049836982,
"grad_norm": 0.43517418417308185,
"learning_rate": 4.787711425612703e-05,
"loss": 0.4355,
"step": 445
},
{
"epoch": 0.4191895668374476,
"grad_norm": 0.4547774758547587,
"learning_rate": 4.779081808767691e-05,
"loss": 0.4398,
"step": 450
},
{
"epoch": 0.423847228691197,
"grad_norm": 0.6065747366678503,
"learning_rate": 4.770452191922679e-05,
"loss": 0.4574,
"step": 455
},
{
"epoch": 0.42850489054494645,
"grad_norm": 0.38522024825247947,
"learning_rate": 4.761822575077667e-05,
"loss": 0.4382,
"step": 460
},
{
"epoch": 0.43316255239869583,
"grad_norm": 0.3876206766802,
"learning_rate": 4.753192958232654e-05,
"loss": 0.4473,
"step": 465
},
{
"epoch": 0.43782021425244527,
"grad_norm": 0.49840542772899743,
"learning_rate": 4.744563341387643e-05,
"loss": 0.44,
"step": 470
},
{
"epoch": 0.4424778761061947,
"grad_norm": 0.4023787142304999,
"learning_rate": 4.7359337245426306e-05,
"loss": 0.4479,
"step": 475
},
{
"epoch": 0.4471355379599441,
"grad_norm": 0.3285212211122666,
"learning_rate": 4.7273041076976184e-05,
"loss": 0.434,
"step": 480
},
{
"epoch": 0.45179319981369354,
"grad_norm": 0.3961706976207895,
"learning_rate": 4.718674490852606e-05,
"loss": 0.436,
"step": 485
},
{
"epoch": 0.4564508616674429,
"grad_norm": 0.396382884763074,
"learning_rate": 4.710044874007594e-05,
"loss": 0.4389,
"step": 490
},
{
"epoch": 0.46110852352119236,
"grad_norm": 0.3583524612839967,
"learning_rate": 4.7014152571625826e-05,
"loss": 0.4322,
"step": 495
},
{
"epoch": 0.4657661853749418,
"grad_norm": 0.3988323726838178,
"learning_rate": 4.6927856403175704e-05,
"loss": 0.4384,
"step": 500
},
{
"epoch": 0.4704238472286912,
"grad_norm": 0.34259386355774835,
"learning_rate": 4.684156023472558e-05,
"loss": 0.4331,
"step": 505
},
{
"epoch": 0.4750815090824406,
"grad_norm": 0.36356797696670407,
"learning_rate": 4.675526406627546e-05,
"loss": 0.4289,
"step": 510
},
{
"epoch": 0.47973917093619,
"grad_norm": 0.3901286598881084,
"learning_rate": 4.666896789782534e-05,
"loss": 0.4396,
"step": 515
},
{
"epoch": 0.48439683278993945,
"grad_norm": 0.370989030940083,
"learning_rate": 4.658267172937522e-05,
"loss": 0.4372,
"step": 520
},
{
"epoch": 0.4890544946436889,
"grad_norm": 0.39965598904455285,
"learning_rate": 4.64963755609251e-05,
"loss": 0.4455,
"step": 525
},
{
"epoch": 0.49371215649743827,
"grad_norm": 0.46668846506387435,
"learning_rate": 4.641007939247497e-05,
"loss": 0.4326,
"step": 530
},
{
"epoch": 0.4983698183511877,
"grad_norm": 0.582132009650607,
"learning_rate": 4.632378322402486e-05,
"loss": 0.4397,
"step": 535
},
{
"epoch": 0.5030274802049371,
"grad_norm": 0.4121012916753587,
"learning_rate": 4.6237487055574736e-05,
"loss": 0.4417,
"step": 540
},
{
"epoch": 0.5076851420586865,
"grad_norm": 0.3576279653758138,
"learning_rate": 4.6151190887124615e-05,
"loss": 0.4313,
"step": 545
},
{
"epoch": 0.5123428039124359,
"grad_norm": 0.3572720255717242,
"learning_rate": 4.606489471867449e-05,
"loss": 0.4457,
"step": 550
},
{
"epoch": 0.5170004657661854,
"grad_norm": 0.547766271076225,
"learning_rate": 4.597859855022437e-05,
"loss": 0.4345,
"step": 555
},
{
"epoch": 0.5216581276199348,
"grad_norm": 0.40390301623067965,
"learning_rate": 4.589230238177425e-05,
"loss": 0.4391,
"step": 560
},
{
"epoch": 0.5263157894736842,
"grad_norm": 0.360272003554287,
"learning_rate": 4.5806006213324134e-05,
"loss": 0.4378,
"step": 565
},
{
"epoch": 0.5309734513274337,
"grad_norm": 0.43764565431712693,
"learning_rate": 4.5719710044874006e-05,
"loss": 0.437,
"step": 570
},
{
"epoch": 0.5356311131811831,
"grad_norm": 0.35655227981632326,
"learning_rate": 4.563341387642389e-05,
"loss": 0.4338,
"step": 575
},
{
"epoch": 0.5402887750349324,
"grad_norm": 0.40521948022517407,
"learning_rate": 4.554711770797377e-05,
"loss": 0.4373,
"step": 580
},
{
"epoch": 0.5449464368886818,
"grad_norm": 0.3883906171159338,
"learning_rate": 4.546082153952365e-05,
"loss": 0.4338,
"step": 585
},
{
"epoch": 0.5496040987424313,
"grad_norm": 0.35569113255509077,
"learning_rate": 4.5374525371073526e-05,
"loss": 0.4477,
"step": 590
},
{
"epoch": 0.5542617605961807,
"grad_norm": 0.4258008397028241,
"learning_rate": 4.5288229202623404e-05,
"loss": 0.4381,
"step": 595
},
{
"epoch": 0.5589194224499301,
"grad_norm": 0.413449691817557,
"learning_rate": 4.520193303417328e-05,
"loss": 0.4434,
"step": 600
},
{
"epoch": 0.5635770843036796,
"grad_norm": 0.3430385595750802,
"learning_rate": 4.511563686572317e-05,
"loss": 0.4187,
"step": 605
},
{
"epoch": 0.568234746157429,
"grad_norm": 0.4042699670473815,
"learning_rate": 4.5029340697273045e-05,
"loss": 0.4363,
"step": 610
},
{
"epoch": 0.5728924080111784,
"grad_norm": 0.43480499151771707,
"learning_rate": 4.4943044528822923e-05,
"loss": 0.4286,
"step": 615
},
{
"epoch": 0.5775500698649279,
"grad_norm": 0.4652132056153088,
"learning_rate": 4.48567483603728e-05,
"loss": 0.4315,
"step": 620
},
{
"epoch": 0.5822077317186772,
"grad_norm": 0.36729682033831373,
"learning_rate": 4.477045219192268e-05,
"loss": 0.441,
"step": 625
},
{
"epoch": 0.5868653935724266,
"grad_norm": 0.350174779447523,
"learning_rate": 4.4684156023472565e-05,
"loss": 0.4283,
"step": 630
},
{
"epoch": 0.5915230554261761,
"grad_norm": 0.3988360790981172,
"learning_rate": 4.4597859855022436e-05,
"loss": 0.4277,
"step": 635
},
{
"epoch": 0.5961807172799255,
"grad_norm": 0.364409160363076,
"learning_rate": 4.4511563686572315e-05,
"loss": 0.4323,
"step": 640
},
{
"epoch": 0.6008383791336749,
"grad_norm": 0.4049958520953601,
"learning_rate": 4.44252675181222e-05,
"loss": 0.4319,
"step": 645
},
{
"epoch": 0.6054960409874243,
"grad_norm": 0.401092404832482,
"learning_rate": 4.433897134967208e-05,
"loss": 0.4298,
"step": 650
},
{
"epoch": 0.6101537028411738,
"grad_norm": 0.34234109065820656,
"learning_rate": 4.4252675181221956e-05,
"loss": 0.4357,
"step": 655
},
{
"epoch": 0.6148113646949231,
"grad_norm": 0.31842990925114173,
"learning_rate": 4.4166379012771834e-05,
"loss": 0.436,
"step": 660
},
{
"epoch": 0.6194690265486725,
"grad_norm": 0.32769017770224784,
"learning_rate": 4.408008284432171e-05,
"loss": 0.4308,
"step": 665
},
{
"epoch": 0.624126688402422,
"grad_norm": 0.34233152696132674,
"learning_rate": 4.39937866758716e-05,
"loss": 0.4332,
"step": 670
},
{
"epoch": 0.6287843502561714,
"grad_norm": 0.35536872530734553,
"learning_rate": 4.3907490507421476e-05,
"loss": 0.4258,
"step": 675
},
{
"epoch": 0.6334420121099208,
"grad_norm": 0.34026249002596654,
"learning_rate": 4.382119433897135e-05,
"loss": 0.4295,
"step": 680
},
{
"epoch": 0.6380996739636703,
"grad_norm": 0.41190356822364965,
"learning_rate": 4.373489817052123e-05,
"loss": 0.4406,
"step": 685
},
{
"epoch": 0.6427573358174197,
"grad_norm": 0.49421175909812803,
"learning_rate": 4.364860200207111e-05,
"loss": 0.4322,
"step": 690
},
{
"epoch": 0.6474149976711691,
"grad_norm": 0.3603660811499903,
"learning_rate": 4.356230583362099e-05,
"loss": 0.4302,
"step": 695
},
{
"epoch": 0.6520726595249184,
"grad_norm": 0.3125363535924705,
"learning_rate": 4.347600966517087e-05,
"loss": 0.4275,
"step": 700
},
{
"epoch": 0.6567303213786679,
"grad_norm": 0.30041905324736656,
"learning_rate": 4.3389713496720745e-05,
"loss": 0.4249,
"step": 705
},
{
"epoch": 0.6613879832324173,
"grad_norm": 0.345152816790548,
"learning_rate": 4.330341732827063e-05,
"loss": 0.4362,
"step": 710
},
{
"epoch": 0.6660456450861667,
"grad_norm": 0.37585421147119386,
"learning_rate": 4.321712115982051e-05,
"loss": 0.4306,
"step": 715
},
{
"epoch": 0.6707033069399162,
"grad_norm": 0.3555850152506123,
"learning_rate": 4.3130824991370387e-05,
"loss": 0.4225,
"step": 720
},
{
"epoch": 0.6753609687936656,
"grad_norm": 0.37642144873402433,
"learning_rate": 4.3044528822920265e-05,
"loss": 0.427,
"step": 725
},
{
"epoch": 0.680018630647415,
"grad_norm": 0.39601172436978427,
"learning_rate": 4.295823265447014e-05,
"loss": 0.4334,
"step": 730
},
{
"epoch": 0.6846762925011645,
"grad_norm": 0.30751773394560866,
"learning_rate": 4.287193648602002e-05,
"loss": 0.4221,
"step": 735
},
{
"epoch": 0.6893339543549138,
"grad_norm": 0.2987838209757166,
"learning_rate": 4.27856403175699e-05,
"loss": 0.4264,
"step": 740
},
{
"epoch": 0.6939916162086632,
"grad_norm": 0.33988559830495146,
"learning_rate": 4.269934414911978e-05,
"loss": 0.4233,
"step": 745
},
{
"epoch": 0.6986492780624126,
"grad_norm": 0.4343934463688201,
"learning_rate": 4.261304798066966e-05,
"loss": 0.4294,
"step": 750
},
{
"epoch": 0.7033069399161621,
"grad_norm": 0.37360553714178396,
"learning_rate": 4.252675181221954e-05,
"loss": 0.4205,
"step": 755
},
{
"epoch": 0.7079646017699115,
"grad_norm": 0.3518865734884153,
"learning_rate": 4.244045564376942e-05,
"loss": 0.4234,
"step": 760
},
{
"epoch": 0.7126222636236609,
"grad_norm": 0.3831682013029351,
"learning_rate": 4.23541594753193e-05,
"loss": 0.4306,
"step": 765
},
{
"epoch": 0.7172799254774104,
"grad_norm": 0.41718671802772334,
"learning_rate": 4.2267863306869176e-05,
"loss": 0.4302,
"step": 770
},
{
"epoch": 0.7219375873311598,
"grad_norm": 0.4835433327884266,
"learning_rate": 4.2181567138419054e-05,
"loss": 0.4193,
"step": 775
},
{
"epoch": 0.7265952491849091,
"grad_norm": 0.38728558689060666,
"learning_rate": 4.209527096996894e-05,
"loss": 0.4348,
"step": 780
},
{
"epoch": 0.7312529110386586,
"grad_norm": 0.34227700976916714,
"learning_rate": 4.200897480151881e-05,
"loss": 0.4282,
"step": 785
},
{
"epoch": 0.735910572892408,
"grad_norm": 0.36104964606844725,
"learning_rate": 4.1922678633068695e-05,
"loss": 0.4207,
"step": 790
},
{
"epoch": 0.7405682347461574,
"grad_norm": 0.3693699727676046,
"learning_rate": 4.1836382464618573e-05,
"loss": 0.4271,
"step": 795
},
{
"epoch": 0.7452258965999069,
"grad_norm": 0.3252684155467639,
"learning_rate": 4.175008629616845e-05,
"loss": 0.4234,
"step": 800
},
{
"epoch": 0.7498835584536563,
"grad_norm": 0.3135302717292993,
"learning_rate": 4.166379012771833e-05,
"loss": 0.4382,
"step": 805
},
{
"epoch": 0.7545412203074057,
"grad_norm": 0.4218553599465769,
"learning_rate": 4.157749395926821e-05,
"loss": 0.4261,
"step": 810
},
{
"epoch": 0.759198882161155,
"grad_norm": 0.3411302170331546,
"learning_rate": 4.1491197790818086e-05,
"loss": 0.4276,
"step": 815
},
{
"epoch": 0.7638565440149045,
"grad_norm": 0.3255638030179571,
"learning_rate": 4.140490162236797e-05,
"loss": 0.4272,
"step": 820
},
{
"epoch": 0.7685142058686539,
"grad_norm": 0.33969362630966377,
"learning_rate": 4.131860545391785e-05,
"loss": 0.4155,
"step": 825
},
{
"epoch": 0.7731718677224033,
"grad_norm": 0.3026933967426526,
"learning_rate": 4.123230928546773e-05,
"loss": 0.4227,
"step": 830
},
{
"epoch": 0.7778295295761528,
"grad_norm": 0.3772435054159253,
"learning_rate": 4.1146013117017606e-05,
"loss": 0.4302,
"step": 835
},
{
"epoch": 0.7824871914299022,
"grad_norm": 0.30135072993267004,
"learning_rate": 4.1059716948567484e-05,
"loss": 0.4158,
"step": 840
},
{
"epoch": 0.7871448532836516,
"grad_norm": 0.33346125457132675,
"learning_rate": 4.097342078011737e-05,
"loss": 0.4212,
"step": 845
},
{
"epoch": 0.7918025151374011,
"grad_norm": 0.282888962852797,
"learning_rate": 4.088712461166724e-05,
"loss": 0.4233,
"step": 850
},
{
"epoch": 0.7964601769911505,
"grad_norm": 0.28794732707700255,
"learning_rate": 4.080082844321712e-05,
"loss": 0.4153,
"step": 855
},
{
"epoch": 0.8011178388448998,
"grad_norm": 0.31081358602252523,
"learning_rate": 4.0714532274767004e-05,
"loss": 0.4178,
"step": 860
},
{
"epoch": 0.8057755006986492,
"grad_norm": 0.3191261560402839,
"learning_rate": 4.062823610631688e-05,
"loss": 0.4205,
"step": 865
},
{
"epoch": 0.8104331625523987,
"grad_norm": 0.3142363512759355,
"learning_rate": 4.054193993786676e-05,
"loss": 0.4154,
"step": 870
},
{
"epoch": 0.8150908244061481,
"grad_norm": 0.3364363550776446,
"learning_rate": 4.045564376941664e-05,
"loss": 0.4246,
"step": 875
},
{
"epoch": 0.8197484862598975,
"grad_norm": 0.2747088202515003,
"learning_rate": 4.036934760096652e-05,
"loss": 0.4094,
"step": 880
},
{
"epoch": 0.824406148113647,
"grad_norm": 0.301772676161776,
"learning_rate": 4.02830514325164e-05,
"loss": 0.4241,
"step": 885
},
{
"epoch": 0.8290638099673964,
"grad_norm": 0.3016593662082275,
"learning_rate": 4.019675526406628e-05,
"loss": 0.4227,
"step": 890
},
{
"epoch": 0.8337214718211458,
"grad_norm": 0.33138408886331777,
"learning_rate": 4.011045909561615e-05,
"loss": 0.4232,
"step": 895
},
{
"epoch": 0.8383791336748952,
"grad_norm": 0.2786490697206707,
"learning_rate": 4.0024162927166037e-05,
"loss": 0.4182,
"step": 900
},
{
"epoch": 0.8430367955286446,
"grad_norm": 0.29649339366141875,
"learning_rate": 3.9937866758715915e-05,
"loss": 0.4211,
"step": 905
},
{
"epoch": 0.847694457382394,
"grad_norm": 0.3222105000197682,
"learning_rate": 3.98515705902658e-05,
"loss": 0.427,
"step": 910
},
{
"epoch": 0.8523521192361434,
"grad_norm": 0.3341349853429822,
"learning_rate": 3.976527442181567e-05,
"loss": 0.4189,
"step": 915
},
{
"epoch": 0.8570097810898929,
"grad_norm": 0.34063605933590474,
"learning_rate": 3.967897825336555e-05,
"loss": 0.4263,
"step": 920
},
{
"epoch": 0.8616674429436423,
"grad_norm": 0.30926572421159065,
"learning_rate": 3.9592682084915434e-05,
"loss": 0.4208,
"step": 925
},
{
"epoch": 0.8663251047973917,
"grad_norm": 0.30756608599962054,
"learning_rate": 3.950638591646531e-05,
"loss": 0.4219,
"step": 930
},
{
"epoch": 0.8709827666511412,
"grad_norm": 0.2998808322519085,
"learning_rate": 3.942008974801519e-05,
"loss": 0.4115,
"step": 935
},
{
"epoch": 0.8756404285048905,
"grad_norm": 0.3388893716296557,
"learning_rate": 3.933379357956507e-05,
"loss": 0.4116,
"step": 940
},
{
"epoch": 0.8802980903586399,
"grad_norm": 0.3130985744320382,
"learning_rate": 3.924749741111495e-05,
"loss": 0.4152,
"step": 945
},
{
"epoch": 0.8849557522123894,
"grad_norm": 0.30426689978117843,
"learning_rate": 3.916120124266483e-05,
"loss": 0.4048,
"step": 950
},
{
"epoch": 0.8896134140661388,
"grad_norm": 0.2886330489061831,
"learning_rate": 3.9074905074214704e-05,
"loss": 0.4173,
"step": 955
},
{
"epoch": 0.8942710759198882,
"grad_norm": 1.289716934090849,
"learning_rate": 3.898860890576458e-05,
"loss": 0.426,
"step": 960
},
{
"epoch": 0.8989287377736377,
"grad_norm": 0.2921968866113809,
"learning_rate": 3.890231273731447e-05,
"loss": 0.4179,
"step": 965
},
{
"epoch": 0.9035863996273871,
"grad_norm": 0.3069214291008881,
"learning_rate": 3.8816016568864345e-05,
"loss": 0.4211,
"step": 970
},
{
"epoch": 0.9082440614811365,
"grad_norm": 0.30084230027853326,
"learning_rate": 3.8729720400414224e-05,
"loss": 0.4159,
"step": 975
},
{
"epoch": 0.9129017233348858,
"grad_norm": 0.30750699298398976,
"learning_rate": 3.86434242319641e-05,
"loss": 0.419,
"step": 980
},
{
"epoch": 0.9175593851886353,
"grad_norm": 0.3004553953211842,
"learning_rate": 3.855712806351398e-05,
"loss": 0.4235,
"step": 985
},
{
"epoch": 0.9222170470423847,
"grad_norm": 0.2875475095748544,
"learning_rate": 3.8470831895063865e-05,
"loss": 0.3969,
"step": 990
},
{
"epoch": 0.9268747088961341,
"grad_norm": 0.30257179456095296,
"learning_rate": 3.838453572661374e-05,
"loss": 0.4122,
"step": 995
},
{
"epoch": 0.9315323707498836,
"grad_norm": 0.3325193460041206,
"learning_rate": 3.8298239558163615e-05,
"loss": 0.4166,
"step": 1000
},
{
"epoch": 0.936190032603633,
"grad_norm": 0.2877923670180117,
"learning_rate": 3.82119433897135e-05,
"loss": 0.4238,
"step": 1005
},
{
"epoch": 0.9408476944573824,
"grad_norm": 0.3098748526987193,
"learning_rate": 3.812564722126338e-05,
"loss": 0.4142,
"step": 1010
},
{
"epoch": 0.9455053563111319,
"grad_norm": 0.27274244149432386,
"learning_rate": 3.8039351052813256e-05,
"loss": 0.4157,
"step": 1015
},
{
"epoch": 0.9501630181648812,
"grad_norm": 0.2853193572022348,
"learning_rate": 3.7953054884363134e-05,
"loss": 0.4235,
"step": 1020
},
{
"epoch": 0.9548206800186306,
"grad_norm": 0.379133486073973,
"learning_rate": 3.786675871591301e-05,
"loss": 0.4157,
"step": 1025
},
{
"epoch": 0.95947834187238,
"grad_norm": 0.32560077911033863,
"learning_rate": 3.77804625474629e-05,
"loss": 0.4078,
"step": 1030
},
{
"epoch": 0.9641360037261295,
"grad_norm": 0.2715212772469474,
"learning_rate": 3.7694166379012776e-05,
"loss": 0.4108,
"step": 1035
},
{
"epoch": 0.9687936655798789,
"grad_norm": 0.3202605689576077,
"learning_rate": 3.7607870210562654e-05,
"loss": 0.4102,
"step": 1040
},
{
"epoch": 0.9734513274336283,
"grad_norm": 0.2922435947862865,
"learning_rate": 3.752157404211253e-05,
"loss": 0.4111,
"step": 1045
},
{
"epoch": 0.9781089892873778,
"grad_norm": 0.2957561031633552,
"learning_rate": 3.743527787366241e-05,
"loss": 0.4113,
"step": 1050
},
{
"epoch": 0.9827666511411272,
"grad_norm": 0.31048108883800707,
"learning_rate": 3.734898170521229e-05,
"loss": 0.4122,
"step": 1055
},
{
"epoch": 0.9874243129948765,
"grad_norm": 0.2953875423271191,
"learning_rate": 3.7262685536762174e-05,
"loss": 0.4152,
"step": 1060
},
{
"epoch": 0.992081974848626,
"grad_norm": 0.286137496896298,
"learning_rate": 3.7176389368312045e-05,
"loss": 0.4121,
"step": 1065
},
{
"epoch": 0.9967396367023754,
"grad_norm": 0.2831385238087909,
"learning_rate": 3.709009319986193e-05,
"loss": 0.4043,
"step": 1070
},
{
"epoch": 1.00093153237075,
"grad_norm": 0.4684349512233376,
"learning_rate": 3.700379703141181e-05,
"loss": 0.414,
"step": 1075
},
{
"epoch": 1.0055891942244992,
"grad_norm": 0.3538765149793847,
"learning_rate": 3.6917500862961687e-05,
"loss": 0.3541,
"step": 1080
},
{
"epoch": 1.0102468560782487,
"grad_norm": 0.30560129560141963,
"learning_rate": 3.6831204694511565e-05,
"loss": 0.3423,
"step": 1085
},
{
"epoch": 1.0149045179319982,
"grad_norm": 0.30243535095385876,
"learning_rate": 3.674490852606144e-05,
"loss": 0.3553,
"step": 1090
},
{
"epoch": 1.0195621797857475,
"grad_norm": 0.3507483425624034,
"learning_rate": 3.665861235761132e-05,
"loss": 0.3504,
"step": 1095
},
{
"epoch": 1.024219841639497,
"grad_norm": 0.32221792171674635,
"learning_rate": 3.6572316189161206e-05,
"loss": 0.3541,
"step": 1100
},
{
"epoch": 1.0288775034932465,
"grad_norm": 0.37215297022295046,
"learning_rate": 3.6486020020711085e-05,
"loss": 0.3534,
"step": 1105
},
{
"epoch": 1.0335351653469957,
"grad_norm": 1.1943932059811095,
"learning_rate": 3.639972385226096e-05,
"loss": 0.3546,
"step": 1110
},
{
"epoch": 1.0381928272007452,
"grad_norm": 0.3516112606363948,
"learning_rate": 3.631342768381084e-05,
"loss": 0.3469,
"step": 1115
},
{
"epoch": 1.0428504890544947,
"grad_norm": 0.28850962245298706,
"learning_rate": 3.622713151536072e-05,
"loss": 0.3538,
"step": 1120
},
{
"epoch": 1.047508150908244,
"grad_norm": 0.3314624580634308,
"learning_rate": 3.6140835346910604e-05,
"loss": 0.359,
"step": 1125
},
{
"epoch": 1.0521658127619935,
"grad_norm": 0.3790414890626171,
"learning_rate": 3.6054539178460476e-05,
"loss": 0.3443,
"step": 1130
},
{
"epoch": 1.056823474615743,
"grad_norm": 0.30670131819474084,
"learning_rate": 3.5968243010010354e-05,
"loss": 0.353,
"step": 1135
},
{
"epoch": 1.0614811364694923,
"grad_norm": 0.35114327907437415,
"learning_rate": 3.588194684156024e-05,
"loss": 0.3526,
"step": 1140
},
{
"epoch": 1.0661387983232418,
"grad_norm": 0.32134248505751767,
"learning_rate": 3.579565067311012e-05,
"loss": 0.3531,
"step": 1145
},
{
"epoch": 1.0707964601769913,
"grad_norm": 0.31651076269082523,
"learning_rate": 3.5709354504659995e-05,
"loss": 0.3491,
"step": 1150
},
{
"epoch": 1.0754541220307405,
"grad_norm": 0.2976154864236664,
"learning_rate": 3.5623058336209874e-05,
"loss": 0.3522,
"step": 1155
},
{
"epoch": 1.08011178388449,
"grad_norm": 0.32449478555851013,
"learning_rate": 3.553676216775975e-05,
"loss": 0.3595,
"step": 1160
},
{
"epoch": 1.0847694457382393,
"grad_norm": 0.27878607188094323,
"learning_rate": 3.545046599930964e-05,
"loss": 0.3583,
"step": 1165
},
{
"epoch": 1.0894271075919888,
"grad_norm": 0.31988584789330404,
"learning_rate": 3.536416983085951e-05,
"loss": 0.3556,
"step": 1170
},
{
"epoch": 1.0940847694457383,
"grad_norm": 0.3032847585581242,
"learning_rate": 3.5277873662409386e-05,
"loss": 0.3555,
"step": 1175
},
{
"epoch": 1.0987424312994876,
"grad_norm": 0.309027578439761,
"learning_rate": 3.519157749395927e-05,
"loss": 0.3492,
"step": 1180
},
{
"epoch": 1.103400093153237,
"grad_norm": 0.27391324423231966,
"learning_rate": 3.510528132550915e-05,
"loss": 0.3502,
"step": 1185
},
{
"epoch": 1.1080577550069866,
"grad_norm": 0.3061764334729438,
"learning_rate": 3.501898515705903e-05,
"loss": 0.3335,
"step": 1190
},
{
"epoch": 1.1127154168607358,
"grad_norm": 0.2875710397797018,
"learning_rate": 3.4932688988608906e-05,
"loss": 0.3417,
"step": 1195
},
{
"epoch": 1.1173730787144853,
"grad_norm": 0.2624004234850815,
"learning_rate": 3.4846392820158784e-05,
"loss": 0.3511,
"step": 1200
},
{
"epoch": 1.1220307405682348,
"grad_norm": 0.3248110653316974,
"learning_rate": 3.476009665170867e-05,
"loss": 0.3583,
"step": 1205
},
{
"epoch": 1.126688402421984,
"grad_norm": 0.3633571821910072,
"learning_rate": 3.467380048325855e-05,
"loss": 0.3533,
"step": 1210
},
{
"epoch": 1.1313460642757336,
"grad_norm": 0.33993904005897413,
"learning_rate": 3.458750431480842e-05,
"loss": 0.3488,
"step": 1215
},
{
"epoch": 1.136003726129483,
"grad_norm": 0.2870516215844625,
"learning_rate": 3.4501208146358304e-05,
"loss": 0.3613,
"step": 1220
},
{
"epoch": 1.1406613879832324,
"grad_norm": 0.3032689766119555,
"learning_rate": 3.441491197790818e-05,
"loss": 0.3495,
"step": 1225
},
{
"epoch": 1.1453190498369819,
"grad_norm": 0.28433837156321073,
"learning_rate": 3.432861580945806e-05,
"loss": 0.3505,
"step": 1230
},
{
"epoch": 1.1499767116907313,
"grad_norm": 0.291464118593231,
"learning_rate": 3.424231964100794e-05,
"loss": 0.3514,
"step": 1235
},
{
"epoch": 1.1546343735444806,
"grad_norm": 0.2885273892747404,
"learning_rate": 3.415602347255782e-05,
"loss": 0.3561,
"step": 1240
},
{
"epoch": 1.1592920353982301,
"grad_norm": 0.31723881770265705,
"learning_rate": 3.40697273041077e-05,
"loss": 0.3584,
"step": 1245
},
{
"epoch": 1.1639496972519794,
"grad_norm": 2.0745129939580536,
"learning_rate": 3.398343113565758e-05,
"loss": 0.3663,
"step": 1250
},
{
"epoch": 1.1686073591057289,
"grad_norm": 0.5108539849873057,
"learning_rate": 3.389713496720746e-05,
"loss": 0.3561,
"step": 1255
},
{
"epoch": 1.1732650209594784,
"grad_norm": 0.30039700859419805,
"learning_rate": 3.381083879875734e-05,
"loss": 0.3642,
"step": 1260
},
{
"epoch": 1.1779226828132279,
"grad_norm": 0.35686121830383705,
"learning_rate": 3.3724542630307215e-05,
"loss": 0.3563,
"step": 1265
},
{
"epoch": 1.1825803446669771,
"grad_norm": 0.3187570633882832,
"learning_rate": 3.363824646185709e-05,
"loss": 0.3435,
"step": 1270
},
{
"epoch": 1.1872380065207266,
"grad_norm": 0.332204557041565,
"learning_rate": 3.355195029340698e-05,
"loss": 0.3563,
"step": 1275
},
{
"epoch": 1.191895668374476,
"grad_norm": 0.3257790715751293,
"learning_rate": 3.346565412495685e-05,
"loss": 0.3572,
"step": 1280
},
{
"epoch": 1.1965533302282254,
"grad_norm": 0.3022135881303542,
"learning_rate": 3.3379357956506735e-05,
"loss": 0.3604,
"step": 1285
},
{
"epoch": 1.201210992081975,
"grad_norm": 0.3177687317250914,
"learning_rate": 3.329306178805661e-05,
"loss": 0.3532,
"step": 1290
},
{
"epoch": 1.2058686539357242,
"grad_norm": 1.2396857071159404,
"learning_rate": 3.320676561960649e-05,
"loss": 0.3574,
"step": 1295
},
{
"epoch": 1.2105263157894737,
"grad_norm": 0.3664373829434122,
"learning_rate": 3.312046945115637e-05,
"loss": 0.3533,
"step": 1300
},
{
"epoch": 1.2151839776432232,
"grad_norm": 0.3023963308504667,
"learning_rate": 3.303417328270625e-05,
"loss": 0.3613,
"step": 1305
},
{
"epoch": 1.2198416394969724,
"grad_norm": 0.3082958509214965,
"learning_rate": 3.2947877114256126e-05,
"loss": 0.3687,
"step": 1310
},
{
"epoch": 1.224499301350722,
"grad_norm": 0.36918987030345646,
"learning_rate": 3.286158094580601e-05,
"loss": 0.4267,
"step": 1315
},
{
"epoch": 1.2291569632044714,
"grad_norm": 0.35602913397714164,
"learning_rate": 3.277528477735589e-05,
"loss": 0.3634,
"step": 1320
},
{
"epoch": 1.2338146250582207,
"grad_norm": 0.3105028968314904,
"learning_rate": 3.268898860890577e-05,
"loss": 0.355,
"step": 1325
},
{
"epoch": 1.2384722869119702,
"grad_norm": 0.3141898633738905,
"learning_rate": 3.2602692440455645e-05,
"loss": 0.3599,
"step": 1330
},
{
"epoch": 1.2431299487657197,
"grad_norm": 0.34247754511180467,
"learning_rate": 3.2516396272005524e-05,
"loss": 0.3547,
"step": 1335
},
{
"epoch": 1.247787610619469,
"grad_norm": 0.3434502610535775,
"learning_rate": 3.243010010355541e-05,
"loss": 0.3547,
"step": 1340
},
{
"epoch": 1.2524452724732185,
"grad_norm": 0.3083559532522247,
"learning_rate": 3.234380393510528e-05,
"loss": 0.3438,
"step": 1345
},
{
"epoch": 1.257102934326968,
"grad_norm": 8.089934674724914,
"learning_rate": 3.225750776665516e-05,
"loss": 0.3566,
"step": 1350
},
{
"epoch": 1.2617605961807172,
"grad_norm": 23.609984098862725,
"learning_rate": 3.217121159820504e-05,
"loss": 0.3954,
"step": 1355
},
{
"epoch": 1.2664182580344667,
"grad_norm": 0.6589262851727549,
"learning_rate": 3.208491542975492e-05,
"loss": 0.4094,
"step": 1360
},
{
"epoch": 1.271075919888216,
"grad_norm": 0.37969701440271525,
"learning_rate": 3.19986192613048e-05,
"loss": 0.3618,
"step": 1365
},
{
"epoch": 1.2757335817419655,
"grad_norm": 0.35419265438638503,
"learning_rate": 3.191232309285468e-05,
"loss": 0.357,
"step": 1370
},
{
"epoch": 1.280391243595715,
"grad_norm": 0.3113731460367887,
"learning_rate": 3.1826026924404556e-05,
"loss": 0.3547,
"step": 1375
},
{
"epoch": 1.2850489054494645,
"grad_norm": 0.3711646260264564,
"learning_rate": 3.173973075595444e-05,
"loss": 0.3557,
"step": 1380
},
{
"epoch": 1.2897065673032138,
"grad_norm": 0.28159626489540107,
"learning_rate": 3.165343458750431e-05,
"loss": 0.3523,
"step": 1385
},
{
"epoch": 1.2943642291569633,
"grad_norm": 0.4368347910396357,
"learning_rate": 3.156713841905419e-05,
"loss": 0.3612,
"step": 1390
},
{
"epoch": 1.2990218910107125,
"grad_norm": 0.292048370076934,
"learning_rate": 3.1480842250604076e-05,
"loss": 0.3551,
"step": 1395
},
{
"epoch": 1.303679552864462,
"grad_norm": 0.28783970311720675,
"learning_rate": 3.1394546082153954e-05,
"loss": 0.3566,
"step": 1400
},
{
"epoch": 1.3083372147182115,
"grad_norm": 0.39691810792427434,
"learning_rate": 3.130824991370383e-05,
"loss": 0.3499,
"step": 1405
},
{
"epoch": 1.312994876571961,
"grad_norm": 5.1812004529766,
"learning_rate": 3.122195374525371e-05,
"loss": 0.3509,
"step": 1410
},
{
"epoch": 1.3176525384257103,
"grad_norm": 3.5459716211561947,
"learning_rate": 3.113565757680359e-05,
"loss": 0.3695,
"step": 1415
},
{
"epoch": 1.3223102002794598,
"grad_norm": 0.4018389678575513,
"learning_rate": 3.1049361408353474e-05,
"loss": 0.358,
"step": 1420
},
{
"epoch": 1.326967862133209,
"grad_norm": 0.37316511070692127,
"learning_rate": 3.096306523990335e-05,
"loss": 0.3503,
"step": 1425
},
{
"epoch": 1.3316255239869585,
"grad_norm": 0.4253066965910869,
"learning_rate": 3.0876769071453223e-05,
"loss": 0.3519,
"step": 1430
},
{
"epoch": 1.336283185840708,
"grad_norm": 0.41012648718576017,
"learning_rate": 3.079047290300311e-05,
"loss": 0.3577,
"step": 1435
},
{
"epoch": 1.3409408476944573,
"grad_norm": 0.4660255215590989,
"learning_rate": 3.070417673455299e-05,
"loss": 0.3547,
"step": 1440
},
{
"epoch": 1.3455985095482068,
"grad_norm": 0.9606859956101274,
"learning_rate": 3.061788056610287e-05,
"loss": 0.3603,
"step": 1445
},
{
"epoch": 1.350256171401956,
"grad_norm": 0.2729737086954493,
"learning_rate": 3.053158439765274e-05,
"loss": 0.3536,
"step": 1450
},
{
"epoch": 1.3549138332557056,
"grad_norm": 0.3185706265133299,
"learning_rate": 3.0445288229202625e-05,
"loss": 0.363,
"step": 1455
},
{
"epoch": 1.359571495109455,
"grad_norm": 0.29368292075603664,
"learning_rate": 3.0358992060752506e-05,
"loss": 0.346,
"step": 1460
},
{
"epoch": 1.3642291569632046,
"grad_norm": 0.6188898818836498,
"learning_rate": 3.027269589230238e-05,
"loss": 0.3522,
"step": 1465
},
{
"epoch": 1.3688868188169538,
"grad_norm": 0.29275072649894884,
"learning_rate": 3.018639972385226e-05,
"loss": 0.3561,
"step": 1470
},
{
"epoch": 1.3735444806707033,
"grad_norm": 0.3123873178331315,
"learning_rate": 3.010010355540214e-05,
"loss": 0.3494,
"step": 1475
},
{
"epoch": 1.3782021425244526,
"grad_norm": 0.3339526911219289,
"learning_rate": 3.001380738695202e-05,
"loss": 0.3518,
"step": 1480
},
{
"epoch": 1.382859804378202,
"grad_norm": 0.41947552863552484,
"learning_rate": 2.99275112185019e-05,
"loss": 0.3576,
"step": 1485
},
{
"epoch": 1.3875174662319516,
"grad_norm": 0.290834349215534,
"learning_rate": 2.984121505005178e-05,
"loss": 0.358,
"step": 1490
},
{
"epoch": 1.392175128085701,
"grad_norm": 0.3257789570431207,
"learning_rate": 2.9754918881601657e-05,
"loss": 0.3589,
"step": 1495
},
{
"epoch": 1.3968327899394504,
"grad_norm": 0.2697638582059203,
"learning_rate": 2.966862271315154e-05,
"loss": 0.3468,
"step": 1500
},
{
"epoch": 1.4014904517931999,
"grad_norm": 0.34596498712608775,
"learning_rate": 2.9582326544701417e-05,
"loss": 0.3516,
"step": 1505
},
{
"epoch": 1.4061481136469491,
"grad_norm": 0.3424362255444567,
"learning_rate": 2.9496030376251292e-05,
"loss": 0.3505,
"step": 1510
},
{
"epoch": 1.4108057755006986,
"grad_norm": 0.28515100314876,
"learning_rate": 2.9409734207801177e-05,
"loss": 0.3591,
"step": 1515
},
{
"epoch": 1.4154634373544481,
"grad_norm": 0.27947785201095854,
"learning_rate": 2.9323438039351052e-05,
"loss": 0.3522,
"step": 1520
},
{
"epoch": 1.4201210992081974,
"grad_norm": 0.5037464671470212,
"learning_rate": 2.9237141870900937e-05,
"loss": 0.352,
"step": 1525
},
{
"epoch": 1.424778761061947,
"grad_norm": 0.3427793038690582,
"learning_rate": 2.9150845702450812e-05,
"loss": 0.3435,
"step": 1530
},
{
"epoch": 1.4294364229156964,
"grad_norm": 0.2882568116228797,
"learning_rate": 2.906454953400069e-05,
"loss": 0.3584,
"step": 1535
},
{
"epoch": 1.4340940847694457,
"grad_norm": 0.3135134737473443,
"learning_rate": 2.897825336555057e-05,
"loss": 0.3499,
"step": 1540
},
{
"epoch": 1.4387517466231952,
"grad_norm": 0.30701305078292784,
"learning_rate": 2.889195719710045e-05,
"loss": 0.3498,
"step": 1545
},
{
"epoch": 1.4434094084769447,
"grad_norm": 0.9739810284082362,
"learning_rate": 2.8805661028650328e-05,
"loss": 0.3461,
"step": 1550
},
{
"epoch": 1.448067070330694,
"grad_norm": 0.30165717081554694,
"learning_rate": 2.871936486020021e-05,
"loss": 0.342,
"step": 1555
},
{
"epoch": 1.4527247321844434,
"grad_norm": 0.2613673867436863,
"learning_rate": 2.8633068691750088e-05,
"loss": 0.3489,
"step": 1560
},
{
"epoch": 1.4573823940381927,
"grad_norm": 0.32713429396994886,
"learning_rate": 2.854677252329997e-05,
"loss": 0.357,
"step": 1565
},
{
"epoch": 1.4620400558919422,
"grad_norm": 3.226986295868854,
"learning_rate": 2.8460476354849848e-05,
"loss": 0.3683,
"step": 1570
},
{
"epoch": 1.4666977177456917,
"grad_norm": 0.26293280030526456,
"learning_rate": 2.8374180186399723e-05,
"loss": 0.3511,
"step": 1575
},
{
"epoch": 1.4713553795994412,
"grad_norm": 0.28666579972329054,
"learning_rate": 2.8287884017949608e-05,
"loss": 0.344,
"step": 1580
},
{
"epoch": 1.4760130414531905,
"grad_norm": 0.27704759464972445,
"learning_rate": 2.8201587849499482e-05,
"loss": 0.3524,
"step": 1585
},
{
"epoch": 1.48067070330694,
"grad_norm": 0.28594874213631005,
"learning_rate": 2.811529168104936e-05,
"loss": 0.3512,
"step": 1590
},
{
"epoch": 1.4853283651606892,
"grad_norm": 0.27066447710153146,
"learning_rate": 2.8028995512599242e-05,
"loss": 0.3493,
"step": 1595
},
{
"epoch": 1.4899860270144387,
"grad_norm": 0.2600899746581506,
"learning_rate": 2.794269934414912e-05,
"loss": 0.3546,
"step": 1600
},
{
"epoch": 1.4946436888681882,
"grad_norm": 0.2500046923889755,
"learning_rate": 2.7856403175699002e-05,
"loss": 0.3465,
"step": 1605
},
{
"epoch": 1.4993013507219377,
"grad_norm": 0.2567828872835996,
"learning_rate": 2.777010700724888e-05,
"loss": 0.352,
"step": 1610
},
{
"epoch": 1.503959012575687,
"grad_norm": 0.2779666234603821,
"learning_rate": 2.768381083879876e-05,
"loss": 0.3507,
"step": 1615
},
{
"epoch": 1.5086166744294365,
"grad_norm": 0.27186485130837373,
"learning_rate": 2.759751467034864e-05,
"loss": 0.3516,
"step": 1620
},
{
"epoch": 1.5132743362831858,
"grad_norm": 0.24645955549365214,
"learning_rate": 2.751121850189852e-05,
"loss": 0.3546,
"step": 1625
},
{
"epoch": 1.5179319981369352,
"grad_norm": 0.2542323054350234,
"learning_rate": 2.7424922333448393e-05,
"loss": 0.3515,
"step": 1630
},
{
"epoch": 1.5225896599906847,
"grad_norm": 0.25933000013528096,
"learning_rate": 2.7338626164998278e-05,
"loss": 0.3616,
"step": 1635
},
{
"epoch": 1.5272473218444342,
"grad_norm": 0.2578174399758813,
"learning_rate": 2.7252329996548153e-05,
"loss": 0.3555,
"step": 1640
},
{
"epoch": 1.5319049836981835,
"grad_norm": 0.27068988078684453,
"learning_rate": 2.7166033828098038e-05,
"loss": 0.359,
"step": 1645
},
{
"epoch": 1.5365626455519328,
"grad_norm": 0.2677801002022169,
"learning_rate": 2.7079737659647913e-05,
"loss": 0.3437,
"step": 1650
},
{
"epoch": 1.5412203074056823,
"grad_norm": 0.2660552731038306,
"learning_rate": 2.699344149119779e-05,
"loss": 0.3424,
"step": 1655
},
{
"epoch": 1.5458779692594318,
"grad_norm": 0.28523039735998723,
"learning_rate": 2.6907145322747673e-05,
"loss": 0.3418,
"step": 1660
},
{
"epoch": 1.5505356311131813,
"grad_norm": 0.28731844231489023,
"learning_rate": 2.682084915429755e-05,
"loss": 0.3543,
"step": 1665
},
{
"epoch": 1.5551932929669308,
"grad_norm": 1.4775974232314364,
"learning_rate": 2.673455298584743e-05,
"loss": 0.3518,
"step": 1670
},
{
"epoch": 1.55985095482068,
"grad_norm": 0.24943372088294952,
"learning_rate": 2.664825681739731e-05,
"loss": 0.348,
"step": 1675
},
{
"epoch": 1.5645086166744293,
"grad_norm": 0.27406670344929485,
"learning_rate": 2.6561960648947186e-05,
"loss": 0.3548,
"step": 1680
},
{
"epoch": 1.5691662785281788,
"grad_norm": 0.26608558873313726,
"learning_rate": 2.647566448049707e-05,
"loss": 0.3444,
"step": 1685
},
{
"epoch": 1.5738239403819283,
"grad_norm": 0.27726086768562713,
"learning_rate": 2.6389368312046945e-05,
"loss": 0.3522,
"step": 1690
},
{
"epoch": 1.5784816022356778,
"grad_norm": 0.24678701765325478,
"learning_rate": 2.6303072143596824e-05,
"loss": 0.3487,
"step": 1695
},
{
"epoch": 1.583139264089427,
"grad_norm": 0.2712396806672742,
"learning_rate": 2.6216775975146705e-05,
"loss": 0.3519,
"step": 1700
},
{
"epoch": 1.5877969259431766,
"grad_norm": 0.2517013095851643,
"learning_rate": 2.6130479806696584e-05,
"loss": 0.3463,
"step": 1705
},
{
"epoch": 1.5924545877969258,
"grad_norm": 0.250371729590157,
"learning_rate": 2.6044183638246462e-05,
"loss": 0.3538,
"step": 1710
},
{
"epoch": 1.5971122496506753,
"grad_norm": 0.275342753890394,
"learning_rate": 2.5957887469796343e-05,
"loss": 0.3555,
"step": 1715
},
{
"epoch": 1.6017699115044248,
"grad_norm": 0.24313053813658697,
"learning_rate": 2.587159130134622e-05,
"loss": 0.342,
"step": 1720
},
{
"epoch": 1.6064275733581743,
"grad_norm": 0.2722072414317194,
"learning_rate": 2.5785295132896096e-05,
"loss": 0.3488,
"step": 1725
},
{
"epoch": 1.6110852352119236,
"grad_norm": 0.2987110807378229,
"learning_rate": 2.569899896444598e-05,
"loss": 0.3599,
"step": 1730
},
{
"epoch": 1.6157428970656729,
"grad_norm": 0.2774231229703875,
"learning_rate": 2.5612702795995856e-05,
"loss": 0.3587,
"step": 1735
},
{
"epoch": 1.6204005589194224,
"grad_norm": 0.27635376671669754,
"learning_rate": 2.552640662754574e-05,
"loss": 0.3402,
"step": 1740
},
{
"epoch": 1.6250582207731719,
"grad_norm": 0.25478389391029976,
"learning_rate": 2.5440110459095616e-05,
"loss": 0.3687,
"step": 1745
},
{
"epoch": 1.6297158826269214,
"grad_norm": 0.2781430915699508,
"learning_rate": 2.5353814290645494e-05,
"loss": 0.3483,
"step": 1750
},
{
"epoch": 1.6343735444806708,
"grad_norm": 0.2586169106331023,
"learning_rate": 2.5267518122195376e-05,
"loss": 0.356,
"step": 1755
},
{
"epoch": 1.6390312063344201,
"grad_norm": 0.25373603422413227,
"learning_rate": 2.5181221953745254e-05,
"loss": 0.3433,
"step": 1760
},
{
"epoch": 1.6436888681881694,
"grad_norm": 0.25761180042874776,
"learning_rate": 2.5094925785295132e-05,
"loss": 0.3454,
"step": 1765
},
{
"epoch": 1.648346530041919,
"grad_norm": 0.2832378466169248,
"learning_rate": 2.5008629616845014e-05,
"loss": 0.3419,
"step": 1770
},
{
"epoch": 1.6530041918956684,
"grad_norm": 0.24635731465935254,
"learning_rate": 2.4922333448394892e-05,
"loss": 0.3561,
"step": 1775
},
{
"epoch": 1.6576618537494179,
"grad_norm": 0.9005827663380318,
"learning_rate": 2.483603727994477e-05,
"loss": 0.3441,
"step": 1780
},
{
"epoch": 1.6623195156031674,
"grad_norm": 0.3437481829556501,
"learning_rate": 2.4749741111494652e-05,
"loss": 0.3573,
"step": 1785
},
{
"epoch": 1.6669771774569166,
"grad_norm": 0.36819542687532286,
"learning_rate": 2.466344494304453e-05,
"loss": 0.3461,
"step": 1790
},
{
"epoch": 1.671634839310666,
"grad_norm": 0.3068015778936282,
"learning_rate": 2.457714877459441e-05,
"loss": 0.3444,
"step": 1795
},
{
"epoch": 1.6762925011644154,
"grad_norm": 0.2618150436269955,
"learning_rate": 2.4490852606144287e-05,
"loss": 0.3426,
"step": 1800
},
{
"epoch": 1.680950163018165,
"grad_norm": 0.25451524024562244,
"learning_rate": 2.440455643769417e-05,
"loss": 0.3511,
"step": 1805
},
{
"epoch": 1.6856078248719144,
"grad_norm": 0.2695440900949316,
"learning_rate": 2.4318260269244047e-05,
"loss": 0.3477,
"step": 1810
},
{
"epoch": 1.6902654867256637,
"grad_norm": 0.2674915183495278,
"learning_rate": 2.4231964100793925e-05,
"loss": 0.3485,
"step": 1815
},
{
"epoch": 1.6949231485794132,
"grad_norm": 0.31579487413450547,
"learning_rate": 2.4145667932343803e-05,
"loss": 0.3375,
"step": 1820
},
{
"epoch": 1.6995808104331624,
"grad_norm": 0.2731513416518806,
"learning_rate": 2.4059371763893685e-05,
"loss": 0.3404,
"step": 1825
},
{
"epoch": 1.704238472286912,
"grad_norm": 0.26736069371041915,
"learning_rate": 2.3973075595443563e-05,
"loss": 0.3432,
"step": 1830
},
{
"epoch": 1.7088961341406614,
"grad_norm": 0.2871495858381633,
"learning_rate": 2.388677942699344e-05,
"loss": 0.3508,
"step": 1835
},
{
"epoch": 1.713553795994411,
"grad_norm": 0.24116315103304178,
"learning_rate": 2.3800483258543323e-05,
"loss": 0.3446,
"step": 1840
},
{
"epoch": 1.7182114578481602,
"grad_norm": 0.25078460812146336,
"learning_rate": 2.37141870900932e-05,
"loss": 0.349,
"step": 1845
},
{
"epoch": 1.7228691197019095,
"grad_norm": 0.29558613517452564,
"learning_rate": 2.3627890921643083e-05,
"loss": 0.357,
"step": 1850
},
{
"epoch": 1.727526781555659,
"grad_norm": 0.25402857290536573,
"learning_rate": 2.3541594753192957e-05,
"loss": 0.341,
"step": 1855
},
{
"epoch": 1.7321844434094085,
"grad_norm": 0.26708730846147416,
"learning_rate": 2.345529858474284e-05,
"loss": 0.3625,
"step": 1860
},
{
"epoch": 1.736842105263158,
"grad_norm": 0.25810926942570817,
"learning_rate": 2.3369002416292717e-05,
"loss": 0.3424,
"step": 1865
},
{
"epoch": 1.7414997671169075,
"grad_norm": 0.25777408289568604,
"learning_rate": 2.32827062478426e-05,
"loss": 0.3469,
"step": 1870
},
{
"epoch": 1.7461574289706567,
"grad_norm": 0.28033996112574766,
"learning_rate": 2.3196410079392474e-05,
"loss": 0.3604,
"step": 1875
},
{
"epoch": 1.750815090824406,
"grad_norm": 0.23366951536520036,
"learning_rate": 2.3110113910942355e-05,
"loss": 0.3525,
"step": 1880
},
{
"epoch": 1.7554727526781555,
"grad_norm": 0.25296859342059685,
"learning_rate": 2.3023817742492234e-05,
"loss": 0.3531,
"step": 1885
},
{
"epoch": 1.760130414531905,
"grad_norm": 0.24983643896715807,
"learning_rate": 2.2937521574042115e-05,
"loss": 0.3512,
"step": 1890
},
{
"epoch": 1.7647880763856545,
"grad_norm": 0.2716754177639391,
"learning_rate": 2.285122540559199e-05,
"loss": 0.3589,
"step": 1895
},
{
"epoch": 1.7694457382394038,
"grad_norm": 0.24135095593523273,
"learning_rate": 2.276492923714187e-05,
"loss": 0.3492,
"step": 1900
},
{
"epoch": 1.7741034000931533,
"grad_norm": 0.26280121470301687,
"learning_rate": 2.267863306869175e-05,
"loss": 0.35,
"step": 1905
},
{
"epoch": 1.7787610619469025,
"grad_norm": 0.24986188415439609,
"learning_rate": 2.259233690024163e-05,
"loss": 0.341,
"step": 1910
},
{
"epoch": 1.783418723800652,
"grad_norm": 0.24809669885312474,
"learning_rate": 2.250604073179151e-05,
"loss": 0.3383,
"step": 1915
},
{
"epoch": 1.7880763856544015,
"grad_norm": 1.4295723229351893,
"learning_rate": 2.2419744563341388e-05,
"loss": 0.3429,
"step": 1920
},
{
"epoch": 1.792734047508151,
"grad_norm": 0.29048553056880866,
"learning_rate": 2.233344839489127e-05,
"loss": 0.3522,
"step": 1925
},
{
"epoch": 1.7973917093619003,
"grad_norm": 0.2738000474300528,
"learning_rate": 2.2247152226441148e-05,
"loss": 0.3406,
"step": 1930
},
{
"epoch": 1.8020493712156498,
"grad_norm": 0.26755797749035115,
"learning_rate": 2.2160856057991026e-05,
"loss": 0.339,
"step": 1935
},
{
"epoch": 1.806707033069399,
"grad_norm": 0.24051646148161943,
"learning_rate": 2.2074559889540904e-05,
"loss": 0.3415,
"step": 1940
},
{
"epoch": 1.8113646949231486,
"grad_norm": 0.4480396154759198,
"learning_rate": 2.1988263721090786e-05,
"loss": 0.3504,
"step": 1945
},
{
"epoch": 1.816022356776898,
"grad_norm": 0.2585475391328862,
"learning_rate": 2.1901967552640664e-05,
"loss": 0.3423,
"step": 1950
},
{
"epoch": 1.8206800186306475,
"grad_norm": 0.23815718303661385,
"learning_rate": 2.1815671384190542e-05,
"loss": 0.3447,
"step": 1955
},
{
"epoch": 1.8253376804843968,
"grad_norm": 0.308300042750051,
"learning_rate": 2.172937521574042e-05,
"loss": 0.347,
"step": 1960
},
{
"epoch": 1.829995342338146,
"grad_norm": 0.25553311828654945,
"learning_rate": 2.1643079047290302e-05,
"loss": 0.3487,
"step": 1965
},
{
"epoch": 1.8346530041918956,
"grad_norm": 0.3588389772067993,
"learning_rate": 2.155678287884018e-05,
"loss": 0.3434,
"step": 1970
},
{
"epoch": 1.839310666045645,
"grad_norm": 0.26552906632603784,
"learning_rate": 2.147048671039006e-05,
"loss": 0.3503,
"step": 1975
},
{
"epoch": 1.8439683278993946,
"grad_norm": 0.25976308131809595,
"learning_rate": 2.1384190541939937e-05,
"loss": 0.3418,
"step": 1980
},
{
"epoch": 1.848625989753144,
"grad_norm": 0.2608984454672737,
"learning_rate": 2.129789437348982e-05,
"loss": 0.3539,
"step": 1985
},
{
"epoch": 1.8532836516068933,
"grad_norm": 0.6841187702059441,
"learning_rate": 2.1211598205039697e-05,
"loss": 0.3506,
"step": 1990
},
{
"epoch": 1.8579413134606426,
"grad_norm": 0.29300984860851426,
"learning_rate": 2.1125302036589575e-05,
"loss": 0.3378,
"step": 1995
},
{
"epoch": 1.8625989753143921,
"grad_norm": 0.25853621490849804,
"learning_rate": 2.1039005868139457e-05,
"loss": 0.3471,
"step": 2000
},
{
"epoch": 1.8672566371681416,
"grad_norm": 0.271087074061142,
"learning_rate": 2.0952709699689335e-05,
"loss": 0.3526,
"step": 2005
},
{
"epoch": 1.871914299021891,
"grad_norm": 0.2601235728752933,
"learning_rate": 2.0866413531239216e-05,
"loss": 0.3417,
"step": 2010
},
{
"epoch": 1.8765719608756404,
"grad_norm": 0.2588273475069737,
"learning_rate": 2.078011736278909e-05,
"loss": 0.3526,
"step": 2015
},
{
"epoch": 1.8812296227293899,
"grad_norm": 0.4189872439881,
"learning_rate": 2.0693821194338973e-05,
"loss": 0.3411,
"step": 2020
},
{
"epoch": 1.8858872845831391,
"grad_norm": 0.26516086453032495,
"learning_rate": 2.060752502588885e-05,
"loss": 0.3452,
"step": 2025
},
{
"epoch": 1.8905449464368886,
"grad_norm": 0.25754850179921396,
"learning_rate": 2.0521228857438733e-05,
"loss": 0.3428,
"step": 2030
},
{
"epoch": 1.8952026082906381,
"grad_norm": 0.2254389135020473,
"learning_rate": 2.0434932688988608e-05,
"loss": 0.3407,
"step": 2035
},
{
"epoch": 1.8998602701443876,
"grad_norm": 1.2877944565555752,
"learning_rate": 2.034863652053849e-05,
"loss": 0.3553,
"step": 2040
},
{
"epoch": 1.904517931998137,
"grad_norm": 0.23303512944550436,
"learning_rate": 2.0262340352088367e-05,
"loss": 0.3512,
"step": 2045
},
{
"epoch": 1.9091755938518864,
"grad_norm": 0.39910536743505765,
"learning_rate": 2.017604418363825e-05,
"loss": 0.3472,
"step": 2050
},
{
"epoch": 1.9138332557056357,
"grad_norm": 0.23961981952299882,
"learning_rate": 2.0089748015188127e-05,
"loss": 0.3465,
"step": 2055
},
{
"epoch": 1.9184909175593852,
"grad_norm": 0.2790757827986385,
"learning_rate": 2.0003451846738005e-05,
"loss": 0.3416,
"step": 2060
},
{
"epoch": 1.9231485794131347,
"grad_norm": 0.26541183998192874,
"learning_rate": 1.9917155678287887e-05,
"loss": 0.3426,
"step": 2065
},
{
"epoch": 1.9278062412668842,
"grad_norm": 0.26020009713009595,
"learning_rate": 1.9830859509837765e-05,
"loss": 0.3454,
"step": 2070
},
{
"epoch": 1.9324639031206334,
"grad_norm": 0.26844426587023945,
"learning_rate": 1.9744563341387643e-05,
"loss": 0.3453,
"step": 2075
},
{
"epoch": 1.9371215649743827,
"grad_norm": 0.2526468533029043,
"learning_rate": 1.9658267172937522e-05,
"loss": 0.339,
"step": 2080
},
{
"epoch": 1.9417792268281322,
"grad_norm": 0.25099498909407186,
"learning_rate": 1.9571971004487403e-05,
"loss": 0.3492,
"step": 2085
},
{
"epoch": 1.9464368886818817,
"grad_norm": 0.22893003769931286,
"learning_rate": 1.948567483603728e-05,
"loss": 0.3337,
"step": 2090
},
{
"epoch": 1.9510945505356312,
"grad_norm": 0.2499469298292734,
"learning_rate": 1.939937866758716e-05,
"loss": 0.3414,
"step": 2095
},
{
"epoch": 1.9557522123893807,
"grad_norm": 3.01516996652908,
"learning_rate": 1.9313082499137038e-05,
"loss": 0.3425,
"step": 2100
},
{
"epoch": 1.96040987424313,
"grad_norm": 0.235620525897121,
"learning_rate": 1.922678633068692e-05,
"loss": 0.3507,
"step": 2105
},
{
"epoch": 1.9650675360968792,
"grad_norm": 0.2271304553516188,
"learning_rate": 1.9140490162236798e-05,
"loss": 0.336,
"step": 2110
},
{
"epoch": 1.9697251979506287,
"grad_norm": 0.26095880436114127,
"learning_rate": 1.9054193993786676e-05,
"loss": 0.3429,
"step": 2115
},
{
"epoch": 1.9743828598043782,
"grad_norm": 0.2481845530711676,
"learning_rate": 1.8967897825336554e-05,
"loss": 0.346,
"step": 2120
},
{
"epoch": 1.9790405216581277,
"grad_norm": 0.2646587212417876,
"learning_rate": 1.8881601656886436e-05,
"loss": 0.3417,
"step": 2125
},
{
"epoch": 1.983698183511877,
"grad_norm": 0.23802879335165028,
"learning_rate": 1.8795305488436314e-05,
"loss": 0.3436,
"step": 2130
},
{
"epoch": 1.9883558453656265,
"grad_norm": 0.24832645092634265,
"learning_rate": 1.8709009319986192e-05,
"loss": 0.3487,
"step": 2135
},
{
"epoch": 1.9930135072193758,
"grad_norm": 0.2849361054588884,
"learning_rate": 1.8622713151536074e-05,
"loss": 0.347,
"step": 2140
},
{
"epoch": 1.9976711690731253,
"grad_norm": 0.24796434427364134,
"learning_rate": 1.8536416983085952e-05,
"loss": 0.3487,
"step": 2145
},
{
"epoch": 2.0018630647415,
"grad_norm": 0.4670077104840539,
"learning_rate": 1.8450120814635834e-05,
"loss": 0.3226,
"step": 2150
},
{
"epoch": 2.0065207265952494,
"grad_norm": 0.45452033829534516,
"learning_rate": 1.836382464618571e-05,
"loss": 0.2786,
"step": 2155
},
{
"epoch": 2.0111783884489984,
"grad_norm": 0.2794876251591008,
"learning_rate": 1.827752847773559e-05,
"loss": 0.2715,
"step": 2160
},
{
"epoch": 2.015836050302748,
"grad_norm": 0.2801100784062534,
"learning_rate": 1.819123230928547e-05,
"loss": 0.272,
"step": 2165
},
{
"epoch": 2.0204937121564974,
"grad_norm": 0.2817318982378413,
"learning_rate": 1.810493614083535e-05,
"loss": 0.2638,
"step": 2170
},
{
"epoch": 2.025151374010247,
"grad_norm": 0.274443503547923,
"learning_rate": 1.8018639972385225e-05,
"loss": 0.2749,
"step": 2175
},
{
"epoch": 2.0298090358639964,
"grad_norm": 0.2693048090720915,
"learning_rate": 1.7932343803935107e-05,
"loss": 0.2662,
"step": 2180
},
{
"epoch": 2.034466697717746,
"grad_norm": 0.29616883885176765,
"learning_rate": 1.7846047635484985e-05,
"loss": 0.2698,
"step": 2185
},
{
"epoch": 2.039124359571495,
"grad_norm": 0.2861132373580431,
"learning_rate": 1.7759751467034866e-05,
"loss": 0.2747,
"step": 2190
},
{
"epoch": 2.0437820214252445,
"grad_norm": 0.2673065715270604,
"learning_rate": 1.767345529858474e-05,
"loss": 0.2707,
"step": 2195
},
{
"epoch": 2.048439683278994,
"grad_norm": 0.2984804602495079,
"learning_rate": 1.7587159130134623e-05,
"loss": 0.2679,
"step": 2200
},
{
"epoch": 2.0530973451327434,
"grad_norm": 0.2651492582204811,
"learning_rate": 1.75008629616845e-05,
"loss": 0.2722,
"step": 2205
},
{
"epoch": 2.057755006986493,
"grad_norm": 0.2535902737834232,
"learning_rate": 1.7414566793234383e-05,
"loss": 0.2616,
"step": 2210
},
{
"epoch": 2.062412668840242,
"grad_norm": 0.38724908413622083,
"learning_rate": 1.732827062478426e-05,
"loss": 0.2706,
"step": 2215
},
{
"epoch": 2.0670703306939915,
"grad_norm": 0.29418307464847004,
"learning_rate": 1.724197445633414e-05,
"loss": 0.2686,
"step": 2220
},
{
"epoch": 2.071727992547741,
"grad_norm": 0.2685619174819881,
"learning_rate": 1.715567828788402e-05,
"loss": 0.2668,
"step": 2225
},
{
"epoch": 2.0763856544014905,
"grad_norm": 0.258294334458138,
"learning_rate": 1.70693821194339e-05,
"loss": 0.2696,
"step": 2230
},
{
"epoch": 2.08104331625524,
"grad_norm": 0.2654842147579374,
"learning_rate": 1.6983085950983777e-05,
"loss": 0.2638,
"step": 2235
},
{
"epoch": 2.0857009781089895,
"grad_norm": 0.29454304913296336,
"learning_rate": 1.6896789782533655e-05,
"loss": 0.2744,
"step": 2240
},
{
"epoch": 2.0903586399627385,
"grad_norm": 0.2853537828947393,
"learning_rate": 1.6810493614083537e-05,
"loss": 0.2704,
"step": 2245
},
{
"epoch": 2.095016301816488,
"grad_norm": 0.287794357838766,
"learning_rate": 1.6724197445633415e-05,
"loss": 0.2649,
"step": 2250
},
{
"epoch": 2.0996739636702375,
"grad_norm": 0.2689915249768445,
"learning_rate": 1.6637901277183294e-05,
"loss": 0.2755,
"step": 2255
},
{
"epoch": 2.104331625523987,
"grad_norm": 0.2763831642932531,
"learning_rate": 1.6551605108733172e-05,
"loss": 0.2668,
"step": 2260
},
{
"epoch": 2.1089892873777365,
"grad_norm": 0.27304407290976945,
"learning_rate": 1.6465308940283053e-05,
"loss": 0.2745,
"step": 2265
},
{
"epoch": 2.113646949231486,
"grad_norm": 0.2647848732867209,
"learning_rate": 1.637901277183293e-05,
"loss": 0.2682,
"step": 2270
},
{
"epoch": 2.118304611085235,
"grad_norm": 0.5195120921816434,
"learning_rate": 1.629271660338281e-05,
"loss": 0.2694,
"step": 2275
},
{
"epoch": 2.1229622729389845,
"grad_norm": 0.2525362731340643,
"learning_rate": 1.620642043493269e-05,
"loss": 0.27,
"step": 2280
},
{
"epoch": 2.127619934792734,
"grad_norm": 0.271141237976331,
"learning_rate": 1.612012426648257e-05,
"loss": 0.2736,
"step": 2285
},
{
"epoch": 2.1322775966464835,
"grad_norm": 0.29015424881388735,
"learning_rate": 1.6033828098032448e-05,
"loss": 0.2721,
"step": 2290
},
{
"epoch": 2.136935258500233,
"grad_norm": 0.25251985345507644,
"learning_rate": 1.5947531929582326e-05,
"loss": 0.2746,
"step": 2295
},
{
"epoch": 2.1415929203539825,
"grad_norm": 0.264331389896045,
"learning_rate": 1.5861235761132208e-05,
"loss": 0.2756,
"step": 2300
},
{
"epoch": 2.1462505822077316,
"grad_norm": 0.2818467534538804,
"learning_rate": 1.5774939592682086e-05,
"loss": 0.2736,
"step": 2305
},
{
"epoch": 2.150908244061481,
"grad_norm": 0.24602014197101432,
"learning_rate": 1.5688643424231964e-05,
"loss": 0.2615,
"step": 2310
},
{
"epoch": 2.1555659059152306,
"grad_norm": 0.26167388498791416,
"learning_rate": 1.5602347255781842e-05,
"loss": 0.2674,
"step": 2315
},
{
"epoch": 2.16022356776898,
"grad_norm": 0.2517441075418996,
"learning_rate": 1.5516051087331724e-05,
"loss": 0.2658,
"step": 2320
},
{
"epoch": 2.1648812296227296,
"grad_norm": 0.2753289696569991,
"learning_rate": 1.5429754918881602e-05,
"loss": 0.2706,
"step": 2325
},
{
"epoch": 2.1695388914764786,
"grad_norm": 0.24519322947304775,
"learning_rate": 1.534345875043148e-05,
"loss": 0.2711,
"step": 2330
},
{
"epoch": 2.174196553330228,
"grad_norm": 0.2709491329166551,
"learning_rate": 1.525716258198136e-05,
"loss": 0.2653,
"step": 2335
},
{
"epoch": 2.1788542151839776,
"grad_norm": 0.313781192883122,
"learning_rate": 1.517086641353124e-05,
"loss": 0.2686,
"step": 2340
},
{
"epoch": 2.183511877037727,
"grad_norm": 0.2652081894883003,
"learning_rate": 1.508457024508112e-05,
"loss": 0.2766,
"step": 2345
},
{
"epoch": 2.1881695388914766,
"grad_norm": 0.3001191925728167,
"learning_rate": 1.4998274076630997e-05,
"loss": 0.2677,
"step": 2350
},
{
"epoch": 2.192827200745226,
"grad_norm": 0.2451904092276869,
"learning_rate": 1.4911977908180877e-05,
"loss": 0.2672,
"step": 2355
},
{
"epoch": 2.197484862598975,
"grad_norm": 0.6591340342436288,
"learning_rate": 1.4825681739730757e-05,
"loss": 0.2762,
"step": 2360
},
{
"epoch": 2.2021425244527246,
"grad_norm": 0.300791091079865,
"learning_rate": 1.4739385571280637e-05,
"loss": 0.2734,
"step": 2365
},
{
"epoch": 2.206800186306474,
"grad_norm": 0.2653884810898289,
"learning_rate": 1.4653089402830513e-05,
"loss": 0.2727,
"step": 2370
},
{
"epoch": 2.2114578481602236,
"grad_norm": 0.2764326256981041,
"learning_rate": 1.4566793234380393e-05,
"loss": 0.2719,
"step": 2375
},
{
"epoch": 2.216115510013973,
"grad_norm": 0.2837879018636987,
"learning_rate": 1.4480497065930273e-05,
"loss": 0.2688,
"step": 2380
},
{
"epoch": 2.2207731718677226,
"grad_norm": 0.2548119239798833,
"learning_rate": 1.4394200897480153e-05,
"loss": 0.2814,
"step": 2385
},
{
"epoch": 2.2254308337214717,
"grad_norm": 0.2336755783098942,
"learning_rate": 1.4307904729030031e-05,
"loss": 0.2753,
"step": 2390
},
{
"epoch": 2.230088495575221,
"grad_norm": 0.2656480140545371,
"learning_rate": 1.4221608560579911e-05,
"loss": 0.273,
"step": 2395
},
{
"epoch": 2.2347461574289706,
"grad_norm": 0.2569155777441175,
"learning_rate": 1.4135312392129791e-05,
"loss": 0.2612,
"step": 2400
},
{
"epoch": 2.23940381928272,
"grad_norm": 0.2492998571163356,
"learning_rate": 1.404901622367967e-05,
"loss": 0.27,
"step": 2405
},
{
"epoch": 2.2440614811364696,
"grad_norm": 0.25590130320854276,
"learning_rate": 1.3962720055229547e-05,
"loss": 0.2713,
"step": 2410
},
{
"epoch": 2.248719142990219,
"grad_norm": 0.2539137547467478,
"learning_rate": 1.3876423886779427e-05,
"loss": 0.2692,
"step": 2415
},
{
"epoch": 2.253376804843968,
"grad_norm": 0.25764579758807804,
"learning_rate": 1.3790127718329307e-05,
"loss": 0.261,
"step": 2420
},
{
"epoch": 2.2580344666977177,
"grad_norm": 0.25551995299495456,
"learning_rate": 1.3703831549879187e-05,
"loss": 0.2724,
"step": 2425
},
{
"epoch": 2.262692128551467,
"grad_norm": 0.24266270441527438,
"learning_rate": 1.3617535381429064e-05,
"loss": 0.2642,
"step": 2430
},
{
"epoch": 2.2673497904052167,
"grad_norm": 0.2588361279242763,
"learning_rate": 1.3531239212978944e-05,
"loss": 0.2718,
"step": 2435
},
{
"epoch": 2.272007452258966,
"grad_norm": 0.28429578662647526,
"learning_rate": 1.3444943044528824e-05,
"loss": 0.2705,
"step": 2440
},
{
"epoch": 2.276665114112715,
"grad_norm": 0.24525404341789867,
"learning_rate": 1.3358646876078703e-05,
"loss": 0.2693,
"step": 2445
},
{
"epoch": 2.2813227759664647,
"grad_norm": 0.24607287108948245,
"learning_rate": 1.327235070762858e-05,
"loss": 0.2666,
"step": 2450
},
{
"epoch": 2.285980437820214,
"grad_norm": 0.24282683752499631,
"learning_rate": 1.318605453917846e-05,
"loss": 0.2697,
"step": 2455
},
{
"epoch": 2.2906380996739637,
"grad_norm": 0.305835699533878,
"learning_rate": 1.309975837072834e-05,
"loss": 0.2663,
"step": 2460
},
{
"epoch": 2.295295761527713,
"grad_norm": 0.2669796220360565,
"learning_rate": 1.301346220227822e-05,
"loss": 0.2782,
"step": 2465
},
{
"epoch": 2.2999534233814627,
"grad_norm": 0.3195367557398448,
"learning_rate": 1.2927166033828098e-05,
"loss": 0.269,
"step": 2470
},
{
"epoch": 2.3046110852352117,
"grad_norm": 0.260616710535662,
"learning_rate": 1.2840869865377978e-05,
"loss": 0.2743,
"step": 2475
},
{
"epoch": 2.3092687470889612,
"grad_norm": 0.2802244774876612,
"learning_rate": 1.2754573696927858e-05,
"loss": 0.2663,
"step": 2480
},
{
"epoch": 2.3139264089427107,
"grad_norm": 0.24626536789973147,
"learning_rate": 1.2668277528477738e-05,
"loss": 0.2741,
"step": 2485
},
{
"epoch": 2.3185840707964602,
"grad_norm": 0.2525988837688525,
"learning_rate": 1.2581981360027614e-05,
"loss": 0.2763,
"step": 2490
},
{
"epoch": 2.3232417326502097,
"grad_norm": 0.2419543341208871,
"learning_rate": 1.2495685191577494e-05,
"loss": 0.2684,
"step": 2495
},
{
"epoch": 2.3278993945039588,
"grad_norm": 0.22636835901167812,
"learning_rate": 1.2409389023127374e-05,
"loss": 0.2623,
"step": 2500
},
{
"epoch": 2.3325570563577083,
"grad_norm": 0.24683053840387006,
"learning_rate": 1.2323092854677252e-05,
"loss": 0.2689,
"step": 2505
},
{
"epoch": 2.3372147182114578,
"grad_norm": 0.39925021955339624,
"learning_rate": 1.2236796686227132e-05,
"loss": 0.2716,
"step": 2510
},
{
"epoch": 2.3418723800652073,
"grad_norm": 0.2460428498113885,
"learning_rate": 1.215050051777701e-05,
"loss": 0.2698,
"step": 2515
},
{
"epoch": 2.3465300419189568,
"grad_norm": 0.24081391446730407,
"learning_rate": 1.206420434932689e-05,
"loss": 0.2701,
"step": 2520
},
{
"epoch": 2.3511877037727063,
"grad_norm": 0.25896045081013075,
"learning_rate": 1.1977908180876769e-05,
"loss": 0.2727,
"step": 2525
},
{
"epoch": 2.3558453656264557,
"grad_norm": 0.2855436451430716,
"learning_rate": 1.1891612012426649e-05,
"loss": 0.2706,
"step": 2530
},
{
"epoch": 2.360503027480205,
"grad_norm": 0.26964620631891395,
"learning_rate": 1.1805315843976528e-05,
"loss": 0.2645,
"step": 2535
},
{
"epoch": 2.3651606893339543,
"grad_norm": 0.2553273475265153,
"learning_rate": 1.1719019675526408e-05,
"loss": 0.2611,
"step": 2540
},
{
"epoch": 2.369818351187704,
"grad_norm": 0.24598386932397875,
"learning_rate": 1.1632723507076287e-05,
"loss": 0.272,
"step": 2545
},
{
"epoch": 2.3744760130414533,
"grad_norm": 0.24820744671461673,
"learning_rate": 1.1546427338626167e-05,
"loss": 0.2715,
"step": 2550
},
{
"epoch": 2.3791336748952028,
"grad_norm": 0.2546651811004806,
"learning_rate": 1.1460131170176045e-05,
"loss": 0.2675,
"step": 2555
},
{
"epoch": 2.383791336748952,
"grad_norm": 0.28012240083023987,
"learning_rate": 1.1373835001725925e-05,
"loss": 0.2686,
"step": 2560
},
{
"epoch": 2.3884489986027013,
"grad_norm": 0.25496187070989224,
"learning_rate": 1.1287538833275803e-05,
"loss": 0.2725,
"step": 2565
},
{
"epoch": 2.393106660456451,
"grad_norm": 0.2459565835067793,
"learning_rate": 1.1201242664825683e-05,
"loss": 0.2747,
"step": 2570
},
{
"epoch": 2.3977643223102003,
"grad_norm": 0.24992557924789235,
"learning_rate": 1.1114946496375561e-05,
"loss": 0.2702,
"step": 2575
},
{
"epoch": 2.40242198416395,
"grad_norm": 0.24548790397731274,
"learning_rate": 1.1028650327925441e-05,
"loss": 0.2694,
"step": 2580
},
{
"epoch": 2.4070796460176993,
"grad_norm": 0.2540166581404125,
"learning_rate": 1.094235415947532e-05,
"loss": 0.2673,
"step": 2585
},
{
"epoch": 2.4117373078714484,
"grad_norm": 0.24956814468845384,
"learning_rate": 1.0856057991025199e-05,
"loss": 0.26,
"step": 2590
},
{
"epoch": 2.416394969725198,
"grad_norm": 0.2544462078803143,
"learning_rate": 1.0769761822575077e-05,
"loss": 0.2757,
"step": 2595
},
{
"epoch": 2.4210526315789473,
"grad_norm": 0.25477644710778236,
"learning_rate": 1.0683465654124957e-05,
"loss": 0.2728,
"step": 2600
},
{
"epoch": 2.425710293432697,
"grad_norm": 0.2463193145902546,
"learning_rate": 1.0597169485674835e-05,
"loss": 0.2642,
"step": 2605
},
{
"epoch": 2.4303679552864463,
"grad_norm": 0.25119398756180855,
"learning_rate": 1.0510873317224715e-05,
"loss": 0.2623,
"step": 2610
},
{
"epoch": 2.4350256171401954,
"grad_norm": 0.2554200605154604,
"learning_rate": 1.0424577148774595e-05,
"loss": 0.2673,
"step": 2615
},
{
"epoch": 2.439683278993945,
"grad_norm": 0.24850654131551053,
"learning_rate": 1.0338280980324475e-05,
"loss": 0.2691,
"step": 2620
},
{
"epoch": 2.4443409408476944,
"grad_norm": 0.25529790540475594,
"learning_rate": 1.0251984811874353e-05,
"loss": 0.2623,
"step": 2625
},
{
"epoch": 2.448998602701444,
"grad_norm": 0.24935654678620325,
"learning_rate": 1.0165688643424233e-05,
"loss": 0.266,
"step": 2630
},
{
"epoch": 2.4536562645551934,
"grad_norm": 0.24948434199922845,
"learning_rate": 1.0079392474974112e-05,
"loss": 0.2697,
"step": 2635
},
{
"epoch": 2.458313926408943,
"grad_norm": 0.5728746729322969,
"learning_rate": 9.993096306523992e-06,
"loss": 0.2709,
"step": 2640
},
{
"epoch": 2.4629715882626924,
"grad_norm": 0.2604484130291829,
"learning_rate": 9.90680013807387e-06,
"loss": 0.2701,
"step": 2645
},
{
"epoch": 2.4676292501164414,
"grad_norm": 0.23543493626242007,
"learning_rate": 9.82050396962375e-06,
"loss": 0.2672,
"step": 2650
},
{
"epoch": 2.472286911970191,
"grad_norm": 0.24954954048333747,
"learning_rate": 9.734207801173628e-06,
"loss": 0.2681,
"step": 2655
},
{
"epoch": 2.4769445738239404,
"grad_norm": 0.24700156741936974,
"learning_rate": 9.647911632723508e-06,
"loss": 0.265,
"step": 2660
},
{
"epoch": 2.48160223567769,
"grad_norm": 0.24034208079197983,
"learning_rate": 9.561615464273386e-06,
"loss": 0.2662,
"step": 2665
},
{
"epoch": 2.4862598975314394,
"grad_norm": 0.24046526182978087,
"learning_rate": 9.475319295823266e-06,
"loss": 0.27,
"step": 2670
},
{
"epoch": 2.4909175593851884,
"grad_norm": 0.23736799959590765,
"learning_rate": 9.389023127373144e-06,
"loss": 0.2656,
"step": 2675
},
{
"epoch": 2.495575221238938,
"grad_norm": 0.2653182659378759,
"learning_rate": 9.302726958923024e-06,
"loss": 0.2635,
"step": 2680
},
{
"epoch": 2.5002328830926874,
"grad_norm": 0.24189834679652392,
"learning_rate": 9.216430790472904e-06,
"loss": 0.2688,
"step": 2685
},
{
"epoch": 2.504890544946437,
"grad_norm": 0.2535012437719162,
"learning_rate": 9.130134622022784e-06,
"loss": 0.2598,
"step": 2690
},
{
"epoch": 2.5095482068001864,
"grad_norm": 0.4021752590698154,
"learning_rate": 9.043838453572662e-06,
"loss": 0.2656,
"step": 2695
},
{
"epoch": 2.514205868653936,
"grad_norm": 0.2396407337253643,
"learning_rate": 8.957542285122542e-06,
"loss": 0.2609,
"step": 2700
},
{
"epoch": 2.5188635305076854,
"grad_norm": 0.2539861103581641,
"learning_rate": 8.87124611667242e-06,
"loss": 0.2688,
"step": 2705
},
{
"epoch": 2.5235211923614345,
"grad_norm": 0.2359234652862627,
"learning_rate": 8.7849499482223e-06,
"loss": 0.2653,
"step": 2710
},
{
"epoch": 2.528178854215184,
"grad_norm": 0.25756160385693805,
"learning_rate": 8.698653779772179e-06,
"loss": 0.2702,
"step": 2715
},
{
"epoch": 2.5328365160689335,
"grad_norm": 0.2407337745730763,
"learning_rate": 8.612357611322058e-06,
"loss": 0.2737,
"step": 2720
},
{
"epoch": 2.537494177922683,
"grad_norm": 0.250173406027027,
"learning_rate": 8.526061442871937e-06,
"loss": 0.2678,
"step": 2725
},
{
"epoch": 2.542151839776432,
"grad_norm": 0.2397210288165992,
"learning_rate": 8.439765274421817e-06,
"loss": 0.2763,
"step": 2730
},
{
"epoch": 2.5468095016301815,
"grad_norm": 0.2401796074674243,
"learning_rate": 8.353469105971695e-06,
"loss": 0.2645,
"step": 2735
},
{
"epoch": 2.551467163483931,
"grad_norm": 0.23868068898159894,
"learning_rate": 8.267172937521575e-06,
"loss": 0.2633,
"step": 2740
},
{
"epoch": 2.5561248253376805,
"grad_norm": 0.2547368143136742,
"learning_rate": 8.180876769071453e-06,
"loss": 0.265,
"step": 2745
},
{
"epoch": 2.56078248719143,
"grad_norm": 0.23073042137252345,
"learning_rate": 8.094580600621333e-06,
"loss": 0.2645,
"step": 2750
},
{
"epoch": 2.5654401490451795,
"grad_norm": 0.24607684349630346,
"learning_rate": 8.008284432171211e-06,
"loss": 0.2614,
"step": 2755
},
{
"epoch": 2.570097810898929,
"grad_norm": 0.24201219527867612,
"learning_rate": 7.921988263721091e-06,
"loss": 0.2686,
"step": 2760
},
{
"epoch": 2.574755472752678,
"grad_norm": 0.24091627691070966,
"learning_rate": 7.835692095270971e-06,
"loss": 0.2627,
"step": 2765
},
{
"epoch": 2.5794131346064275,
"grad_norm": 0.2474852577907246,
"learning_rate": 7.749395926820851e-06,
"loss": 0.266,
"step": 2770
},
{
"epoch": 2.584070796460177,
"grad_norm": 0.2508786185963276,
"learning_rate": 7.663099758370729e-06,
"loss": 0.2606,
"step": 2775
},
{
"epoch": 2.5887284583139265,
"grad_norm": 0.26765346867793416,
"learning_rate": 7.576803589920608e-06,
"loss": 0.2699,
"step": 2780
},
{
"epoch": 2.5933861201676756,
"grad_norm": 0.6100376969601905,
"learning_rate": 7.490507421470487e-06,
"loss": 0.2709,
"step": 2785
},
{
"epoch": 2.598043782021425,
"grad_norm": 0.24495451940584023,
"learning_rate": 7.4042112530203655e-06,
"loss": 0.2733,
"step": 2790
},
{
"epoch": 2.6027014438751745,
"grad_norm": 0.2791048492990286,
"learning_rate": 7.317915084570245e-06,
"loss": 0.2671,
"step": 2795
},
{
"epoch": 2.607359105728924,
"grad_norm": 0.24944148889122386,
"learning_rate": 7.231618916120124e-06,
"loss": 0.2682,
"step": 2800
},
{
"epoch": 2.6120167675826735,
"grad_norm": 0.250766764140112,
"learning_rate": 7.1453227476700035e-06,
"loss": 0.2666,
"step": 2805
},
{
"epoch": 2.616674429436423,
"grad_norm": 0.2781145664814796,
"learning_rate": 7.059026579219883e-06,
"loss": 0.2676,
"step": 2810
},
{
"epoch": 2.6213320912901725,
"grad_norm": 0.22829160732591977,
"learning_rate": 6.9727304107697625e-06,
"loss": 0.2598,
"step": 2815
},
{
"epoch": 2.625989753143922,
"grad_norm": 0.23886526932116794,
"learning_rate": 6.886434242319641e-06,
"loss": 0.2702,
"step": 2820
},
{
"epoch": 2.630647414997671,
"grad_norm": 0.2532267531205448,
"learning_rate": 6.800138073869521e-06,
"loss": 0.2683,
"step": 2825
},
{
"epoch": 2.6353050768514206,
"grad_norm": 0.2778041631063136,
"learning_rate": 6.713841905419399e-06,
"loss": 0.263,
"step": 2830
},
{
"epoch": 2.63996273870517,
"grad_norm": 0.23987705253673325,
"learning_rate": 6.627545736969279e-06,
"loss": 0.2722,
"step": 2835
},
{
"epoch": 2.6446204005589196,
"grad_norm": 0.2336359656913526,
"learning_rate": 6.541249568519157e-06,
"loss": 0.2762,
"step": 2840
},
{
"epoch": 2.6492780624126686,
"grad_norm": 0.2522255896799647,
"learning_rate": 6.454953400069037e-06,
"loss": 0.2677,
"step": 2845
},
{
"epoch": 2.653935724266418,
"grad_norm": 0.2362771386522153,
"learning_rate": 6.368657231618916e-06,
"loss": 0.2702,
"step": 2850
},
{
"epoch": 2.6585933861201676,
"grad_norm": 0.2603907654875355,
"learning_rate": 6.282361063168796e-06,
"loss": 0.2606,
"step": 2855
},
{
"epoch": 2.663251047973917,
"grad_norm": 0.2384253786782358,
"learning_rate": 6.196064894718675e-06,
"loss": 0.2665,
"step": 2860
},
{
"epoch": 2.6679087098276666,
"grad_norm": 0.2417383086843217,
"learning_rate": 6.109768726268554e-06,
"loss": 0.2654,
"step": 2865
},
{
"epoch": 2.672566371681416,
"grad_norm": 0.23682724066001656,
"learning_rate": 6.023472557818433e-06,
"loss": 0.2651,
"step": 2870
},
{
"epoch": 2.6772240335351656,
"grad_norm": 0.28125485834884123,
"learning_rate": 5.937176389368312e-06,
"loss": 0.2676,
"step": 2875
},
{
"epoch": 2.6818816953889146,
"grad_norm": 0.24560709342381493,
"learning_rate": 5.850880220918191e-06,
"loss": 0.2656,
"step": 2880
},
{
"epoch": 2.686539357242664,
"grad_norm": 0.24142604350710792,
"learning_rate": 5.76458405246807e-06,
"loss": 0.2736,
"step": 2885
},
{
"epoch": 2.6911970190964136,
"grad_norm": 0.23021865789214346,
"learning_rate": 5.67828788401795e-06,
"loss": 0.2673,
"step": 2890
},
{
"epoch": 2.695854680950163,
"grad_norm": 0.24645998952166318,
"learning_rate": 5.591991715567829e-06,
"loss": 0.2621,
"step": 2895
},
{
"epoch": 2.700512342803912,
"grad_norm": 0.23281318258868106,
"learning_rate": 5.5056955471177085e-06,
"loss": 0.2645,
"step": 2900
},
{
"epoch": 2.7051700046576617,
"grad_norm": 0.2326586772152503,
"learning_rate": 5.4193993786675876e-06,
"loss": 0.2593,
"step": 2905
},
{
"epoch": 2.709827666511411,
"grad_norm": 0.23989592399985699,
"learning_rate": 5.333103210217467e-06,
"loss": 0.2625,
"step": 2910
},
{
"epoch": 2.7144853283651607,
"grad_norm": 0.24264635453844383,
"learning_rate": 5.246807041767346e-06,
"loss": 0.2685,
"step": 2915
},
{
"epoch": 2.71914299021891,
"grad_norm": 0.23505622614412772,
"learning_rate": 5.160510873317225e-06,
"loss": 0.2619,
"step": 2920
},
{
"epoch": 2.7238006520726596,
"grad_norm": 0.2954774090778297,
"learning_rate": 5.074214704867105e-06,
"loss": 0.2709,
"step": 2925
},
{
"epoch": 2.728458313926409,
"grad_norm": 0.23449375146214052,
"learning_rate": 4.987918536416984e-06,
"loss": 0.2686,
"step": 2930
},
{
"epoch": 2.7331159757801586,
"grad_norm": 0.22660197484627229,
"learning_rate": 4.901622367966863e-06,
"loss": 0.2639,
"step": 2935
},
{
"epoch": 2.7377736376339077,
"grad_norm": 0.25366849181683887,
"learning_rate": 4.815326199516742e-06,
"loss": 0.2652,
"step": 2940
},
{
"epoch": 2.742431299487657,
"grad_norm": 0.23773508357884918,
"learning_rate": 4.729030031066621e-06,
"loss": 0.272,
"step": 2945
},
{
"epoch": 2.7470889613414067,
"grad_norm": 0.23883706049912928,
"learning_rate": 4.6427338626165e-06,
"loss": 0.262,
"step": 2950
},
{
"epoch": 2.751746623195156,
"grad_norm": 0.245082821723228,
"learning_rate": 4.556437694166379e-06,
"loss": 0.266,
"step": 2955
},
{
"epoch": 2.7564042850489052,
"grad_norm": 0.24388812731912868,
"learning_rate": 4.470141525716258e-06,
"loss": 0.2665,
"step": 2960
},
{
"epoch": 2.7610619469026547,
"grad_norm": 0.23080388123509749,
"learning_rate": 4.383845357266138e-06,
"loss": 0.2687,
"step": 2965
},
{
"epoch": 2.765719608756404,
"grad_norm": 0.23304757623637526,
"learning_rate": 4.297549188816017e-06,
"loss": 0.2629,
"step": 2970
},
{
"epoch": 2.7703772706101537,
"grad_norm": 0.2629554897037301,
"learning_rate": 4.211253020365896e-06,
"loss": 0.2656,
"step": 2975
},
{
"epoch": 2.775034932463903,
"grad_norm": 0.24540971434329928,
"learning_rate": 4.124956851915775e-06,
"loss": 0.2629,
"step": 2980
},
{
"epoch": 2.7796925943176527,
"grad_norm": 0.30014532921383136,
"learning_rate": 4.0386606834656544e-06,
"loss": 0.2678,
"step": 2985
},
{
"epoch": 2.784350256171402,
"grad_norm": 0.2342133541506607,
"learning_rate": 3.9523645150155335e-06,
"loss": 0.2654,
"step": 2990
},
{
"epoch": 2.7890079180251512,
"grad_norm": 0.23959451694876363,
"learning_rate": 3.8660683465654126e-06,
"loss": 0.2652,
"step": 2995
},
{
"epoch": 2.7936655798789007,
"grad_norm": 0.24027322925970268,
"learning_rate": 3.779772178115292e-06,
"loss": 0.2683,
"step": 3000
},
{
"epoch": 2.7983232417326502,
"grad_norm": 0.22866196108603312,
"learning_rate": 3.693476009665171e-06,
"loss": 0.2638,
"step": 3005
},
{
"epoch": 2.8029809035863997,
"grad_norm": 0.23207917421270907,
"learning_rate": 3.6071798412150506e-06,
"loss": 0.2697,
"step": 3010
},
{
"epoch": 2.807638565440149,
"grad_norm": 0.24148817561468802,
"learning_rate": 3.5208836727649297e-06,
"loss": 0.2548,
"step": 3015
},
{
"epoch": 2.8122962272938983,
"grad_norm": 0.24826400013212727,
"learning_rate": 3.434587504314809e-06,
"loss": 0.2669,
"step": 3020
},
{
"epoch": 2.8169538891476478,
"grad_norm": 0.22758778242341457,
"learning_rate": 3.348291335864688e-06,
"loss": 0.2631,
"step": 3025
},
{
"epoch": 2.8216115510013973,
"grad_norm": 0.2278230958357378,
"learning_rate": 3.2619951674145674e-06,
"loss": 0.2617,
"step": 3030
},
{
"epoch": 2.8262692128551468,
"grad_norm": 0.2301525600023757,
"learning_rate": 3.1756989989644464e-06,
"loss": 0.2652,
"step": 3035
},
{
"epoch": 2.8309268747088963,
"grad_norm": 0.24711954628030958,
"learning_rate": 3.089402830514325e-06,
"loss": 0.2751,
"step": 3040
},
{
"epoch": 2.8355845365626458,
"grad_norm": 0.26581279477281244,
"learning_rate": 3.0031066620642046e-06,
"loss": 0.2609,
"step": 3045
},
{
"epoch": 2.840242198416395,
"grad_norm": 0.24890741319507606,
"learning_rate": 2.9168104936140837e-06,
"loss": 0.261,
"step": 3050
},
{
"epoch": 2.8448998602701443,
"grad_norm": 0.30572357087890906,
"learning_rate": 2.8305143251639627e-06,
"loss": 0.269,
"step": 3055
},
{
"epoch": 2.849557522123894,
"grad_norm": 0.3070212619911331,
"learning_rate": 2.7442181567138422e-06,
"loss": 0.2717,
"step": 3060
},
{
"epoch": 2.8542151839776433,
"grad_norm": 0.24123580523943625,
"learning_rate": 2.6579219882637213e-06,
"loss": 0.2681,
"step": 3065
},
{
"epoch": 2.858872845831393,
"grad_norm": 0.23087439262318699,
"learning_rate": 2.5716258198136004e-06,
"loss": 0.2582,
"step": 3070
},
{
"epoch": 2.863530507685142,
"grad_norm": 0.2391097559052989,
"learning_rate": 2.4853296513634795e-06,
"loss": 0.2631,
"step": 3075
},
{
"epoch": 2.8681881695388913,
"grad_norm": 1.6021977536357102,
"learning_rate": 2.399033482913359e-06,
"loss": 0.2619,
"step": 3080
},
{
"epoch": 2.872845831392641,
"grad_norm": 0.2257095993666073,
"learning_rate": 2.312737314463238e-06,
"loss": 0.2638,
"step": 3085
},
{
"epoch": 2.8775034932463903,
"grad_norm": 0.23447988430791128,
"learning_rate": 2.226441146013117e-06,
"loss": 0.2695,
"step": 3090
},
{
"epoch": 2.88216115510014,
"grad_norm": 0.23336542967563148,
"learning_rate": 2.140144977562996e-06,
"loss": 0.258,
"step": 3095
},
{
"epoch": 2.8868188169538893,
"grad_norm": 0.22960523248355422,
"learning_rate": 2.0538488091128757e-06,
"loss": 0.2701,
"step": 3100
},
{
"epoch": 2.891476478807639,
"grad_norm": 0.23768769948413135,
"learning_rate": 1.9675526406627547e-06,
"loss": 0.2651,
"step": 3105
},
{
"epoch": 2.896134140661388,
"grad_norm": 0.2264847690514202,
"learning_rate": 1.8812564722126338e-06,
"loss": 0.2592,
"step": 3110
},
{
"epoch": 2.9007918025151374,
"grad_norm": 0.22932272675979026,
"learning_rate": 1.794960303762513e-06,
"loss": 0.2565,
"step": 3115
},
{
"epoch": 2.905449464368887,
"grad_norm": 0.23637996003822423,
"learning_rate": 1.7086641353123924e-06,
"loss": 0.2659,
"step": 3120
},
{
"epoch": 2.9101071262226363,
"grad_norm": 3.6839450033328283,
"learning_rate": 1.6223679668622715e-06,
"loss": 0.2672,
"step": 3125
},
{
"epoch": 2.9147647880763854,
"grad_norm": 0.2227886774143204,
"learning_rate": 1.5360717984121505e-06,
"loss": 0.255,
"step": 3130
},
{
"epoch": 2.919422449930135,
"grad_norm": 0.2271396022815255,
"learning_rate": 1.4497756299620296e-06,
"loss": 0.263,
"step": 3135
},
{
"epoch": 2.9240801117838844,
"grad_norm": 0.22277258393762092,
"learning_rate": 1.363479461511909e-06,
"loss": 0.272,
"step": 3140
},
{
"epoch": 2.928737773637634,
"grad_norm": 0.23371618200567057,
"learning_rate": 1.277183293061788e-06,
"loss": 0.2733,
"step": 3145
},
{
"epoch": 2.9333954354913834,
"grad_norm": 0.2264313972112628,
"learning_rate": 1.1908871246116673e-06,
"loss": 0.2639,
"step": 3150
},
{
"epoch": 2.938053097345133,
"grad_norm": 0.4570714544778108,
"learning_rate": 1.1045909561615463e-06,
"loss": 0.2642,
"step": 3155
},
{
"epoch": 2.9427107591988824,
"grad_norm": 0.2366565080467962,
"learning_rate": 1.0182947877114256e-06,
"loss": 0.2639,
"step": 3160
},
{
"epoch": 2.9473684210526314,
"grad_norm": 0.2346391676286511,
"learning_rate": 9.319986192613048e-07,
"loss": 0.2659,
"step": 3165
},
{
"epoch": 2.952026082906381,
"grad_norm": 0.21986695916960491,
"learning_rate": 8.45702450811184e-07,
"loss": 0.2613,
"step": 3170
},
{
"epoch": 2.9566837447601304,
"grad_norm": 0.23068227390735385,
"learning_rate": 7.594062823610632e-07,
"loss": 0.2652,
"step": 3175
},
{
"epoch": 2.96134140661388,
"grad_norm": 0.2218014778660938,
"learning_rate": 6.731101139109423e-07,
"loss": 0.2607,
"step": 3180
},
{
"epoch": 2.9659990684676294,
"grad_norm": 0.23904524486615045,
"learning_rate": 5.868139454608215e-07,
"loss": 0.2647,
"step": 3185
},
{
"epoch": 2.9706567303213784,
"grad_norm": 0.22329952211908216,
"learning_rate": 5.005177770107007e-07,
"loss": 0.2562,
"step": 3190
},
{
"epoch": 2.975314392175128,
"grad_norm": 0.23326673144161586,
"learning_rate": 4.142216085605799e-07,
"loss": 0.2678,
"step": 3195
},
{
"epoch": 2.9799720540288774,
"grad_norm": 0.23348286530329374,
"learning_rate": 3.279254401104591e-07,
"loss": 0.2668,
"step": 3200
},
{
"epoch": 2.984629715882627,
"grad_norm": 0.23276521431022687,
"learning_rate": 2.416292716603383e-07,
"loss": 0.2684,
"step": 3205
},
{
"epoch": 2.9892873777363764,
"grad_norm": 0.2257062545640731,
"learning_rate": 1.5533310321021747e-07,
"loss": 0.2529,
"step": 3210
},
{
"epoch": 2.993945039590126,
"grad_norm": 0.23253102905210304,
"learning_rate": 6.903693476009665e-08,
"loss": 0.2549,
"step": 3215
},
{
"epoch": 2.9976711690731253,
"step": 3219,
"total_flos": 2.754977641940386e+18,
"train_loss": 0.3562816010013283,
"train_runtime": 43513.4977,
"train_samples_per_second": 1.184,
"train_steps_per_second": 0.074
}
],
"logging_steps": 5,
"max_steps": 3219,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 2.754977641940386e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}