{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.999792917788362,
"eval_steps": 500,
"global_step": 1207,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0008283288465520812,
"grad_norm": 24.23784679529398,
"learning_rate": 8.264462809917357e-08,
"loss": 1.4304,
"step": 1
},
{
"epoch": 0.0041416442327604054,
"grad_norm": 23.234915420644615,
"learning_rate": 4.132231404958678e-07,
"loss": 1.4144,
"step": 5
},
{
"epoch": 0.008283288465520811,
"grad_norm": 8.222025109804092,
"learning_rate": 8.264462809917356e-07,
"loss": 1.307,
"step": 10
},
{
"epoch": 0.012424932698281217,
"grad_norm": 10.133789775285283,
"learning_rate": 1.2396694214876035e-06,
"loss": 1.1636,
"step": 15
},
{
"epoch": 0.016566576931041622,
"grad_norm": 2.9246570997342918,
"learning_rate": 1.6528925619834712e-06,
"loss": 1.0151,
"step": 20
},
{
"epoch": 0.02070822116380203,
"grad_norm": 2.7388624531591717,
"learning_rate": 2.066115702479339e-06,
"loss": 0.9484,
"step": 25
},
{
"epoch": 0.024849865396562434,
"grad_norm": 2.365875950285307,
"learning_rate": 2.479338842975207e-06,
"loss": 0.9332,
"step": 30
},
{
"epoch": 0.028991509629322842,
"grad_norm": 2.374922722027774,
"learning_rate": 2.8925619834710743e-06,
"loss": 0.9044,
"step": 35
},
{
"epoch": 0.033133153862083244,
"grad_norm": 2.238010214125612,
"learning_rate": 3.3057851239669424e-06,
"loss": 0.8913,
"step": 40
},
{
"epoch": 0.03727479809484365,
"grad_norm": 2.4490973058865078,
"learning_rate": 3.71900826446281e-06,
"loss": 0.8833,
"step": 45
},
{
"epoch": 0.04141644232760406,
"grad_norm": 2.469516335084805,
"learning_rate": 4.132231404958678e-06,
"loss": 0.8813,
"step": 50
},
{
"epoch": 0.04555808656036447,
"grad_norm": 2.2371905595594495,
"learning_rate": 4.5454545454545455e-06,
"loss": 0.8533,
"step": 55
},
{
"epoch": 0.04969973079312487,
"grad_norm": 2.409737572391393,
"learning_rate": 4.958677685950414e-06,
"loss": 0.8581,
"step": 60
},
{
"epoch": 0.05384137502588528,
"grad_norm": 2.2291530452515977,
"learning_rate": 5.371900826446281e-06,
"loss": 0.8519,
"step": 65
},
{
"epoch": 0.057983019258645685,
"grad_norm": 2.35453266908653,
"learning_rate": 5.785123966942149e-06,
"loss": 0.8515,
"step": 70
},
{
"epoch": 0.062124663491406086,
"grad_norm": 2.336463988259032,
"learning_rate": 6.198347107438017e-06,
"loss": 0.8295,
"step": 75
},
{
"epoch": 0.06626630772416649,
"grad_norm": 2.4980329883385624,
"learning_rate": 6.611570247933885e-06,
"loss": 0.832,
"step": 80
},
{
"epoch": 0.0704079519569269,
"grad_norm": 2.6275854974083384,
"learning_rate": 7.0247933884297525e-06,
"loss": 0.816,
"step": 85
},
{
"epoch": 0.0745495961896873,
"grad_norm": 2.684536070785031,
"learning_rate": 7.43801652892562e-06,
"loss": 0.8162,
"step": 90
},
{
"epoch": 0.07869124042244771,
"grad_norm": 2.805145187507444,
"learning_rate": 7.851239669421489e-06,
"loss": 0.8309,
"step": 95
},
{
"epoch": 0.08283288465520812,
"grad_norm": 2.4685576686167874,
"learning_rate": 8.264462809917356e-06,
"loss": 0.8155,
"step": 100
},
{
"epoch": 0.08697452888796853,
"grad_norm": 2.540099419306748,
"learning_rate": 8.677685950413224e-06,
"loss": 0.8071,
"step": 105
},
{
"epoch": 0.09111617312072894,
"grad_norm": 2.562872480103855,
"learning_rate": 9.090909090909091e-06,
"loss": 0.8029,
"step": 110
},
{
"epoch": 0.09525781735348933,
"grad_norm": 2.5970244377170313,
"learning_rate": 9.50413223140496e-06,
"loss": 0.7873,
"step": 115
},
{
"epoch": 0.09939946158624974,
"grad_norm": 2.4308464508943346,
"learning_rate": 9.917355371900828e-06,
"loss": 0.7915,
"step": 120
},
{
"epoch": 0.10354110581901015,
"grad_norm": 2.6905207268547158,
"learning_rate": 9.999665269535307e-06,
"loss": 0.7993,
"step": 125
},
{
"epoch": 0.10768275005177055,
"grad_norm": 34.133455991097065,
"learning_rate": 9.998305503833872e-06,
"loss": 0.8009,
"step": 130
},
{
"epoch": 0.11182439428453096,
"grad_norm": 2.3166119518346866,
"learning_rate": 9.995900066492902e-06,
"loss": 0.785,
"step": 135
},
{
"epoch": 0.11596603851729137,
"grad_norm": 2.5556998820575076,
"learning_rate": 9.992449460742464e-06,
"loss": 0.8134,
"step": 140
},
{
"epoch": 0.12010768275005176,
"grad_norm": 2.615095977165256,
"learning_rate": 9.98795440846732e-06,
"loss": 0.7898,
"step": 145
},
{
"epoch": 0.12424932698281217,
"grad_norm": 2.729536225853297,
"learning_rate": 9.982415850055902e-06,
"loss": 0.7681,
"step": 150
},
{
"epoch": 0.12839097121557258,
"grad_norm": 3.895339838479258,
"learning_rate": 9.975834944203581e-06,
"loss": 0.7503,
"step": 155
},
{
"epoch": 0.13253261544833297,
"grad_norm": 2.4875917416718187,
"learning_rate": 9.968213067670265e-06,
"loss": 0.7632,
"step": 160
},
{
"epoch": 0.1366742596810934,
"grad_norm": 2.5249141397756847,
"learning_rate": 9.959551814992364e-06,
"loss": 0.7444,
"step": 165
},
{
"epoch": 0.1408159039138538,
"grad_norm": 2.6613194516011656,
"learning_rate": 9.949852998149217e-06,
"loss": 0.7592,
"step": 170
},
{
"epoch": 0.1449575481466142,
"grad_norm": 2.535010612771426,
"learning_rate": 9.939118646184007e-06,
"loss": 0.7625,
"step": 175
},
{
"epoch": 0.1490991923793746,
"grad_norm": 2.658211254916015,
"learning_rate": 9.927351004779275e-06,
"loss": 0.7405,
"step": 180
},
{
"epoch": 0.15324083661213503,
"grad_norm": 2.343186435782198,
"learning_rate": 9.914552535787122e-06,
"loss": 0.7285,
"step": 185
},
{
"epoch": 0.15738248084489542,
"grad_norm": 2.4661024566837653,
"learning_rate": 9.900725916714157e-06,
"loss": 0.7405,
"step": 190
},
{
"epoch": 0.16152412507765582,
"grad_norm": 2.7732706905434497,
"learning_rate": 9.885874040161373e-06,
"loss": 0.7373,
"step": 195
},
{
"epoch": 0.16566576931041624,
"grad_norm": 2.523332136572105,
"learning_rate": 9.87000001321898e-06,
"loss": 0.7261,
"step": 200
},
{
"epoch": 0.16980741354317663,
"grad_norm": 2.2311284634557937,
"learning_rate": 9.853107156816393e-06,
"loss": 0.7184,
"step": 205
},
{
"epoch": 0.17394905777593705,
"grad_norm": 2.379531410344561,
"learning_rate": 9.835199005027477e-06,
"loss": 0.7144,
"step": 210
},
{
"epoch": 0.17809070200869745,
"grad_norm": 2.3058139982021215,
"learning_rate": 9.816279304331202e-06,
"loss": 0.7252,
"step": 215
},
{
"epoch": 0.18223234624145787,
"grad_norm": 2.2361882677362974,
"learning_rate": 9.79635201282785e-06,
"loss": 0.7151,
"step": 220
},
{
"epoch": 0.18637399047421827,
"grad_norm": 2.7702057401479006,
"learning_rate": 9.775421299410977e-06,
"loss": 0.7045,
"step": 225
},
{
"epoch": 0.19051563470697866,
"grad_norm": 2.5524695305725404,
"learning_rate": 9.753491542895237e-06,
"loss": 0.715,
"step": 230
},
{
"epoch": 0.19465727893973908,
"grad_norm": 2.4487445215481305,
"learning_rate": 9.730567331100333e-06,
"loss": 0.7082,
"step": 235
},
{
"epoch": 0.19879892317249948,
"grad_norm": 2.2482641063346116,
"learning_rate": 9.706653459891207e-06,
"loss": 0.699,
"step": 240
},
{
"epoch": 0.2029405674052599,
"grad_norm": 2.451295868720965,
"learning_rate": 9.681754932174719e-06,
"loss": 0.6913,
"step": 245
},
{
"epoch": 0.2070822116380203,
"grad_norm": 2.56626439770444,
"learning_rate": 9.655876956853025e-06,
"loss": 0.6819,
"step": 250
},
{
"epoch": 0.2112238558707807,
"grad_norm": 2.490881018154125,
"learning_rate": 9.629024947733836e-06,
"loss": 0.6843,
"step": 255
},
{
"epoch": 0.2153655001035411,
"grad_norm": 2.4655094247796283,
"learning_rate": 9.601204522397826e-06,
"loss": 0.6874,
"step": 260
},
{
"epoch": 0.2195071443363015,
"grad_norm": 2.6029112443690625,
"learning_rate": 9.572421501023403e-06,
"loss": 0.6773,
"step": 265
},
{
"epoch": 0.22364878856906192,
"grad_norm": 2.303292362769761,
"learning_rate": 9.5426819051691e-06,
"loss": 0.6669,
"step": 270
},
{
"epoch": 0.22779043280182232,
"grad_norm": 2.2947735744081568,
"learning_rate": 9.511991956513828e-06,
"loss": 0.6636,
"step": 275
},
{
"epoch": 0.23193207703458274,
"grad_norm": 2.3561731246407045,
"learning_rate": 9.480358075555278e-06,
"loss": 0.6671,
"step": 280
},
{
"epoch": 0.23607372126734313,
"grad_norm": 2.4666706522590442,
"learning_rate": 9.447786880266706e-06,
"loss": 0.6618,
"step": 285
},
{
"epoch": 0.24021536550010353,
"grad_norm": 2.186824941231579,
"learning_rate": 9.414285184712432e-06,
"loss": 0.6619,
"step": 290
},
{
"epoch": 0.24435700973286395,
"grad_norm": 2.1988625035941047,
"learning_rate": 9.37985999762229e-06,
"loss": 0.6473,
"step": 295
},
{
"epoch": 0.24849865396562434,
"grad_norm": 2.43262245251786,
"learning_rate": 9.344518520925377e-06,
"loss": 0.6534,
"step": 300
},
{
"epoch": 0.25264029819838474,
"grad_norm": 2.3140467166469527,
"learning_rate": 9.308268148243355e-06,
"loss": 0.637,
"step": 305
},
{
"epoch": 0.25678194243114516,
"grad_norm": 2.5057247056088996,
"learning_rate": 9.271116463343692e-06,
"loss": 0.6417,
"step": 310
},
{
"epoch": 0.2609235866639056,
"grad_norm": 2.288973054378257,
"learning_rate": 9.23307123855307e-06,
"loss": 0.663,
"step": 315
},
{
"epoch": 0.26506523089666595,
"grad_norm": 2.416778922999839,
"learning_rate": 9.194140433131397e-06,
"loss": 0.6552,
"step": 320
},
{
"epoch": 0.26920687512942637,
"grad_norm": 2.2651292744956244,
"learning_rate": 9.154332191606671e-06,
"loss": 0.6267,
"step": 325
},
{
"epoch": 0.2733485193621868,
"grad_norm": 2.3801866420145954,
"learning_rate": 9.113654842071114e-06,
"loss": 0.6306,
"step": 330
},
{
"epoch": 0.2774901635949472,
"grad_norm": 2.5824252886751964,
"learning_rate": 9.072116894438885e-06,
"loss": 0.6369,
"step": 335
},
{
"epoch": 0.2816318078277076,
"grad_norm": 2.3287880261931875,
"learning_rate": 9.029727038665765e-06,
"loss": 0.6252,
"step": 340
},
{
"epoch": 0.285773452060468,
"grad_norm": 2.3253013276198358,
"learning_rate": 8.986494142931168e-06,
"loss": 0.6165,
"step": 345
},
{
"epoch": 0.2899150962932284,
"grad_norm": 2.3141576363210086,
"learning_rate": 8.94242725178288e-06,
"loss": 0.6003,
"step": 350
},
{
"epoch": 0.2940567405259888,
"grad_norm": 2.2453531818296595,
"learning_rate": 8.89753558424488e-06,
"loss": 0.6,
"step": 355
},
{
"epoch": 0.2981983847587492,
"grad_norm": 2.224520930970933,
"learning_rate": 8.851828531888692e-06,
"loss": 0.6117,
"step": 360
},
{
"epoch": 0.30234002899150964,
"grad_norm": 2.312078602771557,
"learning_rate": 8.805315656868587e-06,
"loss": 0.6067,
"step": 365
},
{
"epoch": 0.30648167322427006,
"grad_norm": 2.287152946702887,
"learning_rate": 8.75800668992117e-06,
"loss": 0.5979,
"step": 370
},
{
"epoch": 0.3106233174570304,
"grad_norm": 2.0335651194660596,
"learning_rate": 8.709911528329623e-06,
"loss": 0.5911,
"step": 375
},
{
"epoch": 0.31476496168979085,
"grad_norm": 2.190338892871162,
"learning_rate": 8.661040233853166e-06,
"loss": 0.5884,
"step": 380
},
{
"epoch": 0.31890660592255127,
"grad_norm": 2.198737460879657,
"learning_rate": 8.611403030622074e-06,
"loss": 0.578,
"step": 385
},
{
"epoch": 0.32304825015531163,
"grad_norm": 2.320069378855735,
"learning_rate": 8.561010302998734e-06,
"loss": 0.5809,
"step": 390
},
{
"epoch": 0.32718989438807206,
"grad_norm": 2.1718618035260793,
"learning_rate": 8.509872593405189e-06,
"loss": 0.5834,
"step": 395
},
{
"epoch": 0.3313315386208325,
"grad_norm": 2.306249404711425,
"learning_rate": 8.458000600117604e-06,
"loss": 0.5817,
"step": 400
},
{
"epoch": 0.3354731828535929,
"grad_norm": 2.2320669776692377,
"learning_rate": 8.40540517502813e-06,
"loss": 0.579,
"step": 405
},
{
"epoch": 0.33961482708635327,
"grad_norm": 2.380541232391175,
"learning_rate": 8.35209732137463e-06,
"loss": 0.5657,
"step": 410
},
{
"epoch": 0.3437564713191137,
"grad_norm": 2.270445952132619,
"learning_rate": 8.298088191438753e-06,
"loss": 0.5569,
"step": 415
},
{
"epoch": 0.3478981155518741,
"grad_norm": 2.353449916475927,
"learning_rate": 8.243389084212808e-06,
"loss": 0.5642,
"step": 420
},
{
"epoch": 0.3520397597846345,
"grad_norm": 2.321086875711968,
"learning_rate": 8.188011443035962e-06,
"loss": 0.5519,
"step": 425
},
{
"epoch": 0.3561814040173949,
"grad_norm": 2.112886796528374,
"learning_rate": 8.131966853200226e-06,
"loss": 0.561,
"step": 430
},
{
"epoch": 0.3603230482501553,
"grad_norm": 2.354896774745211,
"learning_rate": 8.075267039526764e-06,
"loss": 0.5586,
"step": 435
},
{
"epoch": 0.36446469248291574,
"grad_norm": 2.291143610369071,
"learning_rate": 8.017923863912989e-06,
"loss": 0.5491,
"step": 440
},
{
"epoch": 0.3686063367156761,
"grad_norm": 2.2300308493171115,
"learning_rate": 7.959949322850994e-06,
"loss": 0.5562,
"step": 445
},
{
"epoch": 0.37274798094843653,
"grad_norm": 2.28359859552775,
"learning_rate": 7.901355544917827e-06,
"loss": 0.5494,
"step": 450
},
{
"epoch": 0.37688962518119695,
"grad_norm": 2.251157158354555,
"learning_rate": 7.842154788238124e-06,
"loss": 0.5424,
"step": 455
},
{
"epoch": 0.3810312694139573,
"grad_norm": 2.285389132836112,
"learning_rate": 7.782359437919644e-06,
"loss": 0.5423,
"step": 460
},
{
"epoch": 0.38517291364671774,
"grad_norm": 2.578954308129232,
"learning_rate": 7.721982003462255e-06,
"loss": 0.5335,
"step": 465
},
{
"epoch": 0.38931455787947816,
"grad_norm": 2.1334475270465774,
"learning_rate": 7.661035116140856e-06,
"loss": 0.5342,
"step": 470
},
{
"epoch": 0.3934562021122386,
"grad_norm": 2.3233034598991797,
"learning_rate": 7.599531526362873e-06,
"loss": 0.5358,
"step": 475
},
{
"epoch": 0.39759784634499895,
"grad_norm": 2.2758511904313785,
"learning_rate": 7.537484101000787e-06,
"loss": 0.532,
"step": 480
},
{
"epoch": 0.4017394905777594,
"grad_norm": 2.151297761784936,
"learning_rate": 7.474905820700334e-06,
"loss": 0.5135,
"step": 485
},
{
"epoch": 0.4058811348105198,
"grad_norm": 2.3747508627507066,
"learning_rate": 7.411809777164873e-06,
"loss": 0.5306,
"step": 490
},
{
"epoch": 0.41002277904328016,
"grad_norm": 2.202301523886571,
"learning_rate": 7.3482091704165405e-06,
"loss": 0.5247,
"step": 495
},
{
"epoch": 0.4141644232760406,
"grad_norm": 2.153444078468182,
"learning_rate": 7.284117306034733e-06,
"loss": 0.5243,
"step": 500
},
{
"epoch": 0.418306067508801,
"grad_norm": 2.171064236273241,
"learning_rate": 7.219547592372512e-06,
"loss": 0.5187,
"step": 505
},
{
"epoch": 0.4224477117415614,
"grad_norm": 2.2667901238143586,
"learning_rate": 7.15451353775151e-06,
"loss": 0.5126,
"step": 510
},
{
"epoch": 0.4265893559743218,
"grad_norm": 2.4217664685492997,
"learning_rate": 7.089028747635908e-06,
"loss": 0.5166,
"step": 515
},
{
"epoch": 0.4307310002070822,
"grad_norm": 2.269198948091921,
"learning_rate": 7.023106921786118e-06,
"loss": 0.5102,
"step": 520
},
{
"epoch": 0.43487264443984264,
"grad_norm": 2.3389291631103717,
"learning_rate": 6.956761851392706e-06,
"loss": 0.5147,
"step": 525
},
{
"epoch": 0.439014288672603,
"grad_norm": 2.130307708383959,
"learning_rate": 6.890007416191209e-06,
"loss": 0.5,
"step": 530
},
{
"epoch": 0.4431559329053634,
"grad_norm": 2.2319755819000133,
"learning_rate": 6.822857581558423e-06,
"loss": 0.5031,
"step": 535
},
{
"epoch": 0.44729757713812385,
"grad_norm": 2.201034276249066,
"learning_rate": 6.7553263955907755e-06,
"loss": 0.5003,
"step": 540
},
{
"epoch": 0.4514392213708842,
"grad_norm": 2.1906710526713877,
"learning_rate": 6.687427986165379e-06,
"loss": 0.498,
"step": 545
},
{
"epoch": 0.45558086560364464,
"grad_norm": 2.421545219253794,
"learning_rate": 6.6191765579844205e-06,
"loss": 0.4996,
"step": 550
},
{
"epoch": 0.45972250983640506,
"grad_norm": 2.2909926333329977,
"learning_rate": 6.550586389603451e-06,
"loss": 0.4969,
"step": 555
},
{
"epoch": 0.4638641540691655,
"grad_norm": 2.3444032170833116,
"learning_rate": 6.481671830444243e-06,
"loss": 0.4945,
"step": 560
},
{
"epoch": 0.46800579830192585,
"grad_norm": 2.1725157981177916,
"learning_rate": 6.412447297792818e-06,
"loss": 0.4863,
"step": 565
},
{
"epoch": 0.47214744253468627,
"grad_norm": 2.1573058335782753,
"learning_rate": 6.3429272737832726e-06,
"loss": 0.4891,
"step": 570
},
{
"epoch": 0.4762890867674467,
"grad_norm": 2.2084194407586373,
"learning_rate": 6.273126302368037e-06,
"loss": 0.487,
"step": 575
},
{
"epoch": 0.48043073100020706,
"grad_norm": 2.21344464284995,
"learning_rate": 6.203058986275207e-06,
"loss": 0.4857,
"step": 580
},
{
"epoch": 0.4845723752329675,
"grad_norm": 2.2203608524225666,
"learning_rate": 6.132739983953579e-06,
"loss": 0.4828,
"step": 585
},
{
"epoch": 0.4887140194657279,
"grad_norm": 2.113371175419138,
"learning_rate": 6.062184006506027e-06,
"loss": 0.4826,
"step": 590
},
{
"epoch": 0.4928556636984883,
"grad_norm": 2.3454680885393464,
"learning_rate": 5.991405814611855e-06,
"loss": 0.4676,
"step": 595
},
{
"epoch": 0.4969973079312487,
"grad_norm": 2.321964160860843,
"learning_rate": 5.920420215438794e-06,
"loss": 0.4737,
"step": 600
},
{
"epoch": 0.5011389521640092,
"grad_norm": 2.1991634408721943,
"learning_rate": 5.849242059545259e-06,
"loss": 0.465,
"step": 605
},
{
"epoch": 0.5052805963967695,
"grad_norm": 2.165800122945675,
"learning_rate": 5.777886237773542e-06,
"loss": 0.4623,
"step": 610
},
{
"epoch": 0.5094222406295299,
"grad_norm": 2.1403595970614133,
"learning_rate": 5.706367678134562e-06,
"loss": 0.4767,
"step": 615
},
{
"epoch": 0.5135638848622903,
"grad_norm": 2.1427662247598986,
"learning_rate": 5.634701342684852e-06,
"loss": 0.4607,
"step": 620
},
{
"epoch": 0.5177055290950507,
"grad_norm": 2.0626908104196673,
"learning_rate": 5.562902224396416e-06,
"loss": 0.4617,
"step": 625
},
{
"epoch": 0.5218471733278112,
"grad_norm": 2.0098208614700326,
"learning_rate": 5.49098534402012e-06,
"loss": 0.4618,
"step": 630
},
{
"epoch": 0.5259888175605716,
"grad_norm": 2.0735743487283607,
"learning_rate": 5.418965746943281e-06,
"loss": 0.459,
"step": 635
},
{
"epoch": 0.5301304617933319,
"grad_norm": 2.024087007627894,
"learning_rate": 5.34685850004208e-06,
"loss": 0.4539,
"step": 640
},
{
"epoch": 0.5342721060260923,
"grad_norm": 2.130234594696483,
"learning_rate": 5.2746786885295034e-06,
"loss": 0.453,
"step": 645
},
{
"epoch": 0.5384137502588527,
"grad_norm": 2.0986269383387706,
"learning_rate": 5.2024414127994325e-06,
"loss": 0.4538,
"step": 650
},
{
"epoch": 0.5425553944916132,
"grad_norm": 2.209414073449144,
"learning_rate": 5.13016178526756e-06,
"loss": 0.4448,
"step": 655
},
{
"epoch": 0.5466970387243736,
"grad_norm": 2.15118663280229,
"learning_rate": 5.057854927209804e-06,
"loss": 0.4446,
"step": 660
},
{
"epoch": 0.550838682957134,
"grad_norm": 2.033860154274491,
"learning_rate": 4.985535965598843e-06,
"loss": 0.4447,
"step": 665
},
{
"epoch": 0.5549803271898944,
"grad_norm": 2.129135189760016,
"learning_rate": 4.913220029939491e-06,
"loss": 0.4512,
"step": 670
},
{
"epoch": 0.5591219714226547,
"grad_norm": 2.090833400754654,
"learning_rate": 4.840922249103506e-06,
"loss": 0.4467,
"step": 675
},
{
"epoch": 0.5632636156554152,
"grad_norm": 2.0680502896348836,
"learning_rate": 4.7686577481645745e-06,
"loss": 0.4316,
"step": 680
},
{
"epoch": 0.5674052598881756,
"grad_norm": 2.07449578249477,
"learning_rate": 4.696441645234042e-06,
"loss": 0.4421,
"step": 685
},
{
"epoch": 0.571546904120936,
"grad_norm": 2.2932786860060235,
"learning_rate": 4.624289048298147e-06,
"loss": 0.4433,
"step": 690
},
{
"epoch": 0.5756885483536964,
"grad_norm": 2.058591317854592,
"learning_rate": 4.55221505205734e-06,
"loss": 0.4298,
"step": 695
},
{
"epoch": 0.5798301925864568,
"grad_norm": 2.3196625048015704,
"learning_rate": 4.480234734768393e-06,
"loss": 0.4326,
"step": 700
},
{
"epoch": 0.5839718368192173,
"grad_norm": 2.073473068971661,
"learning_rate": 4.408363155089952e-06,
"loss": 0.4335,
"step": 705
},
{
"epoch": 0.5881134810519776,
"grad_norm": 2.029766952429309,
"learning_rate": 4.3366153489321855e-06,
"loss": 0.4273,
"step": 710
},
{
"epoch": 0.592255125284738,
"grad_norm": 2.1078664280012007,
"learning_rate": 4.265006326311199e-06,
"loss": 0.415,
"step": 715
},
{
"epoch": 0.5963967695174984,
"grad_norm": 2.0038145759132915,
"learning_rate": 4.1935510682088545e-06,
"loss": 0.4244,
"step": 720
},
{
"epoch": 0.6005384137502588,
"grad_norm": 2.0468827081169976,
"learning_rate": 4.122264523438668e-06,
"loss": 0.4226,
"step": 725
},
{
"epoch": 0.6046800579830193,
"grad_norm": 2.0660110564277923,
"learning_rate": 4.051161605518453e-06,
"loss": 0.4222,
"step": 730
},
{
"epoch": 0.6088217022157797,
"grad_norm": 2.088150514221739,
"learning_rate": 3.980257189550316e-06,
"loss": 0.433,
"step": 735
},
{
"epoch": 0.6129633464485401,
"grad_norm": 2.1225320313052447,
"learning_rate": 3.909566109108727e-06,
"loss": 0.4161,
"step": 740
},
{
"epoch": 0.6171049906813004,
"grad_norm": 2.0990451349998227,
"learning_rate": 3.839103153137247e-06,
"loss": 0.417,
"step": 745
},
{
"epoch": 0.6212466349140608,
"grad_norm": 2.104242942119707,
"learning_rate": 3.768883062854598e-06,
"loss": 0.4081,
"step": 750
},
{
"epoch": 0.6253882791468213,
"grad_norm": 2.051137757693821,
"learning_rate": 3.6989205286707398e-06,
"loss": 0.4108,
"step": 755
},
{
"epoch": 0.6295299233795817,
"grad_norm": 2.099437836109555,
"learning_rate": 3.6292301871135425e-06,
"loss": 0.411,
"step": 760
},
{
"epoch": 0.6336715676123421,
"grad_norm": 2.056296390597689,
"learning_rate": 3.55982661776676e-06,
"loss": 0.4047,
"step": 765
},
{
"epoch": 0.6378132118451025,
"grad_norm": 2.0454981613636503,
"learning_rate": 3.4907243402199013e-06,
"loss": 0.4044,
"step": 770
},
{
"epoch": 0.641954856077863,
"grad_norm": 2.0625058430327092,
"learning_rate": 3.4219378110306523e-06,
"loss": 0.4103,
"step": 775
},
{
"epoch": 0.6460965003106233,
"grad_norm": 2.0640184519207123,
"learning_rate": 3.353481420700495e-06,
"loss": 0.4109,
"step": 780
},
{
"epoch": 0.6502381445433837,
"grad_norm": 2.096435391565135,
"learning_rate": 3.285369490664133e-06,
"loss": 0.4103,
"step": 785
},
{
"epoch": 0.6543797887761441,
"grad_norm": 2.0511959776297983,
"learning_rate": 3.2176162702933816e-06,
"loss": 0.3991,
"step": 790
},
{
"epoch": 0.6585214330089045,
"grad_norm": 2.004915794597741,
"learning_rate": 3.150235933916115e-06,
"loss": 0.401,
"step": 795
},
{
"epoch": 0.662663077241665,
"grad_norm": 1.989931694202458,
"learning_rate": 3.0832425778509235e-06,
"loss": 0.4015,
"step": 800
},
{
"epoch": 0.6668047214744254,
"grad_norm": 2.0927029677784383,
"learning_rate": 3.0166502174581012e-06,
"loss": 0.3904,
"step": 805
},
{
"epoch": 0.6709463657071858,
"grad_norm": 2.05569943435716,
"learning_rate": 2.950472784207544e-06,
"loss": 0.3976,
"step": 810
},
{
"epoch": 0.6750880099399461,
"grad_norm": 2.0070990032518456,
"learning_rate": 2.8847241227642255e-06,
"loss": 0.3855,
"step": 815
},
{
"epoch": 0.6792296541727065,
"grad_norm": 1.9791880977464777,
"learning_rate": 2.819417988091814e-06,
"loss": 0.3831,
"step": 820
},
{
"epoch": 0.683371298405467,
"grad_norm": 1.9874217780709027,
"learning_rate": 2.754568042575061e-06,
"loss": 0.3928,
"step": 825
},
{
"epoch": 0.6875129426382274,
"grad_norm": 1.9444889966862584,
"learning_rate": 2.6901878531615677e-06,
"loss": 0.3967,
"step": 830
},
{
"epoch": 0.6916545868709878,
"grad_norm": 2.190695001941064,
"learning_rate": 2.6262908885235046e-06,
"loss": 0.384,
"step": 835
},
{
"epoch": 0.6957962311037482,
"grad_norm": 1.991552664612379,
"learning_rate": 2.5628905162398797e-06,
"loss": 0.3831,
"step": 840
},
{
"epoch": 0.6999378753365086,
"grad_norm": 2.017973650880143,
"learning_rate": 2.5000000000000015e-06,
"loss": 0.3851,
"step": 845
},
{
"epoch": 0.704079519569269,
"grad_norm": 2.138390804530181,
"learning_rate": 2.4376324968286154e-06,
"loss": 0.3777,
"step": 850
},
{
"epoch": 0.7082211638020294,
"grad_norm": 2.0021049336250814,
"learning_rate": 2.375801054333409e-06,
"loss": 0.3891,
"step": 855
},
{
"epoch": 0.7123628080347898,
"grad_norm": 2.1027439407928505,
"learning_rate": 2.3145186079753685e-06,
"loss": 0.381,
"step": 860
},
{
"epoch": 0.7165044522675502,
"grad_norm": 2.0200252919367823,
"learning_rate": 2.253797978362617e-06,
"loss": 0.3754,
"step": 865
},
{
"epoch": 0.7206460965003106,
"grad_norm": 2.022944794755911,
"learning_rate": 2.193651868568285e-06,
"loss": 0.3719,
"step": 870
},
{
"epoch": 0.7247877407330711,
"grad_norm": 2.011383665562108,
"learning_rate": 2.1340928614729445e-06,
"loss": 0.3716,
"step": 875
},
{
"epoch": 0.7289293849658315,
"grad_norm": 2.1000401745759767,
"learning_rate": 2.075133417132223e-06,
"loss": 0.3773,
"step": 880
},
{
"epoch": 0.7330710291985918,
"grad_norm": 2.0322415551222277,
"learning_rate": 2.016785870170079e-06,
"loss": 0.3755,
"step": 885
},
{
"epoch": 0.7372126734313522,
"grad_norm": 2.0586335139183327,
"learning_rate": 1.9590624271983406e-06,
"loss": 0.3749,
"step": 890
},
{
"epoch": 0.7413543176641126,
"grad_norm": 2.057098686991852,
"learning_rate": 1.9019751642630252e-06,
"loss": 0.3733,
"step": 895
},
{
"epoch": 0.7454959618968731,
"grad_norm": 1.9856940958346814,
"learning_rate": 1.8455360243179537e-06,
"loss": 0.3737,
"step": 900
},
{
"epoch": 0.7496376061296335,
"grad_norm": 2.049797779671496,
"learning_rate": 1.7897568147262323e-06,
"loss": 0.3678,
"step": 905
},
{
"epoch": 0.7537792503623939,
"grad_norm": 2.1869162271482083,
"learning_rate": 1.7346492047900897e-06,
"loss": 0.3769,
"step": 910
},
{
"epoch": 0.7579208945951543,
"grad_norm": 2.0166177063427444,
"learning_rate": 1.6802247233095914e-06,
"loss": 0.3722,
"step": 915
},
{
"epoch": 0.7620625388279146,
"grad_norm": 2.0284630775550094,
"learning_rate": 1.626494756170765e-06,
"loss": 0.3562,
"step": 920
},
{
"epoch": 0.7662041830606751,
"grad_norm": 1.9844703515401159,
"learning_rate": 1.5734705439636017e-06,
"loss": 0.3641,
"step": 925
},
{
"epoch": 0.7703458272934355,
"grad_norm": 2.026880974919187,
"learning_rate": 1.5211631796304721e-06,
"loss": 0.3671,
"step": 930
},
{
"epoch": 0.7744874715261959,
"grad_norm": 2.2261033018640775,
"learning_rate": 1.46958360614543e-06,
"loss": 0.3677,
"step": 935
},
{
"epoch": 0.7786291157589563,
"grad_norm": 2.0419572811826527,
"learning_rate": 1.4187426142248723e-06,
"loss": 0.3567,
"step": 940
},
{
"epoch": 0.7827707599917167,
"grad_norm": 1.9436995231419443,
"learning_rate": 1.3686508400700787e-06,
"loss": 0.3659,
"step": 945
},
{
"epoch": 0.7869124042244772,
"grad_norm": 2.118970998919544,
"learning_rate": 1.3193187631420462e-06,
"loss": 0.3621,
"step": 950
},
{
"epoch": 0.7910540484572375,
"grad_norm": 2.059747456229496,
"learning_rate": 1.2707567039691505e-06,
"loss": 0.3565,
"step": 955
},
{
"epoch": 0.7951956926899979,
"grad_norm": 2.090689117796637,
"learning_rate": 1.222974821988024e-06,
"loss": 0.3583,
"step": 960
},
{
"epoch": 0.7993373369227583,
"grad_norm": 2.059076495278081,
"learning_rate": 1.1759831134181504e-06,
"loss": 0.3622,
"step": 965
},
{
"epoch": 0.8034789811555187,
"grad_norm": 1.9884532847703864,
"learning_rate": 1.1297914091706086e-06,
"loss": 0.3541,
"step": 970
},
{
"epoch": 0.8076206253882792,
"grad_norm": 1.8905068597211632,
"learning_rate": 1.0844093727913868e-06,
"loss": 0.3578,
"step": 975
},
{
"epoch": 0.8117622696210396,
"grad_norm": 2.0536970678158206,
"learning_rate": 1.039846498439727e-06,
"loss": 0.353,
"step": 980
},
{
"epoch": 0.8159039138538,
"grad_norm": 1.9394493842379006,
"learning_rate": 9.961121089018933e-07,
"loss": 0.3552,
"step": 985
},
{
"epoch": 0.8200455580865603,
"grad_norm": 1.9161234213176144,
"learning_rate": 9.532153536407923e-07,
"loss": 0.3572,
"step": 990
},
{
"epoch": 0.8241872023193207,
"grad_norm": 2.0679533219870394,
"learning_rate": 9.111652068818621e-07,
"loss": 0.3499,
"step": 995
},
{
"epoch": 0.8283288465520812,
"grad_norm": 2.007021263318756,
"learning_rate": 8.699704657356195e-07,
"loss": 0.3503,
"step": 1000
},
{
"epoch": 0.8324704907848416,
"grad_norm": 1.940869076922602,
"learning_rate": 8.296397483572515e-07,
"loss": 0.3588,
"step": 1005
},
{
"epoch": 0.836612135017602,
"grad_norm": 2.0578809556631774,
"learning_rate": 7.901814921436624e-07,
"loss": 0.3497,
"step": 1010
},
{
"epoch": 0.8407537792503624,
"grad_norm": 1.9963820836617243,
"learning_rate": 7.516039519683105e-07,
"loss": 0.3459,
"step": 1015
},
{
"epoch": 0.8448954234831229,
"grad_norm": 2.1312384802093707,
"learning_rate": 7.139151984542636e-07,
"loss": 0.3515,
"step": 1020
},
{
"epoch": 0.8490370677158832,
"grad_norm": 2.051969582922354,
"learning_rate": 6.771231162857722e-07,
"loss": 0.3497,
"step": 1025
},
{
"epoch": 0.8531787119486436,
"grad_norm": 1.944237335752014,
"learning_rate": 6.412354025587509e-07,
"loss": 0.3454,
"step": 1030
},
{
"epoch": 0.857320356181404,
"grad_norm": 2.0013927970500824,
"learning_rate": 6.062595651705111e-07,
"loss": 0.3484,
"step": 1035
},
{
"epoch": 0.8614620004141644,
"grad_norm": 1.9953241481292785,
"learning_rate": 5.722029212490666e-07,
"loss": 0.3467,
"step": 1040
},
{
"epoch": 0.8656036446469249,
"grad_norm": 1.8562753423514788,
"learning_rate": 5.390725956223531e-07,
"loss": 0.3439,
"step": 1045
},
{
"epoch": 0.8697452888796853,
"grad_norm": 2.014412721333329,
"learning_rate": 5.068755193276798e-07,
"loss": 0.3475,
"step": 1050
},
{
"epoch": 0.8738869331124457,
"grad_norm": 2.0883606449971013,
"learning_rate": 4.756184281617121e-07,
"loss": 0.3442,
"step": 1055
},
{
"epoch": 0.878028577345206,
"grad_norm": 1.9479502623498623,
"learning_rate": 4.4530786127131575e-07,
"loss": 0.3516,
"step": 1060
},
{
"epoch": 0.8821702215779664,
"grad_norm": 1.9878832197736611,
"learning_rate": 4.159501597855287e-07,
"loss": 0.3468,
"step": 1065
},
{
"epoch": 0.8863118658107269,
"grad_norm": 1.981044415868457,
"learning_rate": 3.8755146548896784e-07,
"loss": 0.3442,
"step": 1070
},
{
"epoch": 0.8904535100434873,
"grad_norm": 2.078622119500157,
"learning_rate": 3.6011771953693044e-07,
"loss": 0.3414,
"step": 1075
},
{
"epoch": 0.8945951542762477,
"grad_norm": 1.9832065578238605,
"learning_rate": 3.336546612124758e-07,
"loss": 0.3462,
"step": 1080
},
{
"epoch": 0.8987367985090081,
"grad_norm": 2.077188737775822,
"learning_rate": 3.081678267257404e-07,
"loss": 0.3445,
"step": 1085
},
{
"epoch": 0.9028784427417684,
"grad_norm": 1.9680080545892447,
"learning_rate": 2.836625480557265e-07,
"loss": 0.3433,
"step": 1090
},
{
"epoch": 0.9070200869745288,
"grad_norm": 2.0554539318334344,
"learning_rate": 2.601439518348331e-07,
"loss": 0.3411,
"step": 1095
},
{
"epoch": 0.9111617312072893,
"grad_norm": 1.9738460247856697,
"learning_rate": 2.376169582763288e-07,
"loss": 0.3423,
"step": 1100
},
{
"epoch": 0.9153033754400497,
"grad_norm": 2.100671455290073,
"learning_rate": 2.1608628014502364e-07,
"loss": 0.3412,
"step": 1105
},
{
"epoch": 0.9194450196728101,
"grad_norm": 2.0790276531527865,
"learning_rate": 1.955564217713335e-07,
"loss": 0.3376,
"step": 1110
},
{
"epoch": 0.9235866639055705,
"grad_norm": 2.039950191776352,
"learning_rate": 1.7603167810894662e-07,
"loss": 0.3406,
"step": 1115
},
{
"epoch": 0.927728308138331,
"grad_norm": 2.0162487373459435,
"learning_rate": 1.5751613383630128e-07,
"loss": 0.3465,
"step": 1120
},
{
"epoch": 0.9318699523710913,
"grad_norm": 2.092197022372298,
"learning_rate": 1.4001366250204762e-07,
"loss": 0.337,
"step": 1125
},
{
"epoch": 0.9360115966038517,
"grad_norm": 1.944362051868436,
"learning_rate": 1.235279257146804e-07,
"loss": 0.3378,
"step": 1130
},
{
"epoch": 0.9401532408366121,
"grad_norm": 2.06431956869675,
"learning_rate": 1.080623723765134e-07,
"loss": 0.3352,
"step": 1135
},
{
"epoch": 0.9442948850693725,
"grad_norm": 2.067267199918096,
"learning_rate": 9.362023796215036e-08,
"loss": 0.3385,
"step": 1140
},
{
"epoch": 0.948436529302133,
"grad_norm": 2.050374938357445,
"learning_rate": 8.020454384160437e-08,
"loss": 0.345,
"step": 1145
},
{
"epoch": 0.9525781735348934,
"grad_norm": 2.058842049841172,
"learning_rate": 6.78180966482156e-08,
"loss": 0.3431,
"step": 1150
},
{
"epoch": 0.9567198177676538,
"grad_norm": 2.1228820426864825,
"learning_rate": 5.646348769148491e-08,
"loss": 0.3415,
"step": 1155
},
{
"epoch": 0.9608614620004141,
"grad_norm": 1.9574417210663868,
"learning_rate": 4.6143092414961396e-08,
"loss": 0.3346,
"step": 1160
},
{
"epoch": 0.9650031062331745,
"grad_norm": 2.0568028289295293,
"learning_rate": 3.685906989928656e-08,
"loss": 0.3404,
"step": 1165
},
{
"epoch": 0.969144750465935,
"grad_norm": 2.187483815397574,
"learning_rate": 2.861336241050061e-08,
"loss": 0.3366,
"step": 1170
},
{
"epoch": 0.9732863946986954,
"grad_norm": 2.0602914938549626,
"learning_rate": 2.1407694993714755e-08,
"loss": 0.3419,
"step": 1175
},
{
"epoch": 0.9774280389314558,
"grad_norm": 2.0097264769694174,
"learning_rate": 1.5243575112218744e-08,
"loss": 0.3391,
"step": 1180
},
{
"epoch": 0.9815696831642162,
"grad_norm": 2.1300950765468456,
"learning_rate": 1.0122292332114814e-08,
"loss": 0.3479,
"step": 1185
},
{
"epoch": 0.9857113273969766,
"grad_norm": 1.9814636182776308,
"learning_rate": 6.044918052531268e-09,
"loss": 0.3359,
"step": 1190
},
{
"epoch": 0.989852971629737,
"grad_norm": 2.0905112637682053,
"learning_rate": 3.0123052814812203e-09,
"loss": 0.3383,
"step": 1195
},
{
"epoch": 0.9939946158624974,
"grad_norm": 1.966590567136286,
"learning_rate": 1.025088457409229e-09,
"loss": 0.3325,
"step": 1200
},
{
"epoch": 0.9981362600952578,
"grad_norm": 2.035603528671296,
"learning_rate": 8.368331646302353e-11,
"loss": 0.3416,
"step": 1205
},
{
"epoch": 0.999792917788362,
"eval_loss": 0.3488326072692871,
"eval_runtime": 0.9501,
"eval_samples_per_second": 3.158,
"eval_steps_per_second": 1.053,
"step": 1207
},
{
"epoch": 0.999792917788362,
"step": 1207,
"total_flos": 252668899491840.0,
"train_loss": 0.5272446889569172,
"train_runtime": 29415.576,
"train_samples_per_second": 1.313,
"train_steps_per_second": 0.041
}
],
"logging_steps": 5,
"max_steps": 1207,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 100,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 252668899491840.0,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}