qwen_feedback_dirty_0.5 / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 1661,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0006020469596628537,
"grad_norm": 27.22392857163454,
"learning_rate": 5.98802395209581e-08,
"loss": 1.971,
"step": 1
},
{
"epoch": 0.0030102347983142685,
"grad_norm": 13.65386668389746,
"learning_rate": 2.9940119760479047e-07,
"loss": 1.9968,
"step": 5
},
{
"epoch": 0.006020469596628537,
"grad_norm": 12.456315967368859,
"learning_rate": 5.988023952095809e-07,
"loss": 1.9813,
"step": 10
},
{
"epoch": 0.009030704394942806,
"grad_norm": 9.02936644779285,
"learning_rate": 8.982035928143713e-07,
"loss": 1.9658,
"step": 15
},
{
"epoch": 0.012040939193257074,
"grad_norm": 7.460657498700517,
"learning_rate": 1.1976047904191619e-06,
"loss": 1.9351,
"step": 20
},
{
"epoch": 0.015051173991571343,
"grad_norm": 10.46692979910004,
"learning_rate": 1.4970059880239521e-06,
"loss": 1.892,
"step": 25
},
{
"epoch": 0.018061408789885613,
"grad_norm": 3.106659804143662,
"learning_rate": 1.7964071856287426e-06,
"loss": 1.8184,
"step": 30
},
{
"epoch": 0.02107164358819988,
"grad_norm": 3.4501878453247015,
"learning_rate": 2.095808383233533e-06,
"loss": 1.7497,
"step": 35
},
{
"epoch": 0.024081878386514148,
"grad_norm": 6.687362074963905,
"learning_rate": 2.3952095808383237e-06,
"loss": 1.7088,
"step": 40
},
{
"epoch": 0.027092113184828417,
"grad_norm": 1.7523213563959052,
"learning_rate": 2.694610778443114e-06,
"loss": 1.693,
"step": 45
},
{
"epoch": 0.030102347983142687,
"grad_norm": 7.280651812563955,
"learning_rate": 2.9940119760479042e-06,
"loss": 1.6146,
"step": 50
},
{
"epoch": 0.033112582781456956,
"grad_norm": 1.9204330636460694,
"learning_rate": 3.2934131736526947e-06,
"loss": 1.5607,
"step": 55
},
{
"epoch": 0.036122817579771226,
"grad_norm": 4.242920324801297,
"learning_rate": 3.592814371257485e-06,
"loss": 1.5187,
"step": 60
},
{
"epoch": 0.03913305237808549,
"grad_norm": 3.221869314237582,
"learning_rate": 3.892215568862276e-06,
"loss": 1.4559,
"step": 65
},
{
"epoch": 0.04214328717639976,
"grad_norm": 1.1211647421609696,
"learning_rate": 4.191616766467066e-06,
"loss": 1.4626,
"step": 70
},
{
"epoch": 0.04515352197471403,
"grad_norm": 18.99681558637136,
"learning_rate": 4.4910179640718566e-06,
"loss": 1.4324,
"step": 75
},
{
"epoch": 0.048163756773028296,
"grad_norm": 0.9850763475643952,
"learning_rate": 4.7904191616766475e-06,
"loss": 1.3939,
"step": 80
},
{
"epoch": 0.051173991571342566,
"grad_norm": 1.6779178850544632,
"learning_rate": 5.0898203592814375e-06,
"loss": 1.4141,
"step": 85
},
{
"epoch": 0.054184226369656835,
"grad_norm": 0.9258382254032997,
"learning_rate": 5.389221556886228e-06,
"loss": 1.3921,
"step": 90
},
{
"epoch": 0.057194461167971104,
"grad_norm": 0.9127215663526058,
"learning_rate": 5.6886227544910184e-06,
"loss": 1.3879,
"step": 95
},
{
"epoch": 0.060204695966285374,
"grad_norm": 0.9765773402128775,
"learning_rate": 5.9880239520958085e-06,
"loss": 1.3614,
"step": 100
},
{
"epoch": 0.06321493076459964,
"grad_norm": 0.9543473669785393,
"learning_rate": 6.2874251497005985e-06,
"loss": 1.3517,
"step": 105
},
{
"epoch": 0.06622516556291391,
"grad_norm": 0.9289846325699406,
"learning_rate": 6.586826347305389e-06,
"loss": 1.3372,
"step": 110
},
{
"epoch": 0.06923540036122817,
"grad_norm": 0.9873011952453609,
"learning_rate": 6.88622754491018e-06,
"loss": 1.3378,
"step": 115
},
{
"epoch": 0.07224563515954245,
"grad_norm": 1.0087695630072344,
"learning_rate": 7.18562874251497e-06,
"loss": 1.3286,
"step": 120
},
{
"epoch": 0.07525586995785671,
"grad_norm": 2.041087321823113,
"learning_rate": 7.485029940119761e-06,
"loss": 1.33,
"step": 125
},
{
"epoch": 0.07826610475617098,
"grad_norm": 4.129687925158122,
"learning_rate": 7.784431137724551e-06,
"loss": 1.3031,
"step": 130
},
{
"epoch": 0.08127633955448525,
"grad_norm": 0.9348500545655409,
"learning_rate": 8.083832335329342e-06,
"loss": 1.3204,
"step": 135
},
{
"epoch": 0.08428657435279951,
"grad_norm": 0.9282851253114677,
"learning_rate": 8.383233532934131e-06,
"loss": 1.2841,
"step": 140
},
{
"epoch": 0.08729680915111379,
"grad_norm": 0.9100216621816694,
"learning_rate": 8.682634730538922e-06,
"loss": 1.2858,
"step": 145
},
{
"epoch": 0.09030704394942805,
"grad_norm": 0.9981824581989874,
"learning_rate": 8.982035928143713e-06,
"loss": 1.2704,
"step": 150
},
{
"epoch": 0.09331727874774233,
"grad_norm": 0.9537988209000966,
"learning_rate": 9.281437125748504e-06,
"loss": 1.3038,
"step": 155
},
{
"epoch": 0.09632751354605659,
"grad_norm": 0.9601755129617946,
"learning_rate": 9.580838323353295e-06,
"loss": 1.2969,
"step": 160
},
{
"epoch": 0.09933774834437085,
"grad_norm": 0.9847338523503186,
"learning_rate": 9.880239520958084e-06,
"loss": 1.2694,
"step": 165
},
{
"epoch": 0.10234798314268513,
"grad_norm": 0.9800990746472159,
"learning_rate": 9.999900509954779e-06,
"loss": 1.2673,
"step": 170
},
{
"epoch": 0.1053582179409994,
"grad_norm": 1.0362064979246783,
"learning_rate": 9.999292529572152e-06,
"loss": 1.2687,
"step": 175
},
{
"epoch": 0.10836845273931367,
"grad_norm": 0.97331445513859,
"learning_rate": 9.998131908181262e-06,
"loss": 1.2693,
"step": 180
},
{
"epoch": 0.11137868753762793,
"grad_norm": 0.9597440336445656,
"learning_rate": 9.996418774081658e-06,
"loss": 1.2523,
"step": 185
},
{
"epoch": 0.11438892233594221,
"grad_norm": 1.0622289859021092,
"learning_rate": 9.994153316649769e-06,
"loss": 1.2726,
"step": 190
},
{
"epoch": 0.11739915713425647,
"grad_norm": 0.9667990321619035,
"learning_rate": 9.991335786317964e-06,
"loss": 1.2551,
"step": 195
},
{
"epoch": 0.12040939193257075,
"grad_norm": 1.0072941793405643,
"learning_rate": 9.987966494546873e-06,
"loss": 1.2255,
"step": 200
},
{
"epoch": 0.12341962673088501,
"grad_norm": 0.9596767117485965,
"learning_rate": 9.984045813790959e-06,
"loss": 1.2465,
"step": 205
},
{
"epoch": 0.12642986152919927,
"grad_norm": 1.0015445744889278,
"learning_rate": 9.979574177457337e-06,
"loss": 1.2463,
"step": 210
},
{
"epoch": 0.12944009632751355,
"grad_norm": 0.9779464829184125,
"learning_rate": 9.974552079857873e-06,
"loss": 1.2423,
"step": 215
},
{
"epoch": 0.13245033112582782,
"grad_norm": 0.9588810325174535,
"learning_rate": 9.968980076154533e-06,
"loss": 1.2515,
"step": 220
},
{
"epoch": 0.13546056592414207,
"grad_norm": 0.9762836295455931,
"learning_rate": 9.962858782298023e-06,
"loss": 1.2317,
"step": 225
},
{
"epoch": 0.13847080072245635,
"grad_norm": 0.9455765624623369,
"learning_rate": 9.956188874959686e-06,
"loss": 1.23,
"step": 230
},
{
"epoch": 0.14148103552077063,
"grad_norm": 1.0254791337431468,
"learning_rate": 9.948971091456715e-06,
"loss": 1.2313,
"step": 235
},
{
"epoch": 0.1444912703190849,
"grad_norm": 1.0494300760638828,
"learning_rate": 9.941206229670634e-06,
"loss": 1.2203,
"step": 240
},
{
"epoch": 0.14750150511739915,
"grad_norm": 1.0130193836029537,
"learning_rate": 9.932895147959106e-06,
"loss": 1.2038,
"step": 245
},
{
"epoch": 0.15051173991571343,
"grad_norm": 1.0190268775942266,
"learning_rate": 9.924038765061042e-06,
"loss": 1.1867,
"step": 250
},
{
"epoch": 0.1535219747140277,
"grad_norm": 1.0637623816845347,
"learning_rate": 9.91463805999504e-06,
"loss": 1.2114,
"step": 255
},
{
"epoch": 0.15653220951234195,
"grad_norm": 1.0582426619825864,
"learning_rate": 9.904694071951167e-06,
"loss": 1.2119,
"step": 260
},
{
"epoch": 0.15954244431065623,
"grad_norm": 1.0051521209552554,
"learning_rate": 9.894207900176074e-06,
"loss": 1.195,
"step": 265
},
{
"epoch": 0.1625526791089705,
"grad_norm": 1.006499396359629,
"learning_rate": 9.883180703851488e-06,
"loss": 1.2009,
"step": 270
},
{
"epoch": 0.16556291390728478,
"grad_norm": 0.9907891015612423,
"learning_rate": 9.871613701966067e-06,
"loss": 1.1802,
"step": 275
},
{
"epoch": 0.16857314870559903,
"grad_norm": 0.9353673363284244,
"learning_rate": 9.859508173180653e-06,
"loss": 1.1715,
"step": 280
},
{
"epoch": 0.1715833835039133,
"grad_norm": 0.9848000031544851,
"learning_rate": 9.846865455686915e-06,
"loss": 1.1751,
"step": 285
},
{
"epoch": 0.17459361830222758,
"grad_norm": 0.9778599958159084,
"learning_rate": 9.833686947059436e-06,
"loss": 1.1659,
"step": 290
},
{
"epoch": 0.17760385310054183,
"grad_norm": 1.0197914549232654,
"learning_rate": 9.819974104101198e-06,
"loss": 1.1609,
"step": 295
},
{
"epoch": 0.1806140878988561,
"grad_norm": 0.9700422577443836,
"learning_rate": 9.80572844268256e-06,
"loss": 1.1489,
"step": 300
},
{
"epoch": 0.18362432269717038,
"grad_norm": 1.0249498510324442,
"learning_rate": 9.790951537573686e-06,
"loss": 1.1829,
"step": 305
},
{
"epoch": 0.18663455749548466,
"grad_norm": 1.0644946420369092,
"learning_rate": 9.775645022270448e-06,
"loss": 1.164,
"step": 310
},
{
"epoch": 0.1896447922937989,
"grad_norm": 0.9867050366956028,
"learning_rate": 9.759810588813872e-06,
"loss": 1.1648,
"step": 315
},
{
"epoch": 0.19265502709211318,
"grad_norm": 1.1128215197644398,
"learning_rate": 9.743449987603082e-06,
"loss": 1.16,
"step": 320
},
{
"epoch": 0.19566526189042746,
"grad_norm": 1.0379238093017742,
"learning_rate": 9.726565027201813e-06,
"loss": 1.173,
"step": 325
},
{
"epoch": 0.1986754966887417,
"grad_norm": 1.037290324574154,
"learning_rate": 9.70915757413847e-06,
"loss": 1.1611,
"step": 330
},
{
"epoch": 0.20168573148705599,
"grad_norm": 1.0384997352637115,
"learning_rate": 9.691229552699817e-06,
"loss": 1.1398,
"step": 335
},
{
"epoch": 0.20469596628537026,
"grad_norm": 1.1360507992714308,
"learning_rate": 9.672782944718234e-06,
"loss": 1.1652,
"step": 340
},
{
"epoch": 0.20770620108368454,
"grad_norm": 1.055086294709249,
"learning_rate": 9.65381978935266e-06,
"loss": 1.139,
"step": 345
},
{
"epoch": 0.2107164358819988,
"grad_norm": 1.0570445286606438,
"learning_rate": 9.634342182863163e-06,
"loss": 1.1155,
"step": 350
},
{
"epoch": 0.21372667068031306,
"grad_norm": 1.0499986459043307,
"learning_rate": 9.614352278379217e-06,
"loss": 1.1484,
"step": 355
},
{
"epoch": 0.21673690547862734,
"grad_norm": 1.072674005812888,
"learning_rate": 9.593852285661684e-06,
"loss": 1.1546,
"step": 360
},
{
"epoch": 0.2197471402769416,
"grad_norm": 1.026680216356016,
"learning_rate": 9.572844470858537e-06,
"loss": 1.1362,
"step": 365
},
{
"epoch": 0.22275737507525586,
"grad_norm": 1.154893182120373,
"learning_rate": 9.551331156254358e-06,
"loss": 1.1375,
"step": 370
},
{
"epoch": 0.22576760987357014,
"grad_norm": 1.030915882210545,
"learning_rate": 9.529314720013618e-06,
"loss": 1.1246,
"step": 375
},
{
"epoch": 0.22877784467188442,
"grad_norm": 1.0275709672820799,
"learning_rate": 9.506797595917787e-06,
"loss": 1.1306,
"step": 380
},
{
"epoch": 0.23178807947019867,
"grad_norm": 1.0612700071573922,
"learning_rate": 9.483782273096295e-06,
"loss": 1.1291,
"step": 385
},
{
"epoch": 0.23479831426851294,
"grad_norm": 1.0249725155040352,
"learning_rate": 9.460271295751373e-06,
"loss": 1.1428,
"step": 390
},
{
"epoch": 0.23780854906682722,
"grad_norm": 1.01241424823746,
"learning_rate": 9.436267262876808e-06,
"loss": 1.1195,
"step": 395
},
{
"epoch": 0.2408187838651415,
"grad_norm": 1.0678914761402232,
"learning_rate": 9.411772827970642e-06,
"loss": 1.1164,
"step": 400
},
{
"epoch": 0.24382901866345574,
"grad_norm": 1.063653307977233,
"learning_rate": 9.38679069874184e-06,
"loss": 1.1184,
"step": 405
},
{
"epoch": 0.24683925346177002,
"grad_norm": 1.0166853640511404,
"learning_rate": 9.36132363681097e-06,
"loss": 1.1186,
"step": 410
},
{
"epoch": 0.2498494882600843,
"grad_norm": 1.0224474900808593,
"learning_rate": 9.335374457404928e-06,
"loss": 1.1259,
"step": 415
},
{
"epoch": 0.25285972305839854,
"grad_norm": 1.025396194648956,
"learning_rate": 9.308946029045726e-06,
"loss": 1.0992,
"step": 420
},
{
"epoch": 0.25586995785671285,
"grad_norm": 1.0666081761901238,
"learning_rate": 9.282041273233402e-06,
"loss": 1.0911,
"step": 425
},
{
"epoch": 0.2588801926550271,
"grad_norm": 1.036681992509278,
"learning_rate": 9.254663164123052e-06,
"loss": 1.0926,
"step": 430
},
{
"epoch": 0.26189042745334135,
"grad_norm": 0.9981467921863433,
"learning_rate": 9.226814728196072e-06,
"loss": 1.0945,
"step": 435
},
{
"epoch": 0.26490066225165565,
"grad_norm": 0.9968534019134379,
"learning_rate": 9.198499043925591e-06,
"loss": 1.1036,
"step": 440
},
{
"epoch": 0.2679108970499699,
"grad_norm": 1.0126539822191232,
"learning_rate": 9.169719241436162e-06,
"loss": 1.0986,
"step": 445
},
{
"epoch": 0.27092113184828415,
"grad_norm": 1.0017070369951004,
"learning_rate": 9.14047850215775e-06,
"loss": 1.0811,
"step": 450
},
{
"epoch": 0.27393136664659845,
"grad_norm": 1.0316940440472975,
"learning_rate": 9.110780058474052e-06,
"loss": 1.0751,
"step": 455
},
{
"epoch": 0.2769416014449127,
"grad_norm": 1.0365583639260507,
"learning_rate": 9.080627193365155e-06,
"loss": 1.0937,
"step": 460
},
{
"epoch": 0.27995183624322695,
"grad_norm": 0.9885372663237562,
"learning_rate": 9.050023240044649e-06,
"loss": 1.0932,
"step": 465
},
{
"epoch": 0.28296207104154125,
"grad_norm": 0.9917660998350917,
"learning_rate": 9.018971581591141e-06,
"loss": 1.0805,
"step": 470
},
{
"epoch": 0.2859723058398555,
"grad_norm": 0.9746303627143975,
"learning_rate": 8.987475650574289e-06,
"loss": 1.1045,
"step": 475
},
{
"epoch": 0.2889825406381698,
"grad_norm": 1.0633991398130442,
"learning_rate": 8.955538928675343e-06,
"loss": 1.0665,
"step": 480
},
{
"epoch": 0.29199277543648405,
"grad_norm": 1.0294806019647922,
"learning_rate": 8.923164946302274e-06,
"loss": 1.0838,
"step": 485
},
{
"epoch": 0.2950030102347983,
"grad_norm": 1.1636067238442809,
"learning_rate": 8.890357282199504e-06,
"loss": 1.0933,
"step": 490
},
{
"epoch": 0.2980132450331126,
"grad_norm": 1.0725517112186367,
"learning_rate": 8.857119563052301e-06,
"loss": 1.0633,
"step": 495
},
{
"epoch": 0.30102347983142685,
"grad_norm": 0.9985063194122187,
"learning_rate": 8.823455463085873e-06,
"loss": 1.0776,
"step": 500
},
{
"epoch": 0.3040337146297411,
"grad_norm": 1.011513088054301,
"learning_rate": 8.789368703659199e-06,
"loss": 1.0851,
"step": 505
},
{
"epoch": 0.3070439494280554,
"grad_norm": 0.9961319702573649,
"learning_rate": 8.754863052853658e-06,
"loss": 1.0773,
"step": 510
},
{
"epoch": 0.31005418422636966,
"grad_norm": 1.0537373514794217,
"learning_rate": 8.719942325056496e-06,
"loss": 1.0544,
"step": 515
},
{
"epoch": 0.3130644190246839,
"grad_norm": 0.9560546370265528,
"learning_rate": 8.68461038053916e-06,
"loss": 1.0604,
"step": 520
},
{
"epoch": 0.3160746538229982,
"grad_norm": 1.0998924700572033,
"learning_rate": 8.648871125030576e-06,
"loss": 1.0668,
"step": 525
},
{
"epoch": 0.31908488862131246,
"grad_norm": 0.9922592199137708,
"learning_rate": 8.612728509285395e-06,
"loss": 1.0564,
"step": 530
},
{
"epoch": 0.3220951234196267,
"grad_norm": 0.9990925710032598,
"learning_rate": 8.576186528647253e-06,
"loss": 1.0765,
"step": 535
},
{
"epoch": 0.325105358217941,
"grad_norm": 0.9867645343873487,
"learning_rate": 8.53924922260712e-06,
"loss": 1.0679,
"step": 540
},
{
"epoch": 0.32811559301625526,
"grad_norm": 1.0504215303129572,
"learning_rate": 8.501920674356755e-06,
"loss": 1.0464,
"step": 545
},
{
"epoch": 0.33112582781456956,
"grad_norm": 1.0003566271578537,
"learning_rate": 8.46420501033733e-06,
"loss": 1.0602,
"step": 550
},
{
"epoch": 0.3341360626128838,
"grad_norm": 1.012902103958297,
"learning_rate": 8.42610639978329e-06,
"loss": 1.0562,
"step": 555
},
{
"epoch": 0.33714629741119806,
"grad_norm": 0.9689824382443799,
"learning_rate": 8.387629054261454e-06,
"loss": 1.0511,
"step": 560
},
{
"epoch": 0.34015653220951236,
"grad_norm": 0.9592977237689682,
"learning_rate": 8.348777227205462e-06,
"loss": 1.0496,
"step": 565
},
{
"epoch": 0.3431667670078266,
"grad_norm": 1.0133498095085518,
"learning_rate": 8.309555213445583e-06,
"loss": 1.0738,
"step": 570
},
{
"epoch": 0.34617700180614086,
"grad_norm": 0.9599492179758512,
"learning_rate": 8.269967348733947e-06,
"loss": 1.0446,
"step": 575
},
{
"epoch": 0.34918723660445516,
"grad_norm": 0.9681301740683113,
"learning_rate": 8.230018009265255e-06,
"loss": 1.0596,
"step": 580
},
{
"epoch": 0.3521974714027694,
"grad_norm": 0.9373524338466983,
"learning_rate": 8.189711611193012e-06,
"loss": 1.051,
"step": 585
},
{
"epoch": 0.35520770620108366,
"grad_norm": 0.9670245571139093,
"learning_rate": 8.149052610141357e-06,
"loss": 1.0429,
"step": 590
},
{
"epoch": 0.35821794099939797,
"grad_norm": 0.9510561814037846,
"learning_rate": 8.108045500712518e-06,
"loss": 1.058,
"step": 595
},
{
"epoch": 0.3612281757977122,
"grad_norm": 0.9936813505342955,
"learning_rate": 8.066694815989961e-06,
"loss": 1.0603,
"step": 600
},
{
"epoch": 0.36423841059602646,
"grad_norm": 0.9843981727904988,
"learning_rate": 8.025005127037282e-06,
"loss": 1.0606,
"step": 605
},
{
"epoch": 0.36724864539434077,
"grad_norm": 0.9874316056017197,
"learning_rate": 7.982981042392907e-06,
"loss": 1.0493,
"step": 610
},
{
"epoch": 0.370258880192655,
"grad_norm": 0.9836042531874883,
"learning_rate": 7.940627207560655e-06,
"loss": 1.0419,
"step": 615
},
{
"epoch": 0.3732691149909693,
"grad_norm": 0.9349031264216233,
"learning_rate": 7.897948304496189e-06,
"loss": 1.0634,
"step": 620
},
{
"epoch": 0.37627934978928357,
"grad_norm": 0.9504127117294803,
"learning_rate": 7.854949051089467e-06,
"loss": 1.0665,
"step": 625
},
{
"epoch": 0.3792895845875978,
"grad_norm": 0.9222019819892415,
"learning_rate": 7.811634200643202e-06,
"loss": 1.072,
"step": 630
},
{
"epoch": 0.3822998193859121,
"grad_norm": 0.9584859569960068,
"learning_rate": 7.768008541347423e-06,
"loss": 1.021,
"step": 635
},
{
"epoch": 0.38531005418422637,
"grad_norm": 0.9322129259432775,
"learning_rate": 7.72407689575016e-06,
"loss": 1.05,
"step": 640
},
{
"epoch": 0.3883202889825406,
"grad_norm": 0.9418042647481683,
"learning_rate": 7.67984412022434e-06,
"loss": 1.0451,
"step": 645
},
{
"epoch": 0.3913305237808549,
"grad_norm": 0.9966983630078914,
"learning_rate": 7.635315104430959e-06,
"loss": 1.0355,
"step": 650
},
{
"epoch": 0.39434075857916917,
"grad_norm": 0.9572847678319176,
"learning_rate": 7.5904947707785434e-06,
"loss": 1.0299,
"step": 655
},
{
"epoch": 0.3973509933774834,
"grad_norm": 0.9275188841478791,
"learning_rate": 7.545388073879018e-06,
"loss": 1.058,
"step": 660
},
{
"epoch": 0.4003612281757977,
"grad_norm": 0.9417371682221568,
"learning_rate": 7.500000000000001e-06,
"loss": 1.0397,
"step": 665
},
{
"epoch": 0.40337146297411197,
"grad_norm": 0.9681489741098089,
"learning_rate": 7.454335566513603e-06,
"loss": 1.0465,
"step": 670
},
{
"epoch": 0.4063816977724263,
"grad_norm": 0.9595108028776653,
"learning_rate": 7.408399821341787e-06,
"loss": 1.0534,
"step": 675
},
{
"epoch": 0.4093919325707405,
"grad_norm": 0.9062681118546124,
"learning_rate": 7.362197842398355e-06,
"loss": 1.0327,
"step": 680
},
{
"epoch": 0.4124021673690548,
"grad_norm": 0.9467262280598835,
"learning_rate": 7.315734737027612e-06,
"loss": 1.0437,
"step": 685
},
{
"epoch": 0.4154124021673691,
"grad_norm": 0.9509979239409292,
"learning_rate": 7.2690156414397775e-06,
"loss": 1.0234,
"step": 690
},
{
"epoch": 0.4184226369656833,
"grad_norm": 0.9515966339436767,
"learning_rate": 7.22204572014322e-06,
"loss": 1.0322,
"step": 695
},
{
"epoch": 0.4214328717639976,
"grad_norm": 0.9727663910614524,
"learning_rate": 7.174830165373542e-06,
"loss": 1.0418,
"step": 700
},
{
"epoch": 0.4244431065623119,
"grad_norm": 0.9381652326646253,
"learning_rate": 7.127374196519616e-06,
"loss": 1.0321,
"step": 705
},
{
"epoch": 0.4274533413606261,
"grad_norm": 0.9699886941897933,
"learning_rate": 7.079683059546607e-06,
"loss": 1.0449,
"step": 710
},
{
"epoch": 0.4304635761589404,
"grad_norm": 0.9241563520898756,
"learning_rate": 7.031762026416074e-06,
"loss": 1.0246,
"step": 715
},
{
"epoch": 0.4334738109572547,
"grad_norm": 0.993265487831661,
"learning_rate": 6.983616394503177e-06,
"loss": 1.0431,
"step": 720
},
{
"epoch": 0.43648404575556893,
"grad_norm": 0.9628627003488671,
"learning_rate": 6.9352514860110876e-06,
"loss": 1.0662,
"step": 725
},
{
"epoch": 0.4394942805538832,
"grad_norm": 0.9461416922766445,
"learning_rate": 6.886672647382653e-06,
"loss": 1.0316,
"step": 730
},
{
"epoch": 0.4425045153521975,
"grad_norm": 0.9806980915933521,
"learning_rate": 6.837885248709386e-06,
"loss": 1.064,
"step": 735
},
{
"epoch": 0.44551475015051173,
"grad_norm": 0.928373225003967,
"learning_rate": 6.788894683137822e-06,
"loss": 1.0276,
"step": 740
},
{
"epoch": 0.44852498494882603,
"grad_norm": 0.9575671889236174,
"learning_rate": 6.739706366273346e-06,
"loss": 1.038,
"step": 745
},
{
"epoch": 0.4515352197471403,
"grad_norm": 0.9046994590336265,
"learning_rate": 6.690325735581532e-06,
"loss": 1.0282,
"step": 750
},
{
"epoch": 0.45454545454545453,
"grad_norm": 0.9151429388919339,
"learning_rate": 6.640758249787067e-06,
"loss": 1.048,
"step": 755
},
{
"epoch": 0.45755568934376883,
"grad_norm": 0.9419132302997139,
"learning_rate": 6.591009388270315e-06,
"loss": 1.0384,
"step": 760
},
{
"epoch": 0.4605659241420831,
"grad_norm": 1.0005185024840373,
"learning_rate": 6.54108465046161e-06,
"loss": 1.04,
"step": 765
},
{
"epoch": 0.46357615894039733,
"grad_norm": 0.9280344189136297,
"learning_rate": 6.490989555233328e-06,
"loss": 1.0355,
"step": 770
},
{
"epoch": 0.46658639373871164,
"grad_norm": 0.9191484105559773,
"learning_rate": 6.440729640289809e-06,
"loss": 1.0249,
"step": 775
},
{
"epoch": 0.4695966285370259,
"grad_norm": 0.9528558636067395,
"learning_rate": 6.3903104615551956e-06,
"loss": 1.0191,
"step": 780
},
{
"epoch": 0.47260686333534013,
"grad_norm": 0.8848506972001927,
"learning_rate": 6.3397375925592675e-06,
"loss": 1.0023,
"step": 785
},
{
"epoch": 0.47561709813365444,
"grad_norm": 0.8812677285723143,
"learning_rate": 6.289016623821308e-06,
"loss": 1.0378,
"step": 790
},
{
"epoch": 0.4786273329319687,
"grad_norm": 0.9103487891871948,
"learning_rate": 6.2381531622321234e-06,
"loss": 1.0393,
"step": 795
},
{
"epoch": 0.481637567730283,
"grad_norm": 0.9337809046875775,
"learning_rate": 6.18715283043422e-06,
"loss": 1.0287,
"step": 800
},
{
"epoch": 0.48464780252859724,
"grad_norm": 0.9131887229741218,
"learning_rate": 6.136021266200271e-06,
"loss": 1.0303,
"step": 805
},
{
"epoch": 0.4876580373269115,
"grad_norm": 0.9069697577426293,
"learning_rate": 6.084764121809878e-06,
"loss": 1.0307,
"step": 810
},
{
"epoch": 0.4906682721252258,
"grad_norm": 0.940336145640042,
"learning_rate": 6.033387063424765e-06,
"loss": 1.0198,
"step": 815
},
{
"epoch": 0.49367850692354004,
"grad_norm": 0.898801668461195,
"learning_rate": 5.9818957704624046e-06,
"loss": 1.0458,
"step": 820
},
{
"epoch": 0.4966887417218543,
"grad_norm": 0.8849487338293404,
"learning_rate": 5.930295934968197e-06,
"loss": 1.022,
"step": 825
},
{
"epoch": 0.4996989765201686,
"grad_norm": 1.3316984335165358,
"learning_rate": 5.878593260986256e-06,
"loss": 1.0175,
"step": 830
},
{
"epoch": 0.5027092113184829,
"grad_norm": 1.0767616487075078,
"learning_rate": 5.8267934639288525e-06,
"loss": 1.0413,
"step": 835
},
{
"epoch": 0.5057194461167971,
"grad_norm": 0.9217653620486148,
"learning_rate": 5.77490226994462e-06,
"loss": 1.0432,
"step": 840
},
{
"epoch": 0.5087296809151114,
"grad_norm": 0.9097013059314509,
"learning_rate": 5.722925415285555e-06,
"loss": 1.0502,
"step": 845
},
{
"epoch": 0.5117399157134257,
"grad_norm": 0.9555445158407673,
"learning_rate": 5.670868645672916e-06,
"loss": 1.0412,
"step": 850
},
{
"epoch": 0.5147501505117399,
"grad_norm": 0.9670792048708317,
"learning_rate": 5.618737715662067e-06,
"loss": 1.0271,
"step": 855
},
{
"epoch": 0.5177603853100542,
"grad_norm": 0.9197944356676366,
"learning_rate": 5.566538388006351e-06,
"loss": 1.0374,
"step": 860
},
{
"epoch": 0.5207706201083685,
"grad_norm": 0.8959294606541434,
"learning_rate": 5.514276433020044e-06,
"loss": 1.0223,
"step": 865
},
{
"epoch": 0.5237808549066827,
"grad_norm": 0.9227855823325931,
"learning_rate": 5.461957627940489e-06,
"loss": 1.0181,
"step": 870
},
{
"epoch": 0.526791089704997,
"grad_norm": 0.9045604297157106,
"learning_rate": 5.409587756289462e-06,
"loss": 1.0177,
"step": 875
},
{
"epoch": 0.5298013245033113,
"grad_norm": 0.900720782883793,
"learning_rate": 5.357172607233831e-06,
"loss": 1.0164,
"step": 880
},
{
"epoch": 0.5328115593016255,
"grad_norm": 0.9613635010205603,
"learning_rate": 5.304717974945596e-06,
"loss": 1.0199,
"step": 885
},
{
"epoch": 0.5358217940999398,
"grad_norm": 0.9280538923116405,
"learning_rate": 5.252229657961394e-06,
"loss": 1.0194,
"step": 890
},
{
"epoch": 0.5388320288982541,
"grad_norm": 0.8785440956248298,
"learning_rate": 5.199713458541495e-06,
"loss": 1.0332,
"step": 895
},
{
"epoch": 0.5418422636965683,
"grad_norm": 0.9081415986183222,
"learning_rate": 5.1471751820284e-06,
"loss": 1.0117,
"step": 900
},
{
"epoch": 0.5448524984948826,
"grad_norm": 0.8985002258714208,
"learning_rate": 5.094620636205096e-06,
"loss": 1.0076,
"step": 905
},
{
"epoch": 0.5478627332931969,
"grad_norm": 0.8895782291081366,
"learning_rate": 5.042055630653042e-06,
"loss": 1.0191,
"step": 910
},
{
"epoch": 0.5508729680915111,
"grad_norm": 0.9293164841008162,
"learning_rate": 4.98948597610996e-06,
"loss": 1.0101,
"step": 915
},
{
"epoch": 0.5538832028898254,
"grad_norm": 17.04597932345032,
"learning_rate": 4.936917483827483e-06,
"loss": 0.9983,
"step": 920
},
{
"epoch": 0.5568934376881397,
"grad_norm": 0.8910475336292096,
"learning_rate": 4.884355964928767e-06,
"loss": 1.0442,
"step": 925
},
{
"epoch": 0.5599036724864539,
"grad_norm": 0.8680011029175039,
"learning_rate": 4.831807229766101e-06,
"loss": 1.0035,
"step": 930
},
{
"epoch": 0.5629139072847682,
"grad_norm": 0.931237516861277,
"learning_rate": 4.779277087278615e-06,
"loss": 1.0285,
"step": 935
},
{
"epoch": 0.5659241420830825,
"grad_norm": 0.9231411428094562,
"learning_rate": 4.7267713443501274e-06,
"loss": 1.0377,
"step": 940
},
{
"epoch": 0.5689343768813967,
"grad_norm": 0.8658589610324005,
"learning_rate": 4.67429580516724e-06,
"loss": 1.0134,
"step": 945
},
{
"epoch": 0.571944611679711,
"grad_norm": 0.9419779516364959,
"learning_rate": 4.6218562705777185e-06,
"loss": 1.0126,
"step": 950
},
{
"epoch": 0.5749548464780253,
"grad_norm": 0.9566449713214615,
"learning_rate": 4.5694585374492314e-06,
"loss": 1.0228,
"step": 955
},
{
"epoch": 0.5779650812763396,
"grad_norm": 0.9543544333800563,
"learning_rate": 4.517108398028566e-06,
"loss": 1.04,
"step": 960
},
{
"epoch": 0.5809753160746538,
"grad_norm": 0.9091827723851609,
"learning_rate": 4.464811639301314e-06,
"loss": 1.0175,
"step": 965
},
{
"epoch": 0.5839855508729681,
"grad_norm": 22.159553622606854,
"learning_rate": 4.412574042352156e-06,
"loss": 1.0323,
"step": 970
},
{
"epoch": 0.5869957856712824,
"grad_norm": 0.912382771621538,
"learning_rate": 4.360401381725806e-06,
"loss": 1.0206,
"step": 975
},
{
"epoch": 0.5900060204695966,
"grad_norm": 0.8897218946404224,
"learning_rate": 4.308299424788667e-06,
"loss": 1.0349,
"step": 980
},
{
"epoch": 0.5930162552679109,
"grad_norm": 0.9205395645648624,
"learning_rate": 4.256273931091284e-06,
"loss": 1.0221,
"step": 985
},
{
"epoch": 0.5960264900662252,
"grad_norm": 0.8997013037962462,
"learning_rate": 4.204330651731662e-06,
"loss": 1.0169,
"step": 990
},
{
"epoch": 0.5990367248645394,
"grad_norm": 0.88917029228118,
"learning_rate": 4.152475328719517e-06,
"loss": 1.0186,
"step": 995
},
{
"epoch": 0.6020469596628537,
"grad_norm": 0.9250893662867846,
"learning_rate": 4.1007136943415325e-06,
"loss": 1.0057,
"step": 1000
},
{
"epoch": 0.605057194461168,
"grad_norm": 0.8603593665888702,
"learning_rate": 4.049051470527692e-06,
"loss": 1.0244,
"step": 1005
},
{
"epoch": 0.6080674292594822,
"grad_norm": 0.8966013371546253,
"learning_rate": 3.997494368218745e-06,
"loss": 1.0146,
"step": 1010
},
{
"epoch": 0.6110776640577965,
"grad_norm": 0.9014285122776863,
"learning_rate": 3.946048086734921e-06,
"loss": 1.033,
"step": 1015
},
{
"epoch": 0.6140878988561108,
"grad_norm": 0.938108665286018,
"learning_rate": 3.894718313145873e-06,
"loss": 1.0243,
"step": 1020
},
{
"epoch": 0.617098133654425,
"grad_norm": 1.250091206416041,
"learning_rate": 3.843510721642036e-06,
"loss": 1.0055,
"step": 1025
},
{
"epoch": 0.6201083684527393,
"grad_norm": 0.8858750851021894,
"learning_rate": 3.7924309729073616e-06,
"loss": 1.0143,
"step": 1030
},
{
"epoch": 0.6231186032510536,
"grad_norm": 0.9538435034591204,
"learning_rate": 3.7414847134935716e-06,
"loss": 1.0032,
"step": 1035
},
{
"epoch": 0.6261288380493678,
"grad_norm": 0.938247676145859,
"learning_rate": 3.6906775751959667e-06,
"loss": 1.0202,
"step": 1040
},
{
"epoch": 0.6291390728476821,
"grad_norm": 0.924707505710252,
"learning_rate": 3.640015174430864e-06,
"loss": 0.9974,
"step": 1045
},
{
"epoch": 0.6321493076459964,
"grad_norm": 0.8952719097489277,
"learning_rate": 3.5895031116147355e-06,
"loss": 1.0128,
"step": 1050
},
{
"epoch": 0.6351595424443106,
"grad_norm": 0.8993701279222203,
"learning_rate": 3.539146970545124e-06,
"loss": 1.0179,
"step": 1055
},
{
"epoch": 0.6381697772426249,
"grad_norm": 0.8755392901199769,
"learning_rate": 3.488952317783374e-06,
"loss": 1.0116,
"step": 1060
},
{
"epoch": 0.6411800120409392,
"grad_norm": 0.9058135143412757,
"learning_rate": 3.438924702039301e-06,
"loss": 1.0069,
"step": 1065
},
{
"epoch": 0.6441902468392534,
"grad_norm": 0.8659042642422232,
"learning_rate": 3.389069653557805e-06,
"loss": 1.0206,
"step": 1070
},
{
"epoch": 0.6472004816375677,
"grad_norm": 0.897110943379507,
"learning_rate": 3.3393926835075307e-06,
"loss": 1.0098,
"step": 1075
},
{
"epoch": 0.650210716435882,
"grad_norm": 0.9103411371697098,
"learning_rate": 3.289899283371657e-06,
"loss": 1.0011,
"step": 1080
},
{
"epoch": 0.6532209512341962,
"grad_norm": 0.9547731068418334,
"learning_rate": 3.240594924340835e-06,
"loss": 1.0077,
"step": 1085
},
{
"epoch": 0.6562311860325105,
"grad_norm": 0.8937676081109391,
"learning_rate": 3.1914850567083866e-06,
"loss": 1.0176,
"step": 1090
},
{
"epoch": 0.6592414208308248,
"grad_norm": 0.929138697588076,
"learning_rate": 3.1425751092678064e-06,
"loss": 1.0034,
"step": 1095
},
{
"epoch": 0.6622516556291391,
"grad_norm": 0.9210331479138841,
"learning_rate": 3.0938704887126425e-06,
"loss": 0.9913,
"step": 1100
},
{
"epoch": 0.6652618904274533,
"grad_norm": 0.8549140765541365,
"learning_rate": 3.045376579038821e-06,
"loss": 1.0005,
"step": 1105
},
{
"epoch": 0.6682721252257676,
"grad_norm": 0.870478554048657,
"learning_rate": 2.9970987409494784e-06,
"loss": 1.0373,
"step": 1110
},
{
"epoch": 0.6712823600240819,
"grad_norm": 0.8724631980574007,
"learning_rate": 2.9490423112623646e-06,
"loss": 1.0035,
"step": 1115
},
{
"epoch": 0.6742925948223961,
"grad_norm": 0.9494490020133225,
"learning_rate": 2.9012126023198973e-06,
"loss": 1.0152,
"step": 1120
},
{
"epoch": 0.6773028296207104,
"grad_norm": 0.8675243470717614,
"learning_rate": 2.853614901401909e-06,
"loss": 0.9851,
"step": 1125
},
{
"epoch": 0.6803130644190247,
"grad_norm": 0.9111265739825615,
"learning_rate": 2.806254470141174e-06,
"loss": 1.0211,
"step": 1130
},
{
"epoch": 0.6833232992173389,
"grad_norm": 0.8793180113511853,
"learning_rate": 2.759136543941773e-06,
"loss": 0.9824,
"step": 1135
},
{
"epoch": 0.6863335340156532,
"grad_norm": 0.9651914463110899,
"learning_rate": 2.712266331400332e-06,
"loss": 1.0008,
"step": 1140
},
{
"epoch": 0.6893437688139675,
"grad_norm": 0.8821546587242668,
"learning_rate": 2.66564901373027e-06,
"loss": 1.0115,
"step": 1145
},
{
"epoch": 0.6923540036122817,
"grad_norm": 0.914825675729504,
"learning_rate": 2.6192897441890337e-06,
"loss": 1.0025,
"step": 1150
},
{
"epoch": 0.695364238410596,
"grad_norm": 0.9051560102693067,
"learning_rate": 2.573193647508426e-06,
"loss": 1.0055,
"step": 1155
},
{
"epoch": 0.6983744732089103,
"grad_norm": 0.876626084266648,
"learning_rate": 2.5273658193281252e-06,
"loss": 0.9862,
"step": 1160
},
{
"epoch": 0.7013847080072245,
"grad_norm": 1.2625964082093335,
"learning_rate": 2.4818113256323745e-06,
"loss": 1.0011,
"step": 1165
},
{
"epoch": 0.7043949428055388,
"grad_norm": 0.9003047210639946,
"learning_rate": 2.4365352021899635e-06,
"loss": 1.0067,
"step": 1170
},
{
"epoch": 0.7074051776038531,
"grad_norm": 0.9392842028653241,
"learning_rate": 2.391542453997578e-06,
"loss": 0.9987,
"step": 1175
},
{
"epoch": 0.7104154124021673,
"grad_norm": 0.8898125521821807,
"learning_rate": 2.346838054726505e-06,
"loss": 1.0042,
"step": 1180
},
{
"epoch": 0.7134256472004816,
"grad_norm": 0.8839311598947895,
"learning_rate": 2.302426946172836e-06,
"loss": 1.0188,
"step": 1185
},
{
"epoch": 0.7164358819987959,
"grad_norm": 145.48082889915423,
"learning_rate": 2.258314037711184e-06,
"loss": 1.0078,
"step": 1190
},
{
"epoch": 0.7194461167971101,
"grad_norm": 0.9080284956087131,
"learning_rate": 2.214504205751971e-06,
"loss": 1.0252,
"step": 1195
},
{
"epoch": 0.7224563515954244,
"grad_norm": 0.8693176168542095,
"learning_rate": 2.1710022932023805e-06,
"loss": 1.0031,
"step": 1200
},
{
"epoch": 0.7254665863937387,
"grad_norm": 0.8945645392796941,
"learning_rate": 2.127813108931007e-06,
"loss": 1.0088,
"step": 1205
},
{
"epoch": 0.7284768211920529,
"grad_norm": 0.8743665598111865,
"learning_rate": 2.084941427236245e-06,
"loss": 1.0162,
"step": 1210
},
{
"epoch": 0.7314870559903672,
"grad_norm": 0.9349057343090031,
"learning_rate": 2.04239198731855e-06,
"loss": 0.9996,
"step": 1215
},
{
"epoch": 0.7344972907886815,
"grad_norm": 7.756856106473671,
"learning_rate": 2.000169492756523e-06,
"loss": 1.0133,
"step": 1220
},
{
"epoch": 0.7375075255869958,
"grad_norm": 8.589389041141127,
"learning_rate": 1.9582786109869713e-06,
"loss": 1.0165,
"step": 1225
},
{
"epoch": 0.74051776038531,
"grad_norm": 0.9692270078720051,
"learning_rate": 1.9167239727889527e-06,
"loss": 1.0104,
"step": 1230
},
{
"epoch": 0.7435279951836243,
"grad_norm": 0.8931619647840838,
"learning_rate": 1.875510171771865e-06,
"loss": 0.9945,
"step": 1235
},
{
"epoch": 0.7465382299819386,
"grad_norm": 0.9061163884083135,
"learning_rate": 1.8346417638676533e-06,
"loss": 0.9846,
"step": 1240
},
{
"epoch": 0.7495484647802528,
"grad_norm": 0.9440025241822538,
"learning_rate": 1.7941232668271863e-06,
"loss": 1.0107,
"step": 1245
},
{
"epoch": 0.7525586995785671,
"grad_norm": 7.949070459581915,
"learning_rate": 1.753959159720836e-06,
"loss": 1.0021,
"step": 1250
},
{
"epoch": 0.7555689343768814,
"grad_norm": 0.8900575327837316,
"learning_rate": 1.7141538824433506e-06,
"loss": 1.0083,
"step": 1255
},
{
"epoch": 0.7585791691751956,
"grad_norm": 0.9567537974389942,
"learning_rate": 1.6747118352230495e-06,
"loss": 1.0064,
"step": 1260
},
{
"epoch": 0.7615894039735099,
"grad_norm": 0.855175568298549,
"learning_rate": 1.6356373781354058e-06,
"loss": 0.9821,
"step": 1265
},
{
"epoch": 0.7645996387718242,
"grad_norm": 0.8869389537440323,
"learning_rate": 1.5969348306210692e-06,
"loss": 1.0033,
"step": 1270
},
{
"epoch": 0.7676098735701384,
"grad_norm": 0.8686541101196223,
"learning_rate": 1.5586084710083737e-06,
"loss": 1.0097,
"step": 1275
},
{
"epoch": 0.7706201083684527,
"grad_norm": 0.8853773671756917,
"learning_rate": 1.5206625360403943e-06,
"loss": 1.002,
"step": 1280
},
{
"epoch": 0.773630343166767,
"grad_norm": 9.276510463559799,
"learning_rate": 1.4831012204066114e-06,
"loss": 1.0198,
"step": 1285
},
{
"epoch": 0.7766405779650812,
"grad_norm": 0.8952189934733595,
"learning_rate": 1.445928676279199e-06,
"loss": 0.9915,
"step": 1290
},
{
"epoch": 0.7796508127633955,
"grad_norm": 1.4459667679766754,
"learning_rate": 1.4091490128540374e-06,
"loss": 0.9837,
"step": 1295
},
{
"epoch": 0.7826610475617098,
"grad_norm": 0.8802348291910782,
"learning_rate": 1.3727662958964627e-06,
"loss": 1.0001,
"step": 1300
},
{
"epoch": 0.785671282360024,
"grad_norm": 1.0156972499266355,
"learning_rate": 1.3367845472918272e-06,
"loss": 1.0072,
"step": 1305
},
{
"epoch": 0.7886815171583383,
"grad_norm": 0.9314514114707518,
"learning_rate": 1.3012077446008969e-06,
"loss": 1.0011,
"step": 1310
},
{
"epoch": 0.7916917519566526,
"grad_norm": 0.8924087794263724,
"learning_rate": 1.266039820620159e-06,
"loss": 1.0055,
"step": 1315
},
{
"epoch": 0.7947019867549668,
"grad_norm": 0.9131510130376783,
"learning_rate": 1.2312846629470826e-06,
"loss": 0.9894,
"step": 1320
},
{
"epoch": 0.7977122215532811,
"grad_norm": 0.9718957955204853,
"learning_rate": 1.1969461135503573e-06,
"loss": 1.0058,
"step": 1325
},
{
"epoch": 0.8007224563515954,
"grad_norm": 0.8842617855041038,
"learning_rate": 1.163027968345195e-06,
"loss": 1.0088,
"step": 1330
},
{
"epoch": 0.8037326911499096,
"grad_norm": 1.0150004217582238,
"learning_rate": 1.1295339767737125e-06,
"loss": 0.9919,
"step": 1335
},
{
"epoch": 0.8067429259482239,
"grad_norm": 9.91219637819699,
"learning_rate": 1.0964678413904529e-06,
"loss": 1.0052,
"step": 1340
},
{
"epoch": 0.8097531607465382,
"grad_norm": 0.9243390152675313,
"learning_rate": 1.0638332174530953e-06,
"loss": 1.0121,
"step": 1345
},
{
"epoch": 0.8127633955448526,
"grad_norm": 0.9013142550026193,
"learning_rate": 1.0316337125183817e-06,
"loss": 0.9996,
"step": 1350
},
{
"epoch": 0.8157736303431667,
"grad_norm": 0.8910284877524084,
"learning_rate": 9.998728860433277e-07,
"loss": 1.0004,
"step": 1355
},
{
"epoch": 0.818783865141481,
"grad_norm": 0.8798186735341214,
"learning_rate": 9.685542489917494e-07,
"loss": 0.982,
"step": 1360
},
{
"epoch": 0.8217940999397954,
"grad_norm": 0.9826761795502499,
"learning_rate": 9.376812634461418e-07,
"loss": 0.9933,
"step": 1365
},
{
"epoch": 0.8248043347381095,
"grad_norm": 0.9100156607261772,
"learning_rate": 9.072573422249692e-07,
"loss": 1.016,
"step": 1370
},
{
"epoch": 0.8278145695364238,
"grad_norm": 0.8713258214258187,
"learning_rate": 8.772858485054042e-07,
"loss": 1.0055,
"step": 1375
},
{
"epoch": 0.8308248043347382,
"grad_norm": 0.8891360404790082,
"learning_rate": 8.477700954515372e-07,
"loss": 1.004,
"step": 1380
},
{
"epoch": 0.8338350391330523,
"grad_norm": 0.9873027733998953,
"learning_rate": 8.187133458481416e-07,
"loss": 1.0047,
"step": 1385
},
{
"epoch": 0.8368452739313667,
"grad_norm": 0.9173976826216069,
"learning_rate": 7.901188117399817e-07,
"loss": 1.0141,
"step": 1390
},
{
"epoch": 0.839855508729681,
"grad_norm": 0.8933319303624291,
"learning_rate": 7.619896540767435e-07,
"loss": 1.0147,
"step": 1395
},
{
"epoch": 0.8428657435279951,
"grad_norm": 0.8820123957581391,
"learning_rate": 7.343289823636168e-07,
"loss": 1.0121,
"step": 1400
},
{
"epoch": 0.8458759783263095,
"grad_norm": 1.1417427890515452,
"learning_rate": 7.0713985431755e-07,
"loss": 1.0102,
"step": 1405
},
{
"epoch": 0.8488862131246238,
"grad_norm": 2.2006862542879766,
"learning_rate": 6.804252755292429e-07,
"loss": 0.981,
"step": 1410
},
{
"epoch": 0.851896447922938,
"grad_norm": 0.892688239858576,
"learning_rate": 6.541881991309013e-07,
"loss": 0.9994,
"step": 1415
},
{
"epoch": 0.8549066827212523,
"grad_norm": 0.8851152482079784,
"learning_rate": 6.284315254697726e-07,
"loss": 1.0055,
"step": 1420
},
{
"epoch": 0.8579169175195666,
"grad_norm": 4.1376088974160705,
"learning_rate": 6.031581017875482e-07,
"loss": 1.0114,
"step": 1425
},
{
"epoch": 0.8609271523178808,
"grad_norm": 1.0742418914591978,
"learning_rate": 5.783707219056078e-07,
"loss": 1.0009,
"step": 1430
},
{
"epoch": 0.863937387116195,
"grad_norm": 51.72308481458701,
"learning_rate": 5.540721259161774e-07,
"loss": 1.0091,
"step": 1435
},
{
"epoch": 0.8669476219145094,
"grad_norm": 0.9187887516758119,
"learning_rate": 5.302649998794368e-07,
"loss": 1.0122,
"step": 1440
},
{
"epoch": 0.8699578567128236,
"grad_norm": 0.998724825032228,
"learning_rate": 5.0695197552659e-07,
"loss": 0.9906,
"step": 1445
},
{
"epoch": 0.8729680915111379,
"grad_norm": 3.453765069147144,
"learning_rate": 4.841356299689359e-07,
"loss": 1.0037,
"step": 1450
},
{
"epoch": 0.8759783263094522,
"grad_norm": 0.9196156863617485,
"learning_rate": 4.618184854129981e-07,
"loss": 1.013,
"step": 1455
},
{
"epoch": 0.8789885611077664,
"grad_norm": 0.8776281392555243,
"learning_rate": 4.4000300888169753e-07,
"loss": 1.0023,
"step": 1460
},
{
"epoch": 0.8819987959060807,
"grad_norm": 1.2343861844915518,
"learning_rate": 4.1869161194164565e-07,
"loss": 1.005,
"step": 1465
},
{
"epoch": 0.885009030704395,
"grad_norm": 3.1950889114793317,
"learning_rate": 3.9788665043656083e-07,
"loss": 0.9983,
"step": 1470
},
{
"epoch": 0.8880192655027093,
"grad_norm": 0.8997540101420374,
"learning_rate": 3.775904242268391e-07,
"loss": 1.0236,
"step": 1475
},
{
"epoch": 0.8910295003010235,
"grad_norm": 0.8874735736481768,
"learning_rate": 3.578051769353219e-07,
"loss": 1.0125,
"step": 1480
},
{
"epoch": 0.8940397350993378,
"grad_norm": 0.8827860731362758,
"learning_rate": 3.385330956992816e-07,
"loss": 1.0003,
"step": 1485
},
{
"epoch": 0.8970499698976521,
"grad_norm": 0.9108633520843697,
"learning_rate": 3.1977631092863613e-07,
"loss": 1.0042,
"step": 1490
},
{
"epoch": 0.9000602046959663,
"grad_norm": 0.8897735803071378,
"learning_rate": 3.015368960704584e-07,
"loss": 0.9873,
"step": 1495
},
{
"epoch": 0.9030704394942806,
"grad_norm": 0.8716202792511029,
"learning_rate": 2.8381686737975867e-07,
"loss": 1.0115,
"step": 1500
},
{
"epoch": 0.9060806742925949,
"grad_norm": 0.9263815301734902,
"learning_rate": 2.666181836966053e-07,
"loss": 1.0149,
"step": 1505
},
{
"epoch": 0.9090909090909091,
"grad_norm": 0.9028381235449051,
"learning_rate": 2.4994274622958726e-07,
"loss": 1.0137,
"step": 1510
},
{
"epoch": 0.9121011438892234,
"grad_norm": 0.8952393788798627,
"learning_rate": 2.3379239834564526e-07,
"loss": 1.0052,
"step": 1515
},
{
"epoch": 0.9151113786875377,
"grad_norm": 6.643467281200084,
"learning_rate": 2.1816892536629775e-07,
"loss": 1.0129,
"step": 1520
},
{
"epoch": 0.9181216134858519,
"grad_norm": 1.6442311604186386,
"learning_rate": 2.0307405437029027e-07,
"loss": 0.9973,
"step": 1525
},
{
"epoch": 0.9211318482841662,
"grad_norm": 1.0659077620293165,
"learning_rate": 1.8850945400266994e-07,
"loss": 1.0181,
"step": 1530
},
{
"epoch": 0.9241420830824805,
"grad_norm": 4.3859143955880855,
"learning_rate": 1.7447673429033361e-07,
"loss": 0.9964,
"step": 1535
},
{
"epoch": 0.9271523178807947,
"grad_norm": 0.8853862484020693,
"learning_rate": 1.6097744646404457e-07,
"loss": 0.9963,
"step": 1540
},
{
"epoch": 0.930162552679109,
"grad_norm": 0.888640349391396,
"learning_rate": 1.4801308278695636e-07,
"loss": 1.0036,
"step": 1545
},
{
"epoch": 0.9331727874774233,
"grad_norm": 0.8933149209877544,
"learning_rate": 1.3558507638965158e-07,
"loss": 1.0066,
"step": 1550
},
{
"epoch": 0.9361830222757375,
"grad_norm": 0.9023917136128728,
"learning_rate": 1.2369480111171784e-07,
"loss": 1.0068,
"step": 1555
},
{
"epoch": 0.9391932570740518,
"grad_norm": 7.92138822902921,
"learning_rate": 1.1234357134987717e-07,
"loss": 0.9898,
"step": 1560
},
{
"epoch": 0.9422034918723661,
"grad_norm": 0.8873656023063873,
"learning_rate": 1.0153264191269052e-07,
"loss": 1.0099,
"step": 1565
},
{
"epoch": 0.9452137266706803,
"grad_norm": 1.7635058125000826,
"learning_rate": 9.126320788184374e-08,
"loss": 0.9943,
"step": 1570
},
{
"epoch": 0.9482239614689946,
"grad_norm": 1.1380512050692657,
"learning_rate": 8.153640448003875e-08,
"loss": 0.9869,
"step": 1575
},
{
"epoch": 0.9512341962673089,
"grad_norm": 1.0730720715487758,
"learning_rate": 7.235330694550402e-08,
"loss": 1.0157,
"step": 1580
},
{
"epoch": 0.9542444310656231,
"grad_norm": 0.8758058611438886,
"learning_rate": 6.371493041313126e-08,
"loss": 0.9904,
"step": 1585
},
{
"epoch": 0.9572546658639374,
"grad_norm": 0.862795236731348,
"learning_rate": 5.562222980225907e-08,
"loss": 0.9965,
"step": 1590
},
{
"epoch": 0.9602649006622517,
"grad_norm": 0.8490275312725396,
"learning_rate": 4.807609971111238e-08,
"loss": 0.9898,
"step": 1595
},
{
"epoch": 0.963275135460566,
"grad_norm": 0.8719190026799012,
"learning_rate": 4.107737431791159e-08,
"loss": 1.0197,
"step": 1600
},
{
"epoch": 0.9662853702588802,
"grad_norm": 0.9849558439277634,
"learning_rate": 3.462682728865685e-08,
"loss": 0.999,
"step": 1605
},
{
"epoch": 0.9692956050571945,
"grad_norm": 0.9240673817719144,
"learning_rate": 2.8725171691605934e-08,
"loss": 1.019,
"step": 1610
},
{
"epoch": 0.9723058398555088,
"grad_norm": 0.8711151381569093,
"learning_rate": 2.3373059918448958e-08,
"loss": 1.0001,
"step": 1615
},
{
"epoch": 0.975316074653823,
"grad_norm": 0.9138380625875624,
"learning_rate": 1.8571083612188845e-08,
"loss": 1.0099,
"step": 1620
},
{
"epoch": 0.9783263094521373,
"grad_norm": 0.9312600529557169,
"learning_rate": 1.431977360173975e-08,
"loss": 0.9951,
"step": 1625
},
{
"epoch": 0.9813365442504516,
"grad_norm": 0.9023080871350866,
"learning_rate": 1.0619599843249006e-08,
"loss": 1.0015,
"step": 1630
},
{
"epoch": 0.9843467790487658,
"grad_norm": 16.22001339030287,
"learning_rate": 7.470971368142011e-09,
"loss": 1.0138,
"step": 1635
},
{
"epoch": 0.9873570138470801,
"grad_norm": 2.544390586510872,
"learning_rate": 4.874236237911723e-09,
"loss": 1.0043,
"step": 1640
},
{
"epoch": 0.9903672486453944,
"grad_norm": 1.1347495534779806,
"learning_rate": 2.8296815056377824e-09,
"loss": 1.0145,
"step": 1645
},
{
"epoch": 0.9933774834437086,
"grad_norm": 0.9810788936521698,
"learning_rate": 1.3375331842574446e-09,
"loss": 1.034,
"step": 1650
},
{
"epoch": 0.9963877182420229,
"grad_norm": 1.4553563885174292,
"learning_rate": 3.9795622158111945e-10,
"loss": 1.0076,
"step": 1655
},
{
"epoch": 0.9993979530403372,
"grad_norm": 6.68038526442591,
"learning_rate": 1.1054482056405136e-11,
"loss": 1.0027,
"step": 1660
},
{
"epoch": 1.0,
"eval_runtime": 0.8643,
"eval_samples_per_second": 11.57,
"eval_steps_per_second": 3.471,
"step": 1661
},
{
"epoch": 1.0,
"step": 1661,
"total_flos": 80925639573504.0,
"train_loss": 1.0962458014272911,
"train_runtime": 1740.9834,
"train_samples_per_second": 15.263,
"train_steps_per_second": 0.954
}
],
"logging_steps": 5,
"max_steps": 1661,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 100,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 80925639573504.0,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}
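
For reference, a minimal Python sketch (not part of the saved state) showing one way to load this trainer_state.json and pull the logged loss curve out of log_history. The local file name is an assumption, and the schedule check at the end assumes a linear warmup over roughly the first 167 steps to a 1e-5 peak followed by cosine decay to zero, which the logged learning_rate values are consistent with.

# Minimal sketch; assumes the JSON above is saved locally as "trainer_state.json".
# Loads the Trainer state, prints a short summary, and extracts the loss curve
# from log_history for plotting or further analysis.
import json
import math

with open("trainer_state.json") as f:
    state = json.load(f)

# Periodic training logs carry both "loss" and "learning_rate"; the final eval
# record and end-of-training summary do not, so they are filtered out here.
train_logs = [e for e in state["log_history"] if "loss" in e and "learning_rate" in e]

steps = [e["step"] for e in train_logs]
losses = [e["loss"] for e in train_logs]
lrs = [e["learning_rate"] for e in train_logs]

print(f"max_steps     : {state['max_steps']}")
print(f"logged points : {len(train_logs)} (every {state['logging_steps']} steps)")
print(f"loss at step {steps[0]:>4} : {losses[0]:.4f}")
print(f"loss at step {steps[-1]:>4} : {losses[-1]:.4f}")

# Sanity check of the learning-rate schedule. The warmup length (167 steps) and
# 1e-5 peak are inferred from the logged values, not stated in the file itself.
def expected_lr(step, peak=1e-5, warmup=167, total=1661):
    if step < warmup:
        return peak * step / warmup
    progress = (step - warmup) / (total - warmup)
    return peak * 0.5 * (1.0 + math.cos(math.pi * progress))

worst = max(abs(expected_lr(s) - lr) for s, lr in zip(steps, lrs))
print(f"max |logged - expected| learning rate: {worst:.3e}")

The printed discrepancy should be near floating-point noise if the warmup/cosine assumption holds; the losses and steps lists can be fed directly to any plotting library to visualize the curve from 1.97 down to roughly 1.00 over the single epoch.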