{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.251994960100798,
"eval_steps": 500,
"global_step": 950,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"grad_norm": 0.4196510314941406,
"learning_rate": 3.7037037037037036e-07,
"loss": 1.4334,
"step": 1
},
{
"epoch": 0.0,
"grad_norm": 0.44790390133857727,
"learning_rate": 7.407407407407407e-07,
"loss": 1.4029,
"step": 2
},
{
"epoch": 0.01,
"grad_norm": 0.439674973487854,
"learning_rate": 1.111111111111111e-06,
"loss": 1.461,
"step": 3
},
{
"epoch": 0.01,
"grad_norm": 0.43389058113098145,
"learning_rate": 1.4814814814814815e-06,
"loss": 1.4012,
"step": 4
},
{
"epoch": 0.01,
"grad_norm": 0.5680592060089111,
"learning_rate": 1.8518518518518519e-06,
"loss": 1.445,
"step": 5
},
{
"epoch": 0.01,
"grad_norm": 0.3744407296180725,
"learning_rate": 2.222222222222222e-06,
"loss": 1.3717,
"step": 6
},
{
"epoch": 0.01,
"grad_norm": 0.36713480949401855,
"learning_rate": 2.5925925925925925e-06,
"loss": 1.4689,
"step": 7
},
{
"epoch": 0.01,
"grad_norm": 0.32056745886802673,
"learning_rate": 2.962962962962963e-06,
"loss": 1.4354,
"step": 8
},
{
"epoch": 0.02,
"grad_norm": 0.3159842789173126,
"learning_rate": 3.3333333333333333e-06,
"loss": 1.4229,
"step": 9
},
{
"epoch": 0.02,
"grad_norm": 0.2807372510433197,
"learning_rate": 3.7037037037037037e-06,
"loss": 1.3873,
"step": 10
},
{
"epoch": 0.02,
"grad_norm": 0.27728646993637085,
"learning_rate": 4.074074074074074e-06,
"loss": 1.3989,
"step": 11
},
{
"epoch": 0.02,
"grad_norm": 0.280068039894104,
"learning_rate": 4.444444444444444e-06,
"loss": 1.4782,
"step": 12
},
{
"epoch": 0.02,
"grad_norm": 0.236131489276886,
"learning_rate": 4.814814814814815e-06,
"loss": 1.3919,
"step": 13
},
{
"epoch": 0.02,
"grad_norm": 0.2324475198984146,
"learning_rate": 5.185185185185185e-06,
"loss": 1.483,
"step": 14
},
{
"epoch": 0.03,
"grad_norm": 0.27377304434776306,
"learning_rate": 5.555555555555557e-06,
"loss": 1.3518,
"step": 15
},
{
"epoch": 0.03,
"grad_norm": 0.23332859575748444,
"learning_rate": 5.925925925925926e-06,
"loss": 1.3781,
"step": 16
},
{
"epoch": 0.03,
"grad_norm": 0.24840319156646729,
"learning_rate": 6.296296296296297e-06,
"loss": 1.3759,
"step": 17
},
{
"epoch": 0.03,
"grad_norm": 0.22274506092071533,
"learning_rate": 6.666666666666667e-06,
"loss": 1.3676,
"step": 18
},
{
"epoch": 0.03,
"grad_norm": 0.29411837458610535,
"learning_rate": 7.0370370370370375e-06,
"loss": 1.4396,
"step": 19
},
{
"epoch": 0.03,
"grad_norm": 0.292468786239624,
"learning_rate": 7.4074074074074075e-06,
"loss": 1.3571,
"step": 20
},
{
"epoch": 0.04,
"grad_norm": 0.22091880440711975,
"learning_rate": 7.77777777777778e-06,
"loss": 1.3387,
"step": 21
},
{
"epoch": 0.04,
"grad_norm": 0.21235232055187225,
"learning_rate": 8.148148148148148e-06,
"loss": 1.3793,
"step": 22
},
{
"epoch": 0.04,
"grad_norm": 0.23756876587867737,
"learning_rate": 8.518518518518519e-06,
"loss": 1.4069,
"step": 23
},
{
"epoch": 0.04,
"grad_norm": 0.21084965765476227,
"learning_rate": 8.888888888888888e-06,
"loss": 1.3839,
"step": 24
},
{
"epoch": 0.04,
"grad_norm": 0.21281524002552032,
"learning_rate": 9.25925925925926e-06,
"loss": 1.4341,
"step": 25
},
{
"epoch": 0.04,
"grad_norm": 0.21351666748523712,
"learning_rate": 9.62962962962963e-06,
"loss": 1.3967,
"step": 26
},
{
"epoch": 0.05,
"grad_norm": 0.21163725852966309,
"learning_rate": 1e-05,
"loss": 1.3992,
"step": 27
},
{
"epoch": 0.05,
"grad_norm": 0.2063266783952713,
"learning_rate": 1.037037037037037e-05,
"loss": 1.3997,
"step": 28
},
{
"epoch": 0.05,
"grad_norm": 0.2602396607398987,
"learning_rate": 1.0740740740740742e-05,
"loss": 1.3798,
"step": 29
},
{
"epoch": 0.05,
"grad_norm": 0.20360125601291656,
"learning_rate": 1.1111111111111113e-05,
"loss": 1.385,
"step": 30
},
{
"epoch": 0.05,
"grad_norm": 0.2125891149044037,
"learning_rate": 1.1481481481481482e-05,
"loss": 1.4193,
"step": 31
},
{
"epoch": 0.05,
"grad_norm": 0.21324987709522247,
"learning_rate": 1.1851851851851852e-05,
"loss": 1.3213,
"step": 32
},
{
"epoch": 0.06,
"grad_norm": 0.1991666704416275,
"learning_rate": 1.2222222222222224e-05,
"loss": 1.3575,
"step": 33
},
{
"epoch": 0.06,
"grad_norm": 0.22300179302692413,
"learning_rate": 1.2592592592592593e-05,
"loss": 1.4578,
"step": 34
},
{
"epoch": 0.06,
"grad_norm": 0.20070216059684753,
"learning_rate": 1.2962962962962964e-05,
"loss": 1.412,
"step": 35
},
{
"epoch": 0.06,
"grad_norm": 0.1976468563079834,
"learning_rate": 1.3333333333333333e-05,
"loss": 1.3627,
"step": 36
},
{
"epoch": 0.06,
"grad_norm": 0.19419902563095093,
"learning_rate": 1.3703703703703706e-05,
"loss": 1.3384,
"step": 37
},
{
"epoch": 0.06,
"grad_norm": 0.19124644994735718,
"learning_rate": 1.4074074074074075e-05,
"loss": 1.3381,
"step": 38
},
{
"epoch": 0.07,
"grad_norm": 0.21110524237155914,
"learning_rate": 1.4444444444444446e-05,
"loss": 1.4208,
"step": 39
},
{
"epoch": 0.07,
"grad_norm": 0.1895744502544403,
"learning_rate": 1.4814814814814815e-05,
"loss": 1.3003,
"step": 40
},
{
"epoch": 0.07,
"grad_norm": 0.20592844486236572,
"learning_rate": 1.5185185185185187e-05,
"loss": 1.3216,
"step": 41
},
{
"epoch": 0.07,
"grad_norm": 0.20679640769958496,
"learning_rate": 1.555555555555556e-05,
"loss": 1.4045,
"step": 42
},
{
"epoch": 0.07,
"grad_norm": 0.19197501242160797,
"learning_rate": 1.5925925925925926e-05,
"loss": 1.4138,
"step": 43
},
{
"epoch": 0.07,
"grad_norm": 0.32921022176742554,
"learning_rate": 1.6296296296296297e-05,
"loss": 1.3683,
"step": 44
},
{
"epoch": 0.08,
"grad_norm": 0.21861666440963745,
"learning_rate": 1.6666666666666667e-05,
"loss": 1.3522,
"step": 45
},
{
"epoch": 0.08,
"grad_norm": 0.1850321739912033,
"learning_rate": 1.7037037037037038e-05,
"loss": 1.293,
"step": 46
},
{
"epoch": 0.08,
"grad_norm": 0.36730220913887024,
"learning_rate": 1.740740740740741e-05,
"loss": 1.4122,
"step": 47
},
{
"epoch": 0.08,
"grad_norm": 0.1949268877506256,
"learning_rate": 1.7777777777777777e-05,
"loss": 1.4002,
"step": 48
},
{
"epoch": 0.08,
"grad_norm": 0.20806071162223816,
"learning_rate": 1.814814814814815e-05,
"loss": 1.3386,
"step": 49
},
{
"epoch": 0.08,
"grad_norm": 0.2157718390226364,
"learning_rate": 1.851851851851852e-05,
"loss": 1.3323,
"step": 50
},
{
"epoch": 0.09,
"grad_norm": 0.19431230425834656,
"learning_rate": 1.888888888888889e-05,
"loss": 1.3844,
"step": 51
},
{
"epoch": 0.09,
"grad_norm": 0.20404817163944244,
"learning_rate": 1.925925925925926e-05,
"loss": 1.2329,
"step": 52
},
{
"epoch": 0.09,
"grad_norm": 0.18933957815170288,
"learning_rate": 1.962962962962963e-05,
"loss": 1.2489,
"step": 53
},
{
"epoch": 0.09,
"grad_norm": 0.21972468495368958,
"learning_rate": 2e-05,
"loss": 1.3139,
"step": 54
},
{
"epoch": 0.09,
"grad_norm": 0.2144392728805542,
"learning_rate": 1.999998353068657e-05,
"loss": 1.3223,
"step": 55
},
{
"epoch": 0.09,
"grad_norm": 0.335573673248291,
"learning_rate": 1.9999934122800527e-05,
"loss": 1.3846,
"step": 56
},
{
"epoch": 0.1,
"grad_norm": 0.19709603488445282,
"learning_rate": 1.9999851776504614e-05,
"loss": 1.4115,
"step": 57
},
{
"epoch": 0.1,
"grad_norm": 0.20725642144680023,
"learning_rate": 1.9999736492070065e-05,
"loss": 1.3915,
"step": 58
},
{
"epoch": 0.1,
"grad_norm": 0.2252136766910553,
"learning_rate": 1.9999588269876614e-05,
"loss": 1.3825,
"step": 59
},
{
"epoch": 0.1,
"grad_norm": 0.18642185628414154,
"learning_rate": 1.999940711041249e-05,
"loss": 1.341,
"step": 60
},
{
"epoch": 0.1,
"grad_norm": 0.2533683180809021,
"learning_rate": 1.9999193014274393e-05,
"loss": 1.3444,
"step": 61
},
{
"epoch": 0.1,
"grad_norm": 0.21690671145915985,
"learning_rate": 1.9998945982167542e-05,
"loss": 1.344,
"step": 62
},
{
"epoch": 0.11,
"grad_norm": 0.19395007193088531,
"learning_rate": 1.9998666014905617e-05,
"loss": 1.3215,
"step": 63
},
{
"epoch": 0.11,
"grad_norm": 0.2266397327184677,
"learning_rate": 1.9998353113410796e-05,
"loss": 1.4338,
"step": 64
},
{
"epoch": 0.11,
"grad_norm": 0.19096337258815765,
"learning_rate": 1.999800727871373e-05,
"loss": 1.394,
"step": 65
},
{
"epoch": 0.11,
"grad_norm": 0.17608919739723206,
"learning_rate": 1.9997628511953554e-05,
"loss": 1.3472,
"step": 66
},
{
"epoch": 0.11,
"grad_norm": 0.19728903472423553,
"learning_rate": 1.9997216814377875e-05,
"loss": 1.3919,
"step": 67
},
{
"epoch": 0.11,
"grad_norm": 0.2579798102378845,
"learning_rate": 1.9996772187342764e-05,
"loss": 1.3303,
"step": 68
},
{
"epoch": 0.12,
"grad_norm": 0.1914272904396057,
"learning_rate": 1.9996294632312766e-05,
"loss": 1.3093,
"step": 69
},
{
"epoch": 0.12,
"grad_norm": 0.18615896999835968,
"learning_rate": 1.999578415086088e-05,
"loss": 1.4049,
"step": 70
},
{
"epoch": 0.12,
"grad_norm": 0.18626686930656433,
"learning_rate": 1.9995240744668555e-05,
"loss": 1.2712,
"step": 71
},
{
"epoch": 0.12,
"grad_norm": 0.18953536450862885,
"learning_rate": 1.999466441552571e-05,
"loss": 1.311,
"step": 72
},
{
"epoch": 0.12,
"grad_norm": 0.20964953303337097,
"learning_rate": 1.9994055165330683e-05,
"loss": 1.383,
"step": 73
},
{
"epoch": 0.12,
"grad_norm": 1.15555739402771,
"learning_rate": 1.9993412996090265e-05,
"loss": 1.2798,
"step": 74
},
{
"epoch": 0.13,
"grad_norm": 0.18615145981311798,
"learning_rate": 1.9992737909919672e-05,
"loss": 1.3288,
"step": 75
},
{
"epoch": 0.13,
"grad_norm": 0.17394311726093292,
"learning_rate": 1.9992029909042542e-05,
"loss": 1.3833,
"step": 76
},
{
"epoch": 0.13,
"grad_norm": 0.2260170429944992,
"learning_rate": 1.999128899579094e-05,
"loss": 1.3483,
"step": 77
},
{
"epoch": 0.13,
"grad_norm": 0.18870756030082703,
"learning_rate": 1.999051517260533e-05,
"loss": 1.3463,
"step": 78
},
{
"epoch": 0.13,
"grad_norm": 0.17621129751205444,
"learning_rate": 1.9989708442034573e-05,
"loss": 1.2911,
"step": 79
},
{
"epoch": 0.13,
"grad_norm": 0.19401304423809052,
"learning_rate": 1.9988868806735938e-05,
"loss": 1.2942,
"step": 80
},
{
"epoch": 0.14,
"grad_norm": 0.21740129590034485,
"learning_rate": 1.9987996269475063e-05,
"loss": 1.3842,
"step": 81
},
{
"epoch": 0.14,
"grad_norm": 0.24701721966266632,
"learning_rate": 1.9987090833125967e-05,
"loss": 1.4095,
"step": 82
},
{
"epoch": 0.14,
"grad_norm": 0.18182536959648132,
"learning_rate": 1.998615250067103e-05,
"loss": 1.3559,
"step": 83
},
{
"epoch": 0.14,
"grad_norm": 0.31561610102653503,
"learning_rate": 1.9985181275201e-05,
"loss": 1.3771,
"step": 84
},
{
"epoch": 0.14,
"grad_norm": 0.21156860888004303,
"learning_rate": 1.998417715991495e-05,
"loss": 1.3526,
"step": 85
},
{
"epoch": 0.14,
"grad_norm": 0.251007080078125,
"learning_rate": 1.9983140158120304e-05,
"loss": 1.3858,
"step": 86
},
{
"epoch": 0.15,
"grad_norm": 0.18290047347545624,
"learning_rate": 1.9982070273232796e-05,
"loss": 1.2914,
"step": 87
},
{
"epoch": 0.15,
"grad_norm": 0.18279613554477692,
"learning_rate": 1.9980967508776486e-05,
"loss": 1.2838,
"step": 88
},
{
"epoch": 0.15,
"grad_norm": 0.26361390948295593,
"learning_rate": 1.997983186838373e-05,
"loss": 1.4331,
"step": 89
},
{
"epoch": 0.15,
"grad_norm": 0.2679239511489868,
"learning_rate": 1.9978663355795167e-05,
"loss": 1.339,
"step": 90
},
{
"epoch": 0.15,
"grad_norm": 0.1776888072490692,
"learning_rate": 1.997746197485972e-05,
"loss": 1.3149,
"step": 91
},
{
"epoch": 0.15,
"grad_norm": 0.19774815440177917,
"learning_rate": 1.997622772953457e-05,
"loss": 1.3608,
"step": 92
},
{
"epoch": 0.16,
"grad_norm": 0.1756751984357834,
"learning_rate": 1.9974960623885158e-05,
"loss": 1.3813,
"step": 93
},
{
"epoch": 0.16,
"grad_norm": 0.17813633382320404,
"learning_rate": 1.9973660662085152e-05,
"loss": 1.3573,
"step": 94
},
{
"epoch": 0.16,
"grad_norm": 0.17685261368751526,
"learning_rate": 1.9972327848416442e-05,
"loss": 1.318,
"step": 95
},
{
"epoch": 0.16,
"grad_norm": 0.16881057620048523,
"learning_rate": 1.9970962187269144e-05,
"loss": 1.3384,
"step": 96
},
{
"epoch": 0.16,
"grad_norm": 0.1850956529378891,
"learning_rate": 1.996956368314155e-05,
"loss": 1.3708,
"step": 97
},
{
"epoch": 0.16,
"grad_norm": 0.1764199137687683,
"learning_rate": 1.996813234064014e-05,
"loss": 1.3025,
"step": 98
},
{
"epoch": 0.17,
"grad_norm": 0.1872323602437973,
"learning_rate": 1.9966668164479567e-05,
"loss": 1.3671,
"step": 99
},
{
"epoch": 0.17,
"grad_norm": 0.15457874536514282,
"learning_rate": 1.9965171159482623e-05,
"loss": 1.3341,
"step": 100
},
{
"epoch": 0.17,
"grad_norm": 0.18440444767475128,
"learning_rate": 1.9963641330580237e-05,
"loss": 1.3263,
"step": 101
},
{
"epoch": 0.17,
"grad_norm": 0.17000797390937805,
"learning_rate": 1.996207868281145e-05,
"loss": 1.3522,
"step": 102
},
{
"epoch": 0.17,
"grad_norm": 0.1682623326778412,
"learning_rate": 1.9960483221323417e-05,
"loss": 1.3363,
"step": 103
},
{
"epoch": 0.17,
"grad_norm": 0.16398635506629944,
"learning_rate": 1.9958854951371364e-05,
"loss": 1.297,
"step": 104
},
{
"epoch": 0.18,
"grad_norm": 0.16423435509204865,
"learning_rate": 1.9957193878318594e-05,
"loss": 1.3561,
"step": 105
},
{
"epoch": 0.18,
"grad_norm": 0.16171465814113617,
"learning_rate": 1.9955500007636445e-05,
"loss": 1.4085,
"step": 106
},
{
"epoch": 0.18,
"grad_norm": 0.17358383536338806,
"learning_rate": 1.9953773344904303e-05,
"loss": 1.3456,
"step": 107
},
{
"epoch": 0.18,
"grad_norm": 0.16697920858860016,
"learning_rate": 1.995201389580955e-05,
"loss": 1.2939,
"step": 108
},
{
"epoch": 0.18,
"grad_norm": 0.17273786664009094,
"learning_rate": 1.995022166614758e-05,
"loss": 1.2898,
"step": 109
},
{
"epoch": 0.18,
"grad_norm": 0.15889504551887512,
"learning_rate": 1.9948396661821742e-05,
"loss": 1.341,
"step": 110
},
{
"epoch": 0.19,
"grad_norm": 0.1592029333114624,
"learning_rate": 1.994653888884335e-05,
"loss": 1.3076,
"step": 111
},
{
"epoch": 0.19,
"grad_norm": 0.21797503530979156,
"learning_rate": 1.9944648353331663e-05,
"loss": 1.3441,
"step": 112
},
{
"epoch": 0.19,
"grad_norm": 0.17762970924377441,
"learning_rate": 1.9942725061513833e-05,
"loss": 1.3432,
"step": 113
},
{
"epoch": 0.19,
"grad_norm": 0.1651773303747177,
"learning_rate": 1.9940769019724926e-05,
"loss": 1.3499,
"step": 114
},
{
"epoch": 0.19,
"grad_norm": 0.1721709966659546,
"learning_rate": 1.9938780234407876e-05,
"loss": 1.361,
"step": 115
},
{
"epoch": 0.19,
"grad_norm": 0.16519513726234436,
"learning_rate": 1.9936758712113464e-05,
"loss": 1.3823,
"step": 116
},
{
"epoch": 0.2,
"grad_norm": 0.17121821641921997,
"learning_rate": 1.9934704459500312e-05,
"loss": 1.378,
"step": 117
},
{
"epoch": 0.2,
"grad_norm": 0.1555752456188202,
"learning_rate": 1.993261748333484e-05,
"loss": 1.2965,
"step": 118
},
{
"epoch": 0.2,
"grad_norm": 0.15821285545825958,
"learning_rate": 1.9930497790491265e-05,
"loss": 1.3857,
"step": 119
},
{
"epoch": 0.2,
"grad_norm": 0.1656782180070877,
"learning_rate": 1.9928345387951563e-05,
"loss": 1.3373,
"step": 120
},
{
"epoch": 0.2,
"grad_norm": 0.19061700999736786,
"learning_rate": 1.9926160282805456e-05,
"loss": 1.4497,
"step": 121
},
{
"epoch": 0.2,
"grad_norm": 0.16132259368896484,
"learning_rate": 1.9923942482250374e-05,
"loss": 1.3188,
"step": 122
},
{
"epoch": 0.21,
"grad_norm": 0.16741512715816498,
"learning_rate": 1.9921691993591455e-05,
"loss": 1.4353,
"step": 123
},
{
"epoch": 0.21,
"grad_norm": 0.1629590541124344,
"learning_rate": 1.991940882424149e-05,
"loss": 1.4078,
"step": 124
},
{
"epoch": 0.21,
"grad_norm": 0.158173069357872,
"learning_rate": 1.9917092981720934e-05,
"loss": 1.3877,
"step": 125
},
{
"epoch": 0.21,
"grad_norm": 0.15278945863246918,
"learning_rate": 1.991474447365785e-05,
"loss": 1.2866,
"step": 126
},
{
"epoch": 0.21,
"grad_norm": 0.15629614889621735,
"learning_rate": 1.9912363307787902e-05,
"loss": 1.3685,
"step": 127
},
{
"epoch": 0.22,
"grad_norm": 0.1634289026260376,
"learning_rate": 1.9909949491954325e-05,
"loss": 1.3361,
"step": 128
},
{
"epoch": 0.22,
"grad_norm": 0.1760614961385727,
"learning_rate": 1.9907503034107893e-05,
"loss": 1.3366,
"step": 129
},
{
"epoch": 0.22,
"grad_norm": 0.16594205796718597,
"learning_rate": 1.99050239423069e-05,
"loss": 1.3213,
"step": 130
},
{
"epoch": 0.22,
"grad_norm": 0.16287142038345337,
"learning_rate": 1.9902512224717147e-05,
"loss": 1.3046,
"step": 131
},
{
"epoch": 0.22,
"grad_norm": 0.16176696121692657,
"learning_rate": 1.9899967889611876e-05,
"loss": 1.3995,
"step": 132
},
{
"epoch": 0.22,
"grad_norm": 0.15077176690101624,
"learning_rate": 1.9897390945371778e-05,
"loss": 1.2764,
"step": 133
},
{
"epoch": 0.23,
"grad_norm": 0.17957943677902222,
"learning_rate": 1.9894781400484956e-05,
"loss": 1.3251,
"step": 134
},
{
"epoch": 0.23,
"grad_norm": 0.15552657842636108,
"learning_rate": 1.989213926354689e-05,
"loss": 1.2835,
"step": 135
},
{
"epoch": 0.23,
"grad_norm": 0.1609240621328354,
"learning_rate": 1.988946454326042e-05,
"loss": 1.3284,
"step": 136
},
{
"epoch": 0.23,
"grad_norm": 0.18385426700115204,
"learning_rate": 1.9886757248435705e-05,
"loss": 1.3721,
"step": 137
},
{
"epoch": 0.23,
"grad_norm": 0.14970523118972778,
"learning_rate": 1.9884017387990205e-05,
"loss": 1.2491,
"step": 138
},
{
"epoch": 0.23,
"grad_norm": 0.1609608381986618,
"learning_rate": 1.988124497094864e-05,
"loss": 1.4012,
"step": 139
},
{
"epoch": 0.24,
"grad_norm": 0.1574031412601471,
"learning_rate": 1.9878440006442972e-05,
"loss": 1.3694,
"step": 140
},
{
"epoch": 0.24,
"grad_norm": 0.15436096489429474,
"learning_rate": 1.987560250371237e-05,
"loss": 1.3719,
"step": 141
},
{
"epoch": 0.24,
"grad_norm": 0.15626446902751923,
"learning_rate": 1.987273247210318e-05,
"loss": 1.2816,
"step": 142
},
{
"epoch": 0.24,
"grad_norm": 0.15678633749485016,
"learning_rate": 1.9869829921068892e-05,
"loss": 1.294,
"step": 143
},
{
"epoch": 0.24,
"grad_norm": 0.16196396946907043,
"learning_rate": 1.9866894860170104e-05,
"loss": 1.3519,
"step": 144
},
{
"epoch": 0.24,
"grad_norm": 0.16082963347434998,
"learning_rate": 1.986392729907451e-05,
"loss": 1.2813,
"step": 145
},
{
"epoch": 0.25,
"grad_norm": 0.17131298780441284,
"learning_rate": 1.9860927247556846e-05,
"loss": 1.333,
"step": 146
},
{
"epoch": 0.25,
"grad_norm": 0.14682374894618988,
"learning_rate": 1.9857894715498873e-05,
"loss": 1.3217,
"step": 147
},
{
"epoch": 0.25,
"grad_norm": 0.1597621738910675,
"learning_rate": 1.9854829712889333e-05,
"loss": 1.3088,
"step": 148
},
{
"epoch": 0.25,
"grad_norm": 0.16332492232322693,
"learning_rate": 1.9851732249823924e-05,
"loss": 1.3484,
"step": 149
},
{
"epoch": 0.25,
"grad_norm": 0.15406523644924164,
"learning_rate": 1.9848602336505262e-05,
"loss": 1.3402,
"step": 150
},
{
"epoch": 0.25,
"grad_norm": 0.14765764772891998,
"learning_rate": 1.9845439983242857e-05,
"loss": 1.3036,
"step": 151
},
{
"epoch": 0.26,
"grad_norm": 0.14781777560710907,
"learning_rate": 1.984224520045306e-05,
"loss": 1.2671,
"step": 152
},
{
"epoch": 0.26,
"grad_norm": 0.1507379412651062,
"learning_rate": 1.983901799865905e-05,
"loss": 1.3549,
"step": 153
},
{
"epoch": 0.26,
"grad_norm": 0.16611751914024353,
"learning_rate": 1.9835758388490783e-05,
"loss": 1.3412,
"step": 154
},
{
"epoch": 0.26,
"grad_norm": 0.1613897681236267,
"learning_rate": 1.9832466380684976e-05,
"loss": 1.3356,
"step": 155
},
{
"epoch": 0.26,
"grad_norm": 0.15234436094760895,
"learning_rate": 1.9829141986085046e-05,
"loss": 1.3136,
"step": 156
},
{
"epoch": 0.26,
"grad_norm": 0.15218184888362885,
"learning_rate": 1.9825785215641088e-05,
"loss": 1.3919,
"step": 157
},
{
"epoch": 0.27,
"grad_norm": 0.14976489543914795,
"learning_rate": 1.9822396080409848e-05,
"loss": 1.3189,
"step": 158
},
{
"epoch": 0.27,
"grad_norm": 0.16917568445205688,
"learning_rate": 1.9818974591554668e-05,
"loss": 1.3115,
"step": 159
},
{
"epoch": 0.27,
"grad_norm": 0.16448311507701874,
"learning_rate": 1.981552076034547e-05,
"loss": 1.3304,
"step": 160
},
{
"epoch": 0.27,
"grad_norm": 0.17942431569099426,
"learning_rate": 1.981203459815869e-05,
"loss": 1.3738,
"step": 161
},
{
"epoch": 0.27,
"grad_norm": 0.15682873129844666,
"learning_rate": 1.9808516116477278e-05,
"loss": 1.3997,
"step": 162
},
{
"epoch": 0.27,
"grad_norm": 0.15899716317653656,
"learning_rate": 1.980496532689062e-05,
"loss": 1.3303,
"step": 163
},
{
"epoch": 0.28,
"grad_norm": 0.15109188854694366,
"learning_rate": 1.9801382241094532e-05,
"loss": 1.3162,
"step": 164
},
{
"epoch": 0.28,
"grad_norm": 0.15101797878742218,
"learning_rate": 1.979776687089121e-05,
"loss": 1.2961,
"step": 165
},
{
"epoch": 0.28,
"grad_norm": 0.15570296347141266,
"learning_rate": 1.9794119228189185e-05,
"loss": 1.3783,
"step": 166
},
{
"epoch": 0.28,
"grad_norm": 0.15653923153877258,
"learning_rate": 1.979043932500329e-05,
"loss": 1.3272,
"step": 167
},
{
"epoch": 0.28,
"grad_norm": 0.1806183159351349,
"learning_rate": 1.9786727173454623e-05,
"loss": 1.353,
"step": 168
},
{
"epoch": 0.28,
"grad_norm": 0.16590842604637146,
"learning_rate": 1.9782982785770497e-05,
"loss": 1.3478,
"step": 169
},
{
"epoch": 0.29,
"grad_norm": 0.15598629415035248,
"learning_rate": 1.9779206174284417e-05,
"loss": 1.3925,
"step": 170
},
{
"epoch": 0.29,
"grad_norm": 0.15137127041816711,
"learning_rate": 1.977539735143602e-05,
"loss": 1.3713,
"step": 171
},
{
"epoch": 0.29,
"grad_norm": 0.1489367038011551,
"learning_rate": 1.9771556329771043e-05,
"loss": 1.2586,
"step": 172
},
{
"epoch": 0.29,
"grad_norm": 0.17419755458831787,
"learning_rate": 1.976768312194129e-05,
"loss": 1.3376,
"step": 173
},
{
"epoch": 0.29,
"grad_norm": 0.15900814533233643,
"learning_rate": 1.9763777740704572e-05,
"loss": 1.2978,
"step": 174
},
{
"epoch": 0.29,
"grad_norm": 0.16462573409080505,
"learning_rate": 1.9759840198924674e-05,
"loss": 1.3108,
"step": 175
},
{
"epoch": 0.3,
"grad_norm": 0.3415735960006714,
"learning_rate": 1.9755870509571324e-05,
"loss": 1.2359,
"step": 176
},
{
"epoch": 0.3,
"grad_norm": 0.1518816500902176,
"learning_rate": 1.9751868685720136e-05,
"loss": 1.3125,
"step": 177
},
{
"epoch": 0.3,
"grad_norm": 0.15554927289485931,
"learning_rate": 1.974783474055256e-05,
"loss": 1.263,
"step": 178
},
{
"epoch": 0.3,
"grad_norm": 0.16101278364658356,
"learning_rate": 1.974376868735586e-05,
"loss": 1.3046,
"step": 179
},
{
"epoch": 0.3,
"grad_norm": 0.17000462114810944,
"learning_rate": 1.9739670539523065e-05,
"loss": 1.3244,
"step": 180
},
{
"epoch": 0.3,
"grad_norm": 0.1521628350019455,
"learning_rate": 1.97355403105529e-05,
"loss": 1.3207,
"step": 181
},
{
"epoch": 0.31,
"grad_norm": 0.18179717659950256,
"learning_rate": 1.973137801404978e-05,
"loss": 1.3671,
"step": 182
},
{
"epoch": 0.31,
"grad_norm": 0.16239362955093384,
"learning_rate": 1.972718366372373e-05,
"loss": 1.3399,
"step": 183
},
{
"epoch": 0.31,
"grad_norm": 0.7205573916435242,
"learning_rate": 1.9722957273390377e-05,
"loss": 1.3471,
"step": 184
},
{
"epoch": 0.31,
"grad_norm": 0.193926602602005,
"learning_rate": 1.9718698856970855e-05,
"loss": 1.3089,
"step": 185
},
{
"epoch": 0.31,
"grad_norm": 0.16461925208568573,
"learning_rate": 1.9714408428491817e-05,
"loss": 1.2604,
"step": 186
},
{
"epoch": 0.31,
"grad_norm": 0.15286822617053986,
"learning_rate": 1.9710086002085335e-05,
"loss": 1.3008,
"step": 187
},
{
"epoch": 0.32,
"grad_norm": 0.169696643948555,
"learning_rate": 1.9705731591988892e-05,
"loss": 1.365,
"step": 188
},
{
"epoch": 0.32,
"grad_norm": 0.15688490867614746,
"learning_rate": 1.970134521254532e-05,
"loss": 1.3128,
"step": 189
},
{
"epoch": 0.32,
"grad_norm": 0.15926450490951538,
"learning_rate": 1.9696926878202746e-05,
"loss": 1.2673,
"step": 190
},
{
"epoch": 0.32,
"grad_norm": 0.16111700236797333,
"learning_rate": 1.9692476603514555e-05,
"loss": 1.372,
"step": 191
},
{
"epoch": 0.32,
"grad_norm": 0.651131272315979,
"learning_rate": 1.9687994403139347e-05,
"loss": 1.3098,
"step": 192
},
{
"epoch": 0.32,
"grad_norm": 0.17667540907859802,
"learning_rate": 1.968348029184087e-05,
"loss": 1.3809,
"step": 193
},
{
"epoch": 0.33,
"grad_norm": 0.1771472841501236,
"learning_rate": 1.9678934284487988e-05,
"loss": 1.373,
"step": 194
},
{
"epoch": 0.33,
"grad_norm": 0.14985056221485138,
"learning_rate": 1.9674356396054624e-05,
"loss": 1.2685,
"step": 195
},
{
"epoch": 0.33,
"grad_norm": 0.15360082685947418,
"learning_rate": 1.9669746641619717e-05,
"loss": 1.3862,
"step": 196
},
{
"epoch": 0.33,
"grad_norm": 0.15841621160507202,
"learning_rate": 1.966510503636716e-05,
"loss": 1.3033,
"step": 197
},
{
"epoch": 0.33,
"grad_norm": 0.1631457805633545,
"learning_rate": 1.9660431595585773e-05,
"loss": 1.3405,
"step": 198
},
{
"epoch": 0.33,
"grad_norm": 0.15417036414146423,
"learning_rate": 1.9655726334669216e-05,
"loss": 1.3001,
"step": 199
},
{
"epoch": 0.34,
"grad_norm": 0.1631031632423401,
"learning_rate": 1.965098926911598e-05,
"loss": 1.3334,
"step": 200
},
{
"epoch": 0.34,
"grad_norm": 0.15555128455162048,
"learning_rate": 1.9646220414529305e-05,
"loss": 1.338,
"step": 201
},
{
"epoch": 0.34,
"grad_norm": 0.16267068684101105,
"learning_rate": 1.9641419786617143e-05,
"loss": 1.3814,
"step": 202
},
{
"epoch": 0.34,
"grad_norm": 0.15920913219451904,
"learning_rate": 1.963658740119211e-05,
"loss": 1.3267,
"step": 203
},
{
"epoch": 0.34,
"grad_norm": 0.15759576857089996,
"learning_rate": 1.9631723274171412e-05,
"loss": 1.3323,
"step": 204
},
{
"epoch": 0.34,
"grad_norm": 0.15223319828510284,
"learning_rate": 1.9626827421576816e-05,
"loss": 1.3074,
"step": 205
},
{
"epoch": 0.35,
"grad_norm": 0.16610486805438995,
"learning_rate": 1.9621899859534594e-05,
"loss": 1.3468,
"step": 206
},
{
"epoch": 0.35,
"grad_norm": 0.1576014757156372,
"learning_rate": 1.9616940604275454e-05,
"loss": 1.3221,
"step": 207
},
{
"epoch": 0.35,
"grad_norm": 0.15478689968585968,
"learning_rate": 1.9611949672134506e-05,
"loss": 1.3689,
"step": 208
},
{
"epoch": 0.35,
"grad_norm": 0.16408346593379974,
"learning_rate": 1.9606927079551187e-05,
"loss": 1.3231,
"step": 209
},
{
"epoch": 0.35,
"grad_norm": 0.14498670399188995,
"learning_rate": 1.9601872843069236e-05,
"loss": 1.2924,
"step": 210
},
{
"epoch": 0.35,
"grad_norm": 0.15980365872383118,
"learning_rate": 1.959678697933661e-05,
"loss": 1.3131,
"step": 211
},
{
"epoch": 0.36,
"grad_norm": 0.15579000115394592,
"learning_rate": 1.9591669505105448e-05,
"loss": 1.2685,
"step": 212
},
{
"epoch": 0.36,
"grad_norm": 0.15318329632282257,
"learning_rate": 1.9586520437232e-05,
"loss": 1.4043,
"step": 213
},
{
"epoch": 0.36,
"grad_norm": 0.15656523406505585,
"learning_rate": 1.9581339792676602e-05,
"loss": 1.371,
"step": 214
},
{
"epoch": 0.36,
"grad_norm": 0.15681013464927673,
"learning_rate": 1.957612758850357e-05,
"loss": 1.3142,
"step": 215
},
{
"epoch": 0.36,
"grad_norm": 0.1462039202451706,
"learning_rate": 1.9570883841881204e-05,
"loss": 1.3148,
"step": 216
},
{
"epoch": 0.36,
"grad_norm": 0.17131565511226654,
"learning_rate": 1.9565608570081674e-05,
"loss": 1.3454,
"step": 217
},
{
"epoch": 0.37,
"grad_norm": 0.15185542404651642,
"learning_rate": 1.9560301790481005e-05,
"loss": 1.2898,
"step": 218
},
{
"epoch": 0.37,
"grad_norm": 0.16285483539104462,
"learning_rate": 1.9554963520559003e-05,
"loss": 1.3785,
"step": 219
},
{
"epoch": 0.37,
"grad_norm": 0.15012206137180328,
"learning_rate": 1.9549593777899193e-05,
"loss": 1.2226,
"step": 220
},
{
"epoch": 0.37,
"grad_norm": 0.15580691397190094,
"learning_rate": 1.9544192580188766e-05,
"loss": 1.2281,
"step": 221
},
{
"epoch": 0.37,
"grad_norm": 0.18193548917770386,
"learning_rate": 1.9538759945218534e-05,
"loss": 1.2269,
"step": 222
},
{
"epoch": 0.37,
"grad_norm": 0.14431047439575195,
"learning_rate": 1.9533295890882848e-05,
"loss": 1.3628,
"step": 223
},
{
"epoch": 0.38,
"grad_norm": 0.155436709523201,
"learning_rate": 1.9527800435179548e-05,
"loss": 1.3569,
"step": 224
},
{
"epoch": 0.38,
"grad_norm": 0.15050619840621948,
"learning_rate": 1.952227359620992e-05,
"loss": 1.3103,
"step": 225
},
{
"epoch": 0.38,
"grad_norm": 0.18012791872024536,
"learning_rate": 1.95167153921786e-05,
"loss": 1.3798,
"step": 226
},
{
"epoch": 0.38,
"grad_norm": 0.14088086783885956,
"learning_rate": 1.951112584139356e-05,
"loss": 1.2466,
"step": 227
},
{
"epoch": 0.38,
"grad_norm": 0.17008265852928162,
"learning_rate": 1.9505504962266005e-05,
"loss": 1.3097,
"step": 228
},
{
"epoch": 0.38,
"grad_norm": 0.17221464216709137,
"learning_rate": 1.9499852773310345e-05,
"loss": 1.3605,
"step": 229
},
{
"epoch": 0.39,
"grad_norm": 0.15964262187480927,
"learning_rate": 1.9494169293144106e-05,
"loss": 1.3642,
"step": 230
},
{
"epoch": 0.39,
"grad_norm": 0.15858659148216248,
"learning_rate": 1.94884545404879e-05,
"loss": 1.3011,
"step": 231
},
{
"epoch": 0.39,
"grad_norm": 0.15182028710842133,
"learning_rate": 1.9482708534165337e-05,
"loss": 1.3169,
"step": 232
},
{
"epoch": 0.39,
"grad_norm": 0.17947536706924438,
"learning_rate": 1.947693129310297e-05,
"loss": 1.3268,
"step": 233
},
{
"epoch": 0.39,
"grad_norm": 0.1559893637895584,
"learning_rate": 1.9471122836330236e-05,
"loss": 1.3257,
"step": 234
},
{
"epoch": 0.39,
"grad_norm": 0.14727315306663513,
"learning_rate": 1.9465283182979396e-05,
"loss": 1.3423,
"step": 235
},
{
"epoch": 0.4,
"grad_norm": 0.15041925013065338,
"learning_rate": 1.9459412352285467e-05,
"loss": 1.2961,
"step": 236
},
{
"epoch": 0.4,
"grad_norm": 0.15340714156627655,
"learning_rate": 1.9453510363586155e-05,
"loss": 1.3252,
"step": 237
},
{
"epoch": 0.4,
"grad_norm": 0.1636042594909668,
"learning_rate": 1.9447577236321806e-05,
"loss": 1.3028,
"step": 238
},
{
"epoch": 0.4,
"grad_norm": 0.15641403198242188,
"learning_rate": 1.9441612990035324e-05,
"loss": 1.3845,
"step": 239
},
{
"epoch": 0.4,
"grad_norm": 0.1722465455532074,
"learning_rate": 1.943561764437212e-05,
"loss": 1.3624,
"step": 240
},
{
"epoch": 0.4,
"grad_norm": 0.1619051992893219,
"learning_rate": 1.9429591219080033e-05,
"loss": 1.3246,
"step": 241
},
{
"epoch": 0.41,
"grad_norm": 0.14947140216827393,
"learning_rate": 1.9423533734009287e-05,
"loss": 1.3551,
"step": 242
},
{
"epoch": 0.41,
"grad_norm": 0.18179036676883698,
"learning_rate": 1.94174452091124e-05,
"loss": 1.3495,
"step": 243
},
{
"epoch": 0.41,
"grad_norm": 0.15705548226833344,
"learning_rate": 1.9411325664444143e-05,
"loss": 1.2849,
"step": 244
},
{
"epoch": 0.41,
"grad_norm": 0.16060465574264526,
"learning_rate": 1.9405175120161453e-05,
"loss": 1.3548,
"step": 245
},
{
"epoch": 0.41,
"grad_norm": 0.16014854609966278,
"learning_rate": 1.939899359652338e-05,
"loss": 1.2388,
"step": 246
},
{
"epoch": 0.41,
"grad_norm": 0.1549297571182251,
"learning_rate": 1.939278111389101e-05,
"loss": 1.4098,
"step": 247
},
{
"epoch": 0.42,
"grad_norm": 0.15960820019245148,
"learning_rate": 1.938653769272741e-05,
"loss": 1.4058,
"step": 248
},
{
"epoch": 0.42,
"grad_norm": 0.2083292156457901,
"learning_rate": 1.9380263353597553e-05,
"loss": 1.3557,
"step": 249
},
{
"epoch": 0.42,
"grad_norm": 0.1558627188205719,
"learning_rate": 1.937395811716825e-05,
"loss": 1.3245,
"step": 250
},
{
"epoch": 0.42,
"grad_norm": 0.17779141664505005,
"learning_rate": 1.936762200420808e-05,
"loss": 1.2765,
"step": 251
},
{
"epoch": 0.42,
"grad_norm": 0.15625056624412537,
"learning_rate": 1.9361255035587338e-05,
"loss": 1.3106,
"step": 252
},
{
"epoch": 0.43,
"grad_norm": 0.15355145931243896,
"learning_rate": 1.9354857232277938e-05,
"loss": 1.3444,
"step": 253
},
{
"epoch": 0.43,
"grad_norm": 0.16163015365600586,
"learning_rate": 1.934842861535337e-05,
"loss": 1.3189,
"step": 254
},
{
"epoch": 0.43,
"grad_norm": 0.16402465105056763,
"learning_rate": 1.9341969205988605e-05,
"loss": 1.3453,
"step": 255
},
{
"epoch": 0.43,
"grad_norm": 0.1472061276435852,
"learning_rate": 1.933547902546006e-05,
"loss": 1.3715,
"step": 256
},
{
"epoch": 0.43,
"grad_norm": 0.19225962460041046,
"learning_rate": 1.93289580951455e-05,
"loss": 1.2784,
"step": 257
},
{
"epoch": 0.43,
"grad_norm": 0.15546953678131104,
"learning_rate": 1.9322406436523965e-05,
"loss": 1.3568,
"step": 258
},
{
"epoch": 0.44,
"grad_norm": 0.1593058705329895,
"learning_rate": 1.931582407117573e-05,
"loss": 1.3299,
"step": 259
},
{
"epoch": 0.44,
"grad_norm": 0.15713098645210266,
"learning_rate": 1.9309211020782192e-05,
"loss": 1.3397,
"step": 260
},
{
"epoch": 0.44,
"grad_norm": 0.152445986866951,
"learning_rate": 1.9302567307125836e-05,
"loss": 1.3449,
"step": 261
},
{
"epoch": 0.44,
"grad_norm": 0.15717758238315582,
"learning_rate": 1.9295892952090143e-05,
"loss": 1.3241,
"step": 262
},
{
"epoch": 0.44,
"grad_norm": 0.1531231552362442,
"learning_rate": 1.928918797765952e-05,
"loss": 1.3516,
"step": 263
},
{
"epoch": 0.44,
"grad_norm": 0.15562032163143158,
"learning_rate": 1.9282452405919235e-05,
"loss": 1.343,
"step": 264
},
{
"epoch": 0.45,
"grad_norm": 0.15916050970554352,
"learning_rate": 1.9275686259055334e-05,
"loss": 1.3511,
"step": 265
},
{
"epoch": 0.45,
"grad_norm": 0.17035844922065735,
"learning_rate": 1.9268889559354576e-05,
"loss": 1.3714,
"step": 266
},
{
"epoch": 0.45,
"grad_norm": 0.1496887505054474,
"learning_rate": 1.9262062329204355e-05,
"loss": 1.3194,
"step": 267
},
{
"epoch": 0.45,
"grad_norm": 0.1466931402683258,
"learning_rate": 1.9255204591092634e-05,
"loss": 1.3235,
"step": 268
},
{
"epoch": 0.45,
"grad_norm": 0.1472676545381546,
"learning_rate": 1.9248316367607858e-05,
"loss": 1.2518,
"step": 269
},
{
"epoch": 0.45,
"grad_norm": 0.16347859799861908,
"learning_rate": 1.9241397681438887e-05,
"loss": 1.3375,
"step": 270
},
{
"epoch": 0.46,
"grad_norm": 0.1519639641046524,
"learning_rate": 1.923444855537493e-05,
"loss": 1.3081,
"step": 271
},
{
"epoch": 0.46,
"grad_norm": 0.15689344704151154,
"learning_rate": 1.9227469012305448e-05,
"loss": 1.3512,
"step": 272
},
{
"epoch": 0.46,
"grad_norm": 0.15142163634300232,
"learning_rate": 1.9220459075220095e-05,
"loss": 1.3023,
"step": 273
},
{
"epoch": 0.46,
"grad_norm": 0.1454487144947052,
"learning_rate": 1.9213418767208648e-05,
"loss": 1.3305,
"step": 274
},
{
"epoch": 0.46,
"grad_norm": 0.15581363439559937,
"learning_rate": 1.9206348111460914e-05,
"loss": 1.3362,
"step": 275
},
{
"epoch": 0.46,
"grad_norm": 0.1467771828174591,
"learning_rate": 1.919924713126666e-05,
"loss": 1.2882,
"step": 276
},
{
"epoch": 0.47,
"grad_norm": 0.1589370220899582,
"learning_rate": 1.919211585001554e-05,
"loss": 1.3969,
"step": 277
},
{
"epoch": 0.47,
"grad_norm": 0.1439632922410965,
"learning_rate": 1.9184954291197013e-05,
"loss": 1.3405,
"step": 278
},
{
"epoch": 0.47,
"grad_norm": 0.14654165506362915,
"learning_rate": 1.9177762478400276e-05,
"loss": 1.3628,
"step": 279
},
{
"epoch": 0.47,
"grad_norm": 0.15207156538963318,
"learning_rate": 1.9170540435314167e-05,
"loss": 1.3192,
"step": 280
},
{
"epoch": 0.47,
"grad_norm": 0.1550934612751007,
"learning_rate": 1.916328818572711e-05,
"loss": 1.2997,
"step": 281
},
{
"epoch": 0.47,
"grad_norm": 0.15896658599376678,
"learning_rate": 1.9156005753527013e-05,
"loss": 1.313,
"step": 282
},
{
"epoch": 0.48,
"grad_norm": 0.1461910903453827,
"learning_rate": 1.914869316270121e-05,
"loss": 1.2686,
"step": 283
},
{
"epoch": 0.48,
"grad_norm": 0.2558024823665619,
"learning_rate": 1.9141350437336374e-05,
"loss": 1.3308,
"step": 284
},
{
"epoch": 0.48,
"grad_norm": 0.16172054409980774,
"learning_rate": 1.9133977601618433e-05,
"loss": 1.3029,
"step": 285
},
{
"epoch": 0.48,
"grad_norm": 0.143438458442688,
"learning_rate": 1.912657467983249e-05,
"loss": 1.3168,
"step": 286
},
{
"epoch": 0.48,
"grad_norm": 0.1616157591342926,
"learning_rate": 1.911914169636276e-05,
"loss": 1.3542,
"step": 287
},
{
"epoch": 0.48,
"grad_norm": 0.1510392427444458,
"learning_rate": 1.9111678675692468e-05,
"loss": 1.2391,
"step": 288
},
{
"epoch": 0.49,
"grad_norm": 0.15160228312015533,
"learning_rate": 1.9104185642403775e-05,
"loss": 1.3301,
"step": 289
},
{
"epoch": 0.49,
"grad_norm": 0.15298177301883698,
"learning_rate": 1.909666262117771e-05,
"loss": 1.3385,
"step": 290
},
{
"epoch": 0.49,
"grad_norm": 0.14593380689620972,
"learning_rate": 1.908910963679407e-05,
"loss": 1.3486,
"step": 291
},
{
"epoch": 0.49,
"grad_norm": 0.23284722864627838,
"learning_rate": 1.9081526714131345e-05,
"loss": 1.3028,
"step": 292
},
{
"epoch": 0.49,
"grad_norm": 0.1577797383069992,
"learning_rate": 1.9073913878166642e-05,
"loss": 1.2827,
"step": 293
},
{
"epoch": 0.49,
"grad_norm": 0.15135832130908966,
"learning_rate": 1.9066271153975602e-05,
"loss": 1.3179,
"step": 294
},
{
"epoch": 0.5,
"grad_norm": 0.1568109542131424,
"learning_rate": 1.90585985667323e-05,
"loss": 1.2906,
"step": 295
},
{
"epoch": 0.5,
"grad_norm": 0.1509489119052887,
"learning_rate": 1.9050896141709198e-05,
"loss": 1.3785,
"step": 296
},
{
"epoch": 0.5,
"grad_norm": 0.14765121042728424,
"learning_rate": 1.904316390427702e-05,
"loss": 1.2586,
"step": 297
},
{
"epoch": 0.5,
"grad_norm": 0.14666473865509033,
"learning_rate": 1.9035401879904687e-05,
"loss": 1.3265,
"step": 298
},
{
"epoch": 0.5,
"grad_norm": 0.1482343077659607,
"learning_rate": 1.902761009415925e-05,
"loss": 1.3288,
"step": 299
},
{
"epoch": 0.5,
"grad_norm": 0.15392710268497467,
"learning_rate": 1.901978857270578e-05,
"loss": 1.3739,
"step": 300
},
{
"epoch": 0.51,
"grad_norm": 0.15929271280765533,
"learning_rate": 1.9011937341307292e-05,
"loss": 1.4441,
"step": 301
},
{
"epoch": 0.51,
"grad_norm": 0.15494123101234436,
"learning_rate": 1.9004056425824672e-05,
"loss": 1.2291,
"step": 302
},
{
"epoch": 0.51,
"grad_norm": 0.15912345051765442,
"learning_rate": 1.899614585221656e-05,
"loss": 1.3614,
"step": 303
},
{
"epoch": 0.51,
"grad_norm": 0.14854875206947327,
"learning_rate": 1.898820564653931e-05,
"loss": 1.3281,
"step": 304
},
{
"epoch": 0.51,
"grad_norm": 0.1518874615430832,
"learning_rate": 1.898023583494687e-05,
"loss": 1.3495,
"step": 305
},
{
"epoch": 0.51,
"grad_norm": 0.15471160411834717,
"learning_rate": 1.8972236443690694e-05,
"loss": 1.3025,
"step": 306
},
{
"epoch": 0.52,
"grad_norm": 0.14632518589496613,
"learning_rate": 1.8964207499119686e-05,
"loss": 1.2828,
"step": 307
},
{
"epoch": 0.52,
"grad_norm": 0.14436408877372742,
"learning_rate": 1.8956149027680085e-05,
"loss": 1.3215,
"step": 308
},
{
"epoch": 0.52,
"grad_norm": 0.1432180106639862,
"learning_rate": 1.8948061055915395e-05,
"loss": 1.3367,
"step": 309
},
{
"epoch": 0.52,
"grad_norm": 0.15100084245204926,
"learning_rate": 1.893994361046628e-05,
"loss": 1.3321,
"step": 310
},
{
"epoch": 0.52,
"grad_norm": 0.1476551741361618,
"learning_rate": 1.8931796718070487e-05,
"loss": 1.3087,
"step": 311
},
{
"epoch": 0.52,
"grad_norm": 0.15668947994709015,
"learning_rate": 1.8923620405562768e-05,
"loss": 1.4232,
"step": 312
},
{
"epoch": 0.53,
"grad_norm": 0.14729134738445282,
"learning_rate": 1.891541469987477e-05,
"loss": 1.3333,
"step": 313
},
{
"epoch": 0.53,
"grad_norm": 0.15708036720752716,
"learning_rate": 1.8907179628034957e-05,
"loss": 1.2837,
"step": 314
},
{
"epoch": 0.53,
"grad_norm": 0.14895762503147125,
"learning_rate": 1.8898915217168528e-05,
"loss": 1.3339,
"step": 315
},
{
"epoch": 0.53,
"grad_norm": 0.16167424619197845,
"learning_rate": 1.8890621494497322e-05,
"loss": 1.357,
"step": 316
},
{
"epoch": 0.53,
"grad_norm": 0.1540985405445099,
"learning_rate": 1.888229848733972e-05,
"loss": 1.3907,
"step": 317
},
{
"epoch": 0.53,
"grad_norm": 0.16398704051971436,
"learning_rate": 1.8873946223110564e-05,
"loss": 1.3349,
"step": 318
},
{
"epoch": 0.54,
"grad_norm": 0.15464505553245544,
"learning_rate": 1.8865564729321062e-05,
"loss": 1.2991,
"step": 319
},
{
"epoch": 0.54,
"grad_norm": 0.1430719792842865,
"learning_rate": 1.885715403357871e-05,
"loss": 1.3175,
"step": 320
},
{
"epoch": 0.54,
"grad_norm": 0.1606094092130661,
"learning_rate": 1.8848714163587178e-05,
"loss": 1.3142,
"step": 321
},
{
"epoch": 0.54,
"grad_norm": 0.1566372811794281,
"learning_rate": 1.8840245147146245e-05,
"loss": 1.3133,
"step": 322
},
{
"epoch": 0.54,
"grad_norm": 0.15060843527317047,
"learning_rate": 1.8831747012151687e-05,
"loss": 1.3333,
"step": 323
},
{
"epoch": 0.54,
"grad_norm": 0.15068593621253967,
"learning_rate": 1.882321978659519e-05,
"loss": 1.3938,
"step": 324
},
{
"epoch": 0.55,
"grad_norm": 0.16211798787117004,
"learning_rate": 1.8814663498564267e-05,
"loss": 1.3081,
"step": 325
},
{
"epoch": 0.55,
"grad_norm": 0.1806996911764145,
"learning_rate": 1.8806078176242158e-05,
"loss": 1.273,
"step": 326
},
{
"epoch": 0.55,
"grad_norm": 0.14823010563850403,
"learning_rate": 1.8797463847907732e-05,
"loss": 1.2599,
"step": 327
},
{
"epoch": 0.55,
"grad_norm": 0.15183423459529877,
"learning_rate": 1.8788820541935403e-05,
"loss": 1.3626,
"step": 328
},
{
"epoch": 0.55,
"grad_norm": 0.15430210530757904,
"learning_rate": 1.8780148286795037e-05,
"loss": 1.3076,
"step": 329
},
{
"epoch": 0.55,
"grad_norm": 0.158985897898674,
"learning_rate": 1.8771447111051854e-05,
"loss": 1.363,
"step": 330
},
{
"epoch": 0.56,
"grad_norm": 0.15100038051605225,
"learning_rate": 1.8762717043366324e-05,
"loss": 1.2647,
"step": 331
},
{
"epoch": 0.56,
"grad_norm": 0.14728271961212158,
"learning_rate": 1.87539581124941e-05,
"loss": 1.323,
"step": 332
},
{
"epoch": 0.56,
"grad_norm": 0.18443995714187622,
"learning_rate": 1.874517034728589e-05,
"loss": 1.3526,
"step": 333
},
{
"epoch": 0.56,
"grad_norm": 0.1705956757068634,
"learning_rate": 1.873635377668739e-05,
"loss": 1.3675,
"step": 334
},
{
"epoch": 0.56,
"grad_norm": 0.14912624657154083,
"learning_rate": 1.8727508429739172e-05,
"loss": 1.303,
"step": 335
},
{
"epoch": 0.56,
"grad_norm": 0.15988433361053467,
"learning_rate": 1.87186343355766e-05,
"loss": 1.3503,
"step": 336
},
{
"epoch": 0.57,
"grad_norm": 0.1975414901971817,
"learning_rate": 1.870973152342971e-05,
"loss": 1.3636,
"step": 337
},
{
"epoch": 0.57,
"grad_norm": 0.1463659554719925,
"learning_rate": 1.870080002262315e-05,
"loss": 1.2705,
"step": 338
},
{
"epoch": 0.57,
"grad_norm": 0.14839382469654083,
"learning_rate": 1.869183986257606e-05,
"loss": 1.2638,
"step": 339
},
{
"epoch": 0.57,
"grad_norm": 0.14624640345573425,
"learning_rate": 1.868285107280197e-05,
"loss": 1.2893,
"step": 340
},
{
"epoch": 0.57,
"grad_norm": 0.1559121459722519,
"learning_rate": 1.8673833682908724e-05,
"loss": 1.3034,
"step": 341
},
{
"epoch": 0.57,
"grad_norm": 0.1462070792913437,
"learning_rate": 1.866478772259836e-05,
"loss": 1.3006,
"step": 342
},
{
"epoch": 0.58,
"grad_norm": 0.15308046340942383,
"learning_rate": 1.865571322166704e-05,
"loss": 1.3051,
"step": 343
},
{
"epoch": 0.58,
"grad_norm": 0.14594431221485138,
"learning_rate": 1.8646610210004912e-05,
"loss": 1.2814,
"step": 344
},
{
"epoch": 0.58,
"grad_norm": 0.149860218167305,
"learning_rate": 1.8637478717596056e-05,
"loss": 1.3348,
"step": 345
},
{
"epoch": 0.58,
"grad_norm": 0.14472465217113495,
"learning_rate": 1.8628318774518347e-05,
"loss": 1.3192,
"step": 346
},
{
"epoch": 0.58,
"grad_norm": 0.14928650856018066,
"learning_rate": 1.8619130410943386e-05,
"loss": 1.3442,
"step": 347
},
{
"epoch": 0.58,
"grad_norm": 0.1469736397266388,
"learning_rate": 1.8609913657136375e-05,
"loss": 1.4005,
"step": 348
},
{
"epoch": 0.59,
"grad_norm": 0.16151031851768494,
"learning_rate": 1.860066854345604e-05,
"loss": 1.3263,
"step": 349
},
{
"epoch": 0.59,
"grad_norm": 0.1534898281097412,
"learning_rate": 1.859139510035451e-05,
"loss": 1.3609,
"step": 350
},
{
"epoch": 0.59,
"grad_norm": 0.1836443841457367,
"learning_rate": 1.8582093358377245e-05,
"loss": 1.3158,
"step": 351
},
{
"epoch": 0.59,
"grad_norm": 0.14370837807655334,
"learning_rate": 1.8572763348162893e-05,
"loss": 1.326,
"step": 352
},
{
"epoch": 0.59,
"grad_norm": 0.33690914511680603,
"learning_rate": 1.8563405100443233e-05,
"loss": 1.3528,
"step": 353
},
{
"epoch": 0.59,
"grad_norm": 0.1471925675868988,
"learning_rate": 1.8554018646043045e-05,
"loss": 1.2951,
"step": 354
},
{
"epoch": 0.6,
"grad_norm": 0.1465420424938202,
"learning_rate": 1.8544604015880024e-05,
"loss": 1.3042,
"step": 355
},
{
"epoch": 0.6,
"grad_norm": 0.15233609080314636,
"learning_rate": 1.8535161240964666e-05,
"loss": 1.3348,
"step": 356
},
{
"epoch": 0.6,
"grad_norm": 0.14329326152801514,
"learning_rate": 1.852569035240018e-05,
"loss": 1.2539,
"step": 357
},
{
"epoch": 0.6,
"grad_norm": 0.1572476178407669,
"learning_rate": 1.8516191381382367e-05,
"loss": 1.3509,
"step": 358
},
{
"epoch": 0.6,
"grad_norm": 0.14190582931041718,
"learning_rate": 1.8506664359199536e-05,
"loss": 1.2363,
"step": 359
},
{
"epoch": 0.6,
"grad_norm": 0.1511862426996231,
"learning_rate": 1.8497109317232386e-05,
"loss": 1.2926,
"step": 360
},
{
"epoch": 0.61,
"grad_norm": 0.15441784262657166,
"learning_rate": 1.8487526286953922e-05,
"loss": 1.3389,
"step": 361
},
{
"epoch": 0.61,
"grad_norm": 0.1593203991651535,
"learning_rate": 1.8477915299929317e-05,
"loss": 1.3323,
"step": 362
},
{
"epoch": 0.61,
"grad_norm": 0.17237433791160583,
"learning_rate": 1.8468276387815853e-05,
"loss": 1.2756,
"step": 363
},
{
"epoch": 0.61,
"grad_norm": 0.15212436020374298,
"learning_rate": 1.8458609582362783e-05,
"loss": 1.3487,
"step": 364
},
{
"epoch": 0.61,
"grad_norm": 0.1509844809770584,
"learning_rate": 1.8448914915411227e-05,
"loss": 1.2997,
"step": 365
},
{
"epoch": 0.61,
"grad_norm": 0.1497824341058731,
"learning_rate": 1.8439192418894096e-05,
"loss": 1.2858,
"step": 366
},
{
"epoch": 0.62,
"grad_norm": 0.1543625295162201,
"learning_rate": 1.8429442124835955e-05,
"loss": 1.2635,
"step": 367
},
{
"epoch": 0.62,
"grad_norm": 0.14757508039474487,
"learning_rate": 1.8419664065352933e-05,
"loss": 1.2856,
"step": 368
},
{
"epoch": 0.62,
"grad_norm": 0.23889374732971191,
"learning_rate": 1.840985827265262e-05,
"loss": 1.2669,
"step": 369
},
{
"epoch": 0.62,
"grad_norm": 0.14553211629390717,
"learning_rate": 1.8400024779033944e-05,
"loss": 1.3678,
"step": 370
},
{
"epoch": 0.62,
"grad_norm": 0.15173819661140442,
"learning_rate": 1.839016361688709e-05,
"loss": 1.3373,
"step": 371
},
{
"epoch": 0.62,
"grad_norm": 0.14966481924057007,
"learning_rate": 1.8380274818693366e-05,
"loss": 1.3075,
"step": 372
},
{
"epoch": 0.63,
"grad_norm": 0.15706394612789154,
"learning_rate": 1.8370358417025116e-05,
"loss": 1.3383,
"step": 373
},
{
"epoch": 0.63,
"grad_norm": 0.2577916979789734,
"learning_rate": 1.8360414444545608e-05,
"loss": 1.3489,
"step": 374
},
{
"epoch": 0.63,
"grad_norm": 0.24489471316337585,
"learning_rate": 1.835044293400892e-05,
"loss": 1.2881,
"step": 375
},
{
"epoch": 0.63,
"grad_norm": 0.14937125146389008,
"learning_rate": 1.8340443918259842e-05,
"loss": 1.393,
"step": 376
},
{
"epoch": 0.63,
"grad_norm": 0.14557434618473053,
"learning_rate": 1.8330417430233756e-05,
"loss": 1.3315,
"step": 377
},
{
"epoch": 0.64,
"grad_norm": 0.22447769343852997,
"learning_rate": 1.8320363502956533e-05,
"loss": 1.331,
"step": 378
},
{
"epoch": 0.64,
"grad_norm": 0.1532454937696457,
"learning_rate": 1.8310282169544436e-05,
"loss": 1.3164,
"step": 379
},
{
"epoch": 0.64,
"grad_norm": 0.15634536743164062,
"learning_rate": 1.8300173463203986e-05,
"loss": 1.4203,
"step": 380
},
{
"epoch": 0.64,
"grad_norm": 0.166867196559906,
"learning_rate": 1.829003741723188e-05,
"loss": 1.3482,
"step": 381
},
{
"epoch": 0.64,
"grad_norm": 0.14226773381233215,
"learning_rate": 1.827987406501486e-05,
"loss": 1.341,
"step": 382
},
{
"epoch": 0.64,
"grad_norm": 0.14897264540195465,
"learning_rate": 1.826968344002961e-05,
"loss": 1.3164,
"step": 383
},
{
"epoch": 0.65,
"grad_norm": 0.14921049773693085,
"learning_rate": 1.825946557584265e-05,
"loss": 1.3575,
"step": 384
},
{
"epoch": 0.65,
"grad_norm": 0.1691223531961441,
"learning_rate": 1.8249220506110225e-05,
"loss": 1.3106,
"step": 385
},
{
"epoch": 0.65,
"grad_norm": 0.14653781056404114,
"learning_rate": 1.8238948264578185e-05,
"loss": 1.3769,
"step": 386
},
{
"epoch": 0.65,
"grad_norm": 0.1483498364686966,
"learning_rate": 1.8228648885081886e-05,
"loss": 1.3134,
"step": 387
},
{
"epoch": 0.65,
"grad_norm": 0.17251162230968475,
"learning_rate": 1.8218322401546063e-05,
"loss": 1.28,
"step": 388
},
{
"epoch": 0.65,
"grad_norm": 0.15499843657016754,
"learning_rate": 1.820796884798474e-05,
"loss": 1.2516,
"step": 389
},
{
"epoch": 0.66,
"grad_norm": 0.1524689644575119,
"learning_rate": 1.81975882585011e-05,
"loss": 1.2891,
"step": 390
},
{
"epoch": 0.66,
"grad_norm": 0.18753460049629211,
"learning_rate": 1.818718066728738e-05,
"loss": 1.3317,
"step": 391
},
{
"epoch": 0.66,
"grad_norm": 0.15421994030475616,
"learning_rate": 1.8176746108624756e-05,
"loss": 1.4153,
"step": 392
},
{
"epoch": 0.66,
"grad_norm": 0.17222876846790314,
"learning_rate": 1.816628461688323e-05,
"loss": 1.3106,
"step": 393
},
{
"epoch": 0.66,
"grad_norm": 0.16242216527462006,
"learning_rate": 1.8155796226521522e-05,
"loss": 1.3807,
"step": 394
},
{
"epoch": 0.66,
"grad_norm": 0.1497281938791275,
"learning_rate": 1.8145280972086946e-05,
"loss": 1.3209,
"step": 395
},
{
"epoch": 0.67,
"grad_norm": 0.15559233725070953,
"learning_rate": 1.8134738888215308e-05,
"loss": 1.3251,
"step": 396
},
{
"epoch": 0.67,
"grad_norm": 0.2383277863264084,
"learning_rate": 1.8124170009630788e-05,
"loss": 1.2843,
"step": 397
},
{
"epoch": 0.67,
"grad_norm": 0.1535133570432663,
"learning_rate": 1.8113574371145817e-05,
"loss": 1.3841,
"step": 398
},
{
"epoch": 0.67,
"grad_norm": 0.15431390702724457,
"learning_rate": 1.810295200766097e-05,
"loss": 1.2803,
"step": 399
},
{
"epoch": 0.67,
"grad_norm": 0.1573055535554886,
"learning_rate": 1.8092302954164863e-05,
"loss": 1.3007,
"step": 400
},
{
"epoch": 0.67,
"grad_norm": 0.16370755434036255,
"learning_rate": 1.808162724573401e-05,
"loss": 1.2791,
"step": 401
},
{
"epoch": 0.68,
"grad_norm": 0.20099897682666779,
"learning_rate": 1.8070924917532725e-05,
"loss": 1.2399,
"step": 402
},
{
"epoch": 0.68,
"grad_norm": 0.29741019010543823,
"learning_rate": 1.8060196004813013e-05,
"loss": 1.2888,
"step": 403
},
{
"epoch": 0.68,
"grad_norm": 0.14683599770069122,
"learning_rate": 1.8049440542914438e-05,
"loss": 1.301,
"step": 404
},
{
"epoch": 0.68,
"grad_norm": 0.14803573489189148,
"learning_rate": 1.803865856726402e-05,
"loss": 1.2875,
"step": 405
},
{
"epoch": 0.68,
"grad_norm": 0.14676880836486816,
"learning_rate": 1.8027850113376092e-05,
"loss": 1.3015,
"step": 406
},
{
"epoch": 0.68,
"grad_norm": 0.14843180775642395,
"learning_rate": 1.801701521685223e-05,
"loss": 1.3201,
"step": 407
},
{
"epoch": 0.69,
"grad_norm": 0.15639850497245789,
"learning_rate": 1.8006153913381098e-05,
"loss": 1.3571,
"step": 408
},
{
"epoch": 0.69,
"grad_norm": 0.1461266577243805,
"learning_rate": 1.7995266238738324e-05,
"loss": 1.3085,
"step": 409
},
{
"epoch": 0.69,
"grad_norm": 0.1541517972946167,
"learning_rate": 1.798435222878642e-05,
"loss": 1.3525,
"step": 410
},
{
"epoch": 0.69,
"grad_norm": 0.15596306324005127,
"learning_rate": 1.797341191947464e-05,
"loss": 1.3209,
"step": 411
},
{
"epoch": 0.69,
"grad_norm": 0.1753711998462677,
"learning_rate": 1.7962445346838856e-05,
"loss": 1.2966,
"step": 412
},
{
"epoch": 0.69,
"grad_norm": 0.16760778427124023,
"learning_rate": 1.7951452547001454e-05,
"loss": 1.2777,
"step": 413
},
{
"epoch": 0.7,
"grad_norm": 0.14851422607898712,
"learning_rate": 1.794043355617121e-05,
"loss": 1.2955,
"step": 414
},
{
"epoch": 0.7,
"grad_norm": 0.14428943395614624,
"learning_rate": 1.792938841064316e-05,
"loss": 1.322,
"step": 415
},
{
"epoch": 0.7,
"grad_norm": 0.147806778550148,
"learning_rate": 1.79183171467985e-05,
"loss": 1.4356,
"step": 416
},
{
"epoch": 0.7,
"grad_norm": 0.14978933334350586,
"learning_rate": 1.7907219801104458e-05,
"loss": 1.3197,
"step": 417
},
{
"epoch": 0.7,
"grad_norm": 0.15199780464172363,
"learning_rate": 1.789609641011416e-05,
"loss": 1.2553,
"step": 418
},
{
"epoch": 0.7,
"grad_norm": 0.18434306979179382,
"learning_rate": 1.7884947010466533e-05,
"loss": 1.2829,
"step": 419
},
{
"epoch": 0.71,
"grad_norm": 0.1494559347629547,
"learning_rate": 1.7873771638886163e-05,
"loss": 1.3004,
"step": 420
},
{
"epoch": 0.71,
"grad_norm": 0.15020005404949188,
"learning_rate": 1.7862570332183193e-05,
"loss": 1.2689,
"step": 421
},
{
"epoch": 0.71,
"grad_norm": 0.21198858320713043,
"learning_rate": 1.785134312725319e-05,
"loss": 1.2978,
"step": 422
},
{
"epoch": 0.71,
"grad_norm": 0.14668674767017365,
"learning_rate": 1.7840090061077023e-05,
"loss": 1.2458,
"step": 423
},
{
"epoch": 0.71,
"grad_norm": 0.15473228693008423,
"learning_rate": 1.782881117072075e-05,
"loss": 1.3109,
"step": 424
},
{
"epoch": 0.71,
"grad_norm": 0.15694057941436768,
"learning_rate": 1.7817506493335485e-05,
"loss": 1.2955,
"step": 425
},
{
"epoch": 0.72,
"grad_norm": 0.14998142421245575,
"learning_rate": 1.7806176066157285e-05,
"loss": 1.3279,
"step": 426
},
{
"epoch": 0.72,
"grad_norm": 0.22829650342464447,
"learning_rate": 1.7794819926507013e-05,
"loss": 1.3223,
"step": 427
},
{
"epoch": 0.72,
"grad_norm": 0.15419165790081024,
"learning_rate": 1.7783438111790247e-05,
"loss": 1.2545,
"step": 428
},
{
"epoch": 0.72,
"grad_norm": 0.14832891523838043,
"learning_rate": 1.7772030659497112e-05,
"loss": 1.3429,
"step": 429
},
{
"epoch": 0.72,
"grad_norm": 0.15142028033733368,
"learning_rate": 1.7760597607202195e-05,
"loss": 1.229,
"step": 430
},
{
"epoch": 0.72,
"grad_norm": 0.1493011862039566,
"learning_rate": 1.7749138992564395e-05,
"loss": 1.3663,
"step": 431
},
{
"epoch": 0.73,
"grad_norm": 0.19301459193229675,
"learning_rate": 1.7737654853326818e-05,
"loss": 1.2877,
"step": 432
},
{
"epoch": 0.73,
"grad_norm": 0.2175184041261673,
"learning_rate": 1.7726145227316642e-05,
"loss": 1.2534,
"step": 433
},
{
"epoch": 0.73,
"grad_norm": 0.170121967792511,
"learning_rate": 1.7714610152444994e-05,
"loss": 1.3817,
"step": 434
},
{
"epoch": 0.73,
"grad_norm": 0.14693012833595276,
"learning_rate": 1.7703049666706826e-05,
"loss": 1.3544,
"step": 435
},
{
"epoch": 0.73,
"grad_norm": 0.15702098608016968,
"learning_rate": 1.7691463808180797e-05,
"loss": 1.3407,
"step": 436
},
{
"epoch": 0.73,
"grad_norm": 0.1530607044696808,
"learning_rate": 1.7679852615029124e-05,
"loss": 1.3075,
"step": 437
},
{
"epoch": 0.74,
"grad_norm": 0.15074275434017181,
"learning_rate": 1.7668216125497487e-05,
"loss": 1.415,
"step": 438
},
{
"epoch": 0.74,
"grad_norm": 0.17382702231407166,
"learning_rate": 1.7656554377914887e-05,
"loss": 1.2609,
"step": 439
},
{
"epoch": 0.74,
"grad_norm": 0.15414436161518097,
"learning_rate": 1.7644867410693517e-05,
"loss": 1.3245,
"step": 440
},
{
"epoch": 0.74,
"grad_norm": 0.14748679101467133,
"learning_rate": 1.7633155262328644e-05,
"loss": 1.3218,
"step": 441
},
{
"epoch": 0.74,
"grad_norm": 0.14213520288467407,
"learning_rate": 1.7621417971398472e-05,
"loss": 1.3097,
"step": 442
},
{
"epoch": 0.74,
"grad_norm": 0.1499529927968979,
"learning_rate": 1.760965557656403e-05,
"loss": 1.3159,
"step": 443
},
{
"epoch": 0.75,
"grad_norm": 0.15373371541500092,
"learning_rate": 1.7597868116569036e-05,
"loss": 1.3102,
"step": 444
},
{
"epoch": 0.75,
"grad_norm": 0.1485459953546524,
"learning_rate": 1.7586055630239755e-05,
"loss": 1.3018,
"step": 445
},
{
"epoch": 0.75,
"grad_norm": 0.15774905681610107,
"learning_rate": 1.7574218156484896e-05,
"loss": 1.2975,
"step": 446
},
{
"epoch": 0.75,
"grad_norm": 0.1483812928199768,
"learning_rate": 1.7562355734295478e-05,
"loss": 1.2975,
"step": 447
},
{
"epoch": 0.75,
"grad_norm": 0.16566769778728485,
"learning_rate": 1.7550468402744685e-05,
"loss": 1.3145,
"step": 448
},
{
"epoch": 0.75,
"grad_norm": 0.14695635437965393,
"learning_rate": 1.753855620098776e-05,
"loss": 1.2927,
"step": 449
},
{
"epoch": 0.76,
"grad_norm": 0.1469867080450058,
"learning_rate": 1.7526619168261856e-05,
"loss": 1.2627,
"step": 450
},
{
"epoch": 0.76,
"grad_norm": 0.15066002309322357,
"learning_rate": 1.7514657343885923e-05,
"loss": 1.3224,
"step": 451
},
{
"epoch": 0.76,
"grad_norm": 0.15294314920902252,
"learning_rate": 1.7502670767260562e-05,
"loss": 1.3363,
"step": 452
},
{
"epoch": 0.76,
"grad_norm": 0.16633671522140503,
"learning_rate": 1.749065947786792e-05,
"loss": 1.3089,
"step": 453
},
{
"epoch": 0.76,
"grad_norm": 0.15498566627502441,
"learning_rate": 1.7478623515271526e-05,
"loss": 1.2816,
"step": 454
},
{
"epoch": 0.76,
"grad_norm": 0.15707740187644958,
"learning_rate": 1.7466562919116192e-05,
"loss": 1.2932,
"step": 455
},
{
"epoch": 0.77,
"grad_norm": 0.165644109249115,
"learning_rate": 1.745447772912787e-05,
"loss": 1.3519,
"step": 456
},
{
"epoch": 0.77,
"grad_norm": 0.1507626622915268,
"learning_rate": 1.7442367985113504e-05,
"loss": 1.3745,
"step": 457
},
{
"epoch": 0.77,
"grad_norm": 0.15195555984973907,
"learning_rate": 1.743023372696094e-05,
"loss": 1.3419,
"step": 458
},
{
"epoch": 0.77,
"grad_norm": 0.15092672407627106,
"learning_rate": 1.7418074994638752e-05,
"loss": 1.3152,
"step": 459
},
{
"epoch": 0.77,
"grad_norm": 0.15098002552986145,
"learning_rate": 1.7405891828196138e-05,
"loss": 1.315,
"step": 460
},
{
"epoch": 0.77,
"grad_norm": 0.14881564676761627,
"learning_rate": 1.7393684267762773e-05,
"loss": 1.2516,
"step": 461
},
{
"epoch": 0.78,
"grad_norm": 0.15771602094173431,
"learning_rate": 1.7381452353548687e-05,
"loss": 1.294,
"step": 462
},
{
"epoch": 0.78,
"grad_norm": 0.19221486151218414,
"learning_rate": 1.7369196125844124e-05,
"loss": 1.3867,
"step": 463
},
{
"epoch": 0.78,
"grad_norm": 0.1485375463962555,
"learning_rate": 1.7356915625019416e-05,
"loss": 1.3202,
"step": 464
},
{
"epoch": 0.78,
"grad_norm": 0.1480698585510254,
"learning_rate": 1.734461089152484e-05,
"loss": 1.301,
"step": 465
},
{
"epoch": 0.78,
"grad_norm": 0.1499594748020172,
"learning_rate": 1.7332281965890513e-05,
"loss": 1.3391,
"step": 466
},
{
"epoch": 0.78,
"grad_norm": 0.15874969959259033,
"learning_rate": 1.731992888872621e-05,
"loss": 1.3246,
"step": 467
},
{
"epoch": 0.79,
"grad_norm": 0.14509308338165283,
"learning_rate": 1.7307551700721273e-05,
"loss": 1.2611,
"step": 468
},
{
"epoch": 0.79,
"grad_norm": 0.15065301954746246,
"learning_rate": 1.729515044264447e-05,
"loss": 1.28,
"step": 469
},
{
"epoch": 0.79,
"grad_norm": 0.1511443853378296,
"learning_rate": 1.7282725155343828e-05,
"loss": 1.37,
"step": 470
},
{
"epoch": 0.79,
"grad_norm": 0.16135594248771667,
"learning_rate": 1.7270275879746547e-05,
"loss": 1.2823,
"step": 471
},
{
"epoch": 0.79,
"grad_norm": 0.1686365306377411,
"learning_rate": 1.7257802656858826e-05,
"loss": 1.3356,
"step": 472
},
{
"epoch": 0.79,
"grad_norm": 0.16540803015232086,
"learning_rate": 1.724530552776575e-05,
"loss": 1.3159,
"step": 473
},
{
"epoch": 0.8,
"grad_norm": 0.23215636610984802,
"learning_rate": 1.7232784533631148e-05,
"loss": 1.2724,
"step": 474
},
{
"epoch": 0.8,
"grad_norm": 0.15948258340358734,
"learning_rate": 1.7220239715697455e-05,
"loss": 1.2742,
"step": 475
},
{
"epoch": 0.8,
"grad_norm": 0.1488332599401474,
"learning_rate": 1.7207671115285577e-05,
"loss": 1.3531,
"step": 476
},
{
"epoch": 0.8,
"grad_norm": 0.1468440741300583,
"learning_rate": 1.7195078773794756e-05,
"loss": 1.2834,
"step": 477
},
{
"epoch": 0.8,
"grad_norm": 0.14220255613327026,
"learning_rate": 1.7182462732702442e-05,
"loss": 1.2984,
"step": 478
},
{
"epoch": 0.8,
"grad_norm": 0.1553741842508316,
"learning_rate": 1.7169823033564137e-05,
"loss": 1.3449,
"step": 479
},
{
"epoch": 0.81,
"grad_norm": 0.15940803289413452,
"learning_rate": 1.7157159718013275e-05,
"loss": 1.3179,
"step": 480
},
{
"epoch": 0.81,
"grad_norm": 0.16116777062416077,
"learning_rate": 1.714447282776108e-05,
"loss": 1.3078,
"step": 481
},
{
"epoch": 0.81,
"grad_norm": 0.16338405013084412,
"learning_rate": 1.7131762404596424e-05,
"loss": 1.3704,
"step": 482
},
{
"epoch": 0.81,
"grad_norm": 0.14362382888793945,
"learning_rate": 1.7119028490385703e-05,
"loss": 1.2434,
"step": 483
},
{
"epoch": 0.81,
"grad_norm": 0.16018328070640564,
"learning_rate": 1.7106271127072672e-05,
"loss": 1.3293,
"step": 484
},
{
"epoch": 0.81,
"grad_norm": 0.15323656797409058,
"learning_rate": 1.7093490356678338e-05,
"loss": 1.3381,
"step": 485
},
{
"epoch": 0.82,
"grad_norm": 0.14089493453502655,
"learning_rate": 1.7080686221300804e-05,
"loss": 1.269,
"step": 486
},
{
"epoch": 0.82,
"grad_norm": 0.15111474692821503,
"learning_rate": 1.706785876311513e-05,
"loss": 1.3165,
"step": 487
},
{
"epoch": 0.82,
"grad_norm": 0.1574653536081314,
"learning_rate": 1.7055008024373214e-05,
"loss": 1.3204,
"step": 488
},
{
"epoch": 0.82,
"grad_norm": 0.15187577903270721,
"learning_rate": 1.7042134047403613e-05,
"loss": 1.3261,
"step": 489
},
{
"epoch": 0.82,
"grad_norm": 0.15277227759361267,
"learning_rate": 1.7029236874611444e-05,
"loss": 1.3667,
"step": 490
},
{
"epoch": 0.82,
"grad_norm": 0.17707929015159607,
"learning_rate": 1.7016316548478217e-05,
"loss": 1.3065,
"step": 491
},
{
"epoch": 0.83,
"grad_norm": 0.1623065024614334,
"learning_rate": 1.700337311156172e-05,
"loss": 1.3231,
"step": 492
},
{
"epoch": 0.83,
"grad_norm": 0.17087838053703308,
"learning_rate": 1.6990406606495854e-05,
"loss": 1.3201,
"step": 493
},
{
"epoch": 0.83,
"grad_norm": 0.16629967093467712,
"learning_rate": 1.6977417075990508e-05,
"loss": 1.3758,
"step": 494
},
{
"epoch": 0.83,
"grad_norm": 0.14834420382976532,
"learning_rate": 1.696440456283141e-05,
"loss": 1.3256,
"step": 495
},
{
"epoch": 0.83,
"grad_norm": 0.1470130831003189,
"learning_rate": 1.6951369109879987e-05,
"loss": 1.2848,
"step": 496
},
{
"epoch": 0.83,
"grad_norm": 0.14451508224010468,
"learning_rate": 1.693831076007324e-05,
"loss": 1.2913,
"step": 497
},
{
"epoch": 0.84,
"grad_norm": 0.15200211107730865,
"learning_rate": 1.692522955642357e-05,
"loss": 1.3047,
"step": 498
},
{
"epoch": 0.84,
"grad_norm": 0.14897271990776062,
"learning_rate": 1.6912125542018675e-05,
"loss": 1.2377,
"step": 499
},
{
"epoch": 0.84,
"grad_norm": 0.1497008055448532,
"learning_rate": 1.6898998760021376e-05,
"loss": 1.3308,
"step": 500
},
{
"epoch": 0.84,
"grad_norm": 0.15045613050460815,
"learning_rate": 1.6885849253669492e-05,
"loss": 1.3119,
"step": 501
},
{
"epoch": 0.84,
"grad_norm": 0.15971465408802032,
"learning_rate": 1.6872677066275685e-05,
"loss": 1.342,
"step": 502
},
{
"epoch": 0.85,
"grad_norm": 0.15068389475345612,
"learning_rate": 1.6859482241227337e-05,
"loss": 1.3444,
"step": 503
},
{
"epoch": 0.85,
"grad_norm": 0.14832505583763123,
"learning_rate": 1.684626482198639e-05,
"loss": 1.2658,
"step": 504
},
{
"epoch": 0.85,
"grad_norm": 0.16742801666259766,
"learning_rate": 1.6833024852089207e-05,
"loss": 1.2719,
"step": 505
},
{
"epoch": 0.85,
"grad_norm": 0.14772577583789825,
"learning_rate": 1.6819762375146427e-05,
"loss": 1.3055,
"step": 506
},
{
"epoch": 0.85,
"grad_norm": 0.1421637088060379,
"learning_rate": 1.6806477434842835e-05,
"loss": 1.3252,
"step": 507
},
{
"epoch": 0.85,
"grad_norm": 0.15263010561466217,
"learning_rate": 1.6793170074937196e-05,
"loss": 1.3379,
"step": 508
},
{
"epoch": 0.86,
"grad_norm": 0.1491978019475937,
"learning_rate": 1.6779840339262128e-05,
"loss": 1.3355,
"step": 509
},
{
"epoch": 0.86,
"grad_norm": 0.16020318865776062,
"learning_rate": 1.6766488271723948e-05,
"loss": 1.3056,
"step": 510
},
{
"epoch": 0.86,
"grad_norm": 0.1441843956708908,
"learning_rate": 1.675311391630253e-05,
"loss": 1.2625,
"step": 511
},
{
"epoch": 0.86,
"grad_norm": 0.15116120874881744,
"learning_rate": 1.6739717317051173e-05,
"loss": 1.3625,
"step": 512
},
{
"epoch": 0.86,
"grad_norm": 0.16190451383590698,
"learning_rate": 1.6726298518096427e-05,
"loss": 1.3478,
"step": 513
},
{
"epoch": 0.86,
"grad_norm": 0.2721071243286133,
"learning_rate": 1.671285756363798e-05,
"loss": 1.278,
"step": 514
},
{
"epoch": 0.87,
"grad_norm": 0.15258455276489258,
"learning_rate": 1.6699394497948486e-05,
"loss": 1.3409,
"step": 515
},
{
"epoch": 0.87,
"grad_norm": 0.1480305790901184,
"learning_rate": 1.6685909365373435e-05,
"loss": 1.2828,
"step": 516
},
{
"epoch": 0.87,
"grad_norm": 0.15107598900794983,
"learning_rate": 1.6672402210331002e-05,
"loss": 1.3185,
"step": 517
},
{
"epoch": 0.87,
"grad_norm": 0.16138269007205963,
"learning_rate": 1.6658873077311902e-05,
"loss": 1.2488,
"step": 518
},
{
"epoch": 0.87,
"grad_norm": 0.1458127200603485,
"learning_rate": 1.6645322010879242e-05,
"loss": 1.3045,
"step": 519
},
{
"epoch": 0.87,
"grad_norm": 0.1560029536485672,
"learning_rate": 1.663174905566837e-05,
"loss": 1.3145,
"step": 520
},
{
"epoch": 0.88,
"grad_norm": 0.15814390778541565,
"learning_rate": 1.6618154256386746e-05,
"loss": 1.2516,
"step": 521
},
{
"epoch": 0.88,
"grad_norm": 0.1609438955783844,
"learning_rate": 1.660453765781376e-05,
"loss": 1.3517,
"step": 522
},
{
"epoch": 0.88,
"grad_norm": 0.1503395289182663,
"learning_rate": 1.6590899304800626e-05,
"loss": 1.2836,
"step": 523
},
{
"epoch": 0.88,
"grad_norm": 0.15777058899402618,
"learning_rate": 1.6577239242270208e-05,
"loss": 1.3166,
"step": 524
},
{
"epoch": 0.88,
"grad_norm": 0.15574100613594055,
"learning_rate": 1.6563557515216868e-05,
"loss": 1.3431,
"step": 525
},
{
"epoch": 0.88,
"grad_norm": 0.31943216919898987,
"learning_rate": 1.654985416870634e-05,
"loss": 1.3003,
"step": 526
},
{
"epoch": 0.89,
"grad_norm": 0.1493591070175171,
"learning_rate": 1.6536129247875568e-05,
"loss": 1.3107,
"step": 527
},
{
"epoch": 0.89,
"grad_norm": 0.16144278645515442,
"learning_rate": 1.6522382797932555e-05,
"loss": 1.3139,
"step": 528
},
{
"epoch": 0.89,
"grad_norm": 0.15227490663528442,
"learning_rate": 1.6508614864156218e-05,
"loss": 1.3107,
"step": 529
},
{
"epoch": 0.89,
"grad_norm": 0.1584673821926117,
"learning_rate": 1.6494825491896246e-05,
"loss": 1.3199,
"step": 530
},
{
"epoch": 0.89,
"grad_norm": 0.16497363150119781,
"learning_rate": 1.6481014726572924e-05,
"loss": 1.3562,
"step": 531
},
{
"epoch": 0.89,
"grad_norm": 0.15551984310150146,
"learning_rate": 1.6467182613677035e-05,
"loss": 1.2964,
"step": 532
},
{
"epoch": 0.9,
"grad_norm": 0.15545369684696198,
"learning_rate": 1.645332919876965e-05,
"loss": 1.3617,
"step": 533
},
{
"epoch": 0.9,
"grad_norm": 0.15759220719337463,
"learning_rate": 1.6439454527482014e-05,
"loss": 1.3197,
"step": 534
},
{
"epoch": 0.9,
"grad_norm": 0.16849841177463531,
"learning_rate": 1.642555864551539e-05,
"loss": 1.3545,
"step": 535
},
{
"epoch": 0.9,
"grad_norm": 0.16088669002056122,
"learning_rate": 1.6411641598640908e-05,
"loss": 1.3041,
"step": 536
},
{
"epoch": 0.9,
"grad_norm": 0.18748100101947784,
"learning_rate": 1.6397703432699404e-05,
"loss": 1.3261,
"step": 537
},
{
"epoch": 0.9,
"grad_norm": 0.15511862933635712,
"learning_rate": 1.6383744193601293e-05,
"loss": 1.2566,
"step": 538
},
{
"epoch": 0.91,
"grad_norm": 0.15378575026988983,
"learning_rate": 1.6369763927326378e-05,
"loss": 1.3685,
"step": 539
},
{
"epoch": 0.91,
"grad_norm": 0.15075547993183136,
"learning_rate": 1.6355762679923744e-05,
"loss": 1.3565,
"step": 540
},
{
"epoch": 0.91,
"grad_norm": 0.17224986851215363,
"learning_rate": 1.6341740497511584e-05,
"loss": 1.3599,
"step": 541
},
{
"epoch": 0.91,
"grad_norm": 0.16651223599910736,
"learning_rate": 1.632769742627703e-05,
"loss": 1.2676,
"step": 542
},
{
"epoch": 0.91,
"grad_norm": 0.1479247361421585,
"learning_rate": 1.6313633512476034e-05,
"loss": 1.3511,
"step": 543
},
{
"epoch": 0.91,
"grad_norm": 0.14475040137767792,
"learning_rate": 1.62995488024332e-05,
"loss": 1.281,
"step": 544
},
{
"epoch": 0.92,
"grad_norm": 0.14610210061073303,
"learning_rate": 1.628544334254162e-05,
"loss": 1.2803,
"step": 545
},
{
"epoch": 0.92,
"grad_norm": 0.15002188086509705,
"learning_rate": 1.6271317179262754e-05,
"loss": 1.3114,
"step": 546
},
{
"epoch": 0.92,
"grad_norm": 0.15048658847808838,
"learning_rate": 1.6257170359126237e-05,
"loss": 1.342,
"step": 547
},
{
"epoch": 0.92,
"grad_norm": 0.14822815358638763,
"learning_rate": 1.6243002928729752e-05,
"loss": 1.3811,
"step": 548
},
{
"epoch": 0.92,
"grad_norm": 0.14471104741096497,
"learning_rate": 1.6228814934738873e-05,
"loss": 1.3415,
"step": 549
},
{
"epoch": 0.92,
"grad_norm": 0.16976451873779297,
"learning_rate": 1.6214606423886903e-05,
"loss": 1.246,
"step": 550
},
{
"epoch": 0.93,
"grad_norm": 0.15606847405433655,
"learning_rate": 1.6200377442974723e-05,
"loss": 1.3191,
"step": 551
},
{
"epoch": 0.93,
"grad_norm": 0.15066905319690704,
"learning_rate": 1.6186128038870644e-05,
"loss": 1.3419,
"step": 552
},
{
"epoch": 0.93,
"grad_norm": 0.1576036810874939,
"learning_rate": 1.6171858258510246e-05,
"loss": 1.2499,
"step": 553
},
{
"epoch": 0.93,
"grad_norm": 0.16077366471290588,
"learning_rate": 1.6157568148896227e-05,
"loss": 1.2236,
"step": 554
},
{
"epoch": 0.93,
"grad_norm": 0.15549123287200928,
"learning_rate": 1.6143257757098242e-05,
"loss": 1.2914,
"step": 555
},
{
"epoch": 0.93,
"grad_norm": 0.15645426511764526,
"learning_rate": 1.6128927130252764e-05,
"loss": 1.344,
"step": 556
},
{
"epoch": 0.94,
"grad_norm": 0.1562529355287552,
"learning_rate": 1.61145763155629e-05,
"loss": 1.3898,
"step": 557
},
{
"epoch": 0.94,
"grad_norm": 0.14849528670310974,
"learning_rate": 1.6100205360298277e-05,
"loss": 1.2746,
"step": 558
},
{
"epoch": 0.94,
"grad_norm": 0.15014708042144775,
"learning_rate": 1.6085814311794833e-05,
"loss": 1.355,
"step": 559
},
{
"epoch": 0.94,
"grad_norm": 0.15373437106609344,
"learning_rate": 1.607140321745472e-05,
"loss": 1.3893,
"step": 560
},
{
"epoch": 0.94,
"grad_norm": 0.14762701094150543,
"learning_rate": 1.605697212474609e-05,
"loss": 1.2868,
"step": 561
},
{
"epoch": 0.94,
"grad_norm": 0.16601255536079407,
"learning_rate": 1.604252108120299e-05,
"loss": 1.3123,
"step": 562
},
{
"epoch": 0.95,
"grad_norm": 0.15280282497406006,
"learning_rate": 1.6028050134425172e-05,
"loss": 1.2975,
"step": 563
},
{
"epoch": 0.95,
"grad_norm": 0.15927375853061676,
"learning_rate": 1.6013559332077945e-05,
"loss": 1.371,
"step": 564
},
{
"epoch": 0.95,
"grad_norm": 0.22685493528842926,
"learning_rate": 1.5999048721892024e-05,
"loss": 1.3319,
"step": 565
},
{
"epoch": 0.95,
"grad_norm": 0.16215336322784424,
"learning_rate": 1.5984518351663365e-05,
"loss": 1.3597,
"step": 566
},
{
"epoch": 0.95,
"grad_norm": 0.15450100600719452,
"learning_rate": 1.5969968269253014e-05,
"loss": 1.3647,
"step": 567
},
{
"epoch": 0.95,
"grad_norm": 0.156316339969635,
"learning_rate": 1.5955398522586942e-05,
"loss": 1.3858,
"step": 568
},
{
"epoch": 0.96,
"grad_norm": 0.1615144908428192,
"learning_rate": 1.5940809159655898e-05,
"loss": 1.3203,
"step": 569
},
{
"epoch": 0.96,
"grad_norm": 0.1442023366689682,
"learning_rate": 1.5926200228515235e-05,
"loss": 1.3644,
"step": 570
},
{
"epoch": 0.96,
"grad_norm": 0.15228597819805145,
"learning_rate": 1.591157177728477e-05,
"loss": 1.2107,
"step": 571
},
{
"epoch": 0.96,
"grad_norm": 0.15035109221935272,
"learning_rate": 1.5896923854148613e-05,
"loss": 1.2465,
"step": 572
},
{
"epoch": 0.96,
"grad_norm": 0.1499544084072113,
"learning_rate": 1.588225650735501e-05,
"loss": 1.3081,
"step": 573
},
{
"epoch": 0.96,
"grad_norm": 0.152054563164711,
"learning_rate": 1.5867569785216187e-05,
"loss": 1.2955,
"step": 574
},
{
"epoch": 0.97,
"grad_norm": 0.15024514496326447,
"learning_rate": 1.585286373610819e-05,
"loss": 1.2715,
"step": 575
},
{
"epoch": 0.97,
"grad_norm": 0.15844179689884186,
"learning_rate": 1.5838138408470723e-05,
"loss": 1.3345,
"step": 576
},
{
"epoch": 0.97,
"grad_norm": 0.1464165300130844,
"learning_rate": 1.5823393850807e-05,
"loss": 1.2863,
"step": 577
},
{
"epoch": 0.97,
"grad_norm": 0.16190552711486816,
"learning_rate": 1.5808630111683566e-05,
"loss": 1.2433,
"step": 578
},
{
"epoch": 0.97,
"grad_norm": 0.15056265890598297,
"learning_rate": 1.5793847239730148e-05,
"loss": 1.3604,
"step": 579
},
{
"epoch": 0.97,
"grad_norm": 0.1699298769235611,
"learning_rate": 1.5779045283639495e-05,
"loss": 1.3728,
"step": 580
},
{
"epoch": 0.98,
"grad_norm": 0.1500501036643982,
"learning_rate": 1.5764224292167223e-05,
"loss": 1.3597,
"step": 581
},
{
"epoch": 0.98,
"grad_norm": 0.15492461621761322,
"learning_rate": 1.5749384314131643e-05,
"loss": 1.312,
"step": 582
},
{
"epoch": 0.98,
"grad_norm": 0.15505270659923553,
"learning_rate": 1.5734525398413597e-05,
"loss": 1.3137,
"step": 583
},
{
"epoch": 0.98,
"grad_norm": 0.14275893568992615,
"learning_rate": 1.5719647593956328e-05,
"loss": 1.283,
"step": 584
},
{
"epoch": 0.98,
"grad_norm": 0.14876849949359894,
"learning_rate": 1.5704750949765263e-05,
"loss": 1.3035,
"step": 585
},
{
"epoch": 0.98,
"grad_norm": 0.1965971291065216,
"learning_rate": 1.5689835514907916e-05,
"loss": 1.258,
"step": 586
},
{
"epoch": 0.99,
"grad_norm": 0.14744770526885986,
"learning_rate": 1.5674901338513672e-05,
"loss": 1.3358,
"step": 587
},
{
"epoch": 0.99,
"grad_norm": 0.1537308543920517,
"learning_rate": 1.5659948469773665e-05,
"loss": 1.3796,
"step": 588
},
{
"epoch": 0.99,
"grad_norm": 0.15412989258766174,
"learning_rate": 1.5644976957940586e-05,
"loss": 1.2337,
"step": 589
},
{
"epoch": 0.99,
"grad_norm": 0.15361419320106506,
"learning_rate": 1.5629986852328546e-05,
"loss": 1.3013,
"step": 590
},
{
"epoch": 0.99,
"grad_norm": 0.15221114456653595,
"learning_rate": 1.5614978202312886e-05,
"loss": 1.2744,
"step": 591
},
{
"epoch": 0.99,
"grad_norm": 0.2714800238609314,
"learning_rate": 1.559995105733004e-05,
"loss": 1.2738,
"step": 592
},
{
"epoch": 1.0,
"grad_norm": 0.15101397037506104,
"learning_rate": 1.558490546687737e-05,
"loss": 1.3258,
"step": 593
},
{
"epoch": 1.0,
"grad_norm": 0.1766865998506546,
"learning_rate": 1.5569841480512972e-05,
"loss": 1.2578,
"step": 594
},
{
"epoch": 1.0,
"grad_norm": 0.15496864914894104,
"learning_rate": 1.5554759147855554e-05,
"loss": 1.315,
"step": 595
},
{
"epoch": 1.0,
"grad_norm": 0.20621678233146667,
"learning_rate": 1.5539658518584248e-05,
"loss": 1.2806,
"step": 596
},
{
"epoch": 1.0,
"grad_norm": 0.18250219523906708,
"learning_rate": 1.552453964243845e-05,
"loss": 1.1638,
"step": 597
},
{
"epoch": 1.0,
"grad_norm": 0.15596213936805725,
"learning_rate": 1.5509402569217677e-05,
"loss": 1.2473,
"step": 598
},
{
"epoch": 1.01,
"grad_norm": 0.15193147957324982,
"learning_rate": 1.549424734878135e-05,
"loss": 1.1939,
"step": 599
},
{
"epoch": 1.01,
"grad_norm": 0.1586681604385376,
"learning_rate": 1.5479074031048695e-05,
"loss": 1.2696,
"step": 600
},
{
"epoch": 1.01,
"grad_norm": 0.14868341386318207,
"learning_rate": 1.5463882665998534e-05,
"loss": 1.1631,
"step": 601
},
{
"epoch": 1.01,
"grad_norm": 0.16768112778663635,
"learning_rate": 1.544867330366914e-05,
"loss": 1.2179,
"step": 602
},
{
"epoch": 1.01,
"grad_norm": 0.15845203399658203,
"learning_rate": 1.5433445994158057e-05,
"loss": 1.1844,
"step": 603
},
{
"epoch": 1.01,
"grad_norm": 0.16251659393310547,
"learning_rate": 1.5418200787621966e-05,
"loss": 1.2553,
"step": 604
},
{
"epoch": 1.02,
"grad_norm": 0.16165347397327423,
"learning_rate": 1.5402937734276466e-05,
"loss": 1.2155,
"step": 605
},
{
"epoch": 1.02,
"grad_norm": 0.15819703042507172,
"learning_rate": 1.5387656884395972e-05,
"loss": 1.2607,
"step": 606
},
{
"epoch": 1.02,
"grad_norm": 0.16347314417362213,
"learning_rate": 1.53723582883135e-05,
"loss": 1.1738,
"step": 607
},
{
"epoch": 1.02,
"grad_norm": 0.16803783178329468,
"learning_rate": 1.5357041996420527e-05,
"loss": 1.2647,
"step": 608
},
{
"epoch": 1.02,
"grad_norm": 0.15513284504413605,
"learning_rate": 1.534170805916681e-05,
"loss": 1.238,
"step": 609
},
{
"epoch": 1.02,
"grad_norm": 0.1656567007303238,
"learning_rate": 1.5326356527060243e-05,
"loss": 1.2051,
"step": 610
},
{
"epoch": 1.03,
"grad_norm": 0.16050614416599274,
"learning_rate": 1.5310987450666655e-05,
"loss": 1.1976,
"step": 611
},
{
"epoch": 1.03,
"grad_norm": 0.1643039584159851,
"learning_rate": 1.529560088060968e-05,
"loss": 1.1953,
"step": 612
},
{
"epoch": 1.03,
"grad_norm": 0.18104927241802216,
"learning_rate": 1.5280196867570558e-05,
"loss": 1.2178,
"step": 613
},
{
"epoch": 1.03,
"grad_norm": 0.16460387408733368,
"learning_rate": 1.5264775462288002e-05,
"loss": 1.282,
"step": 614
},
{
"epoch": 1.03,
"grad_norm": 0.1727691888809204,
"learning_rate": 1.5249336715558003e-05,
"loss": 1.2172,
"step": 615
},
{
"epoch": 1.03,
"grad_norm": 0.16987302899360657,
"learning_rate": 1.5233880678233665e-05,
"loss": 1.2261,
"step": 616
},
{
"epoch": 1.04,
"grad_norm": 0.20719866454601288,
"learning_rate": 1.5218407401225057e-05,
"loss": 1.2258,
"step": 617
},
{
"epoch": 1.04,
"grad_norm": 0.16289398074150085,
"learning_rate": 1.5202916935499032e-05,
"loss": 1.2115,
"step": 618
},
{
"epoch": 1.04,
"grad_norm": 0.16636596620082855,
"learning_rate": 1.5187409332079058e-05,
"loss": 1.2355,
"step": 619
},
{
"epoch": 1.04,
"grad_norm": 0.16421528160572052,
"learning_rate": 1.517188464204504e-05,
"loss": 1.2748,
"step": 620
},
{
"epoch": 1.04,
"grad_norm": 0.17175228893756866,
"learning_rate": 1.5156342916533187e-05,
"loss": 1.2503,
"step": 621
},
{
"epoch": 1.04,
"grad_norm": 0.18068252503871918,
"learning_rate": 1.5140784206735803e-05,
"loss": 1.1712,
"step": 622
},
{
"epoch": 1.05,
"grad_norm": 0.1650371551513672,
"learning_rate": 1.5125208563901143e-05,
"loss": 1.2213,
"step": 623
},
{
"epoch": 1.05,
"grad_norm": 0.16020077466964722,
"learning_rate": 1.510961603933324e-05,
"loss": 1.1781,
"step": 624
},
{
"epoch": 1.05,
"grad_norm": 0.16656441986560822,
"learning_rate": 1.509400668439172e-05,
"loss": 1.2337,
"step": 625
},
{
"epoch": 1.05,
"grad_norm": 0.18046331405639648,
"learning_rate": 1.507838055049166e-05,
"loss": 1.253,
"step": 626
},
{
"epoch": 1.05,
"grad_norm": 0.20493903756141663,
"learning_rate": 1.5062737689103399e-05,
"loss": 1.2213,
"step": 627
},
{
"epoch": 1.06,
"grad_norm": 0.1694127321243286,
"learning_rate": 1.5047078151752375e-05,
"loss": 1.2316,
"step": 628
},
{
"epoch": 1.06,
"grad_norm": 0.7030985951423645,
"learning_rate": 1.5031401990018955e-05,
"loss": 1.2251,
"step": 629
},
{
"epoch": 1.06,
"grad_norm": 0.2330465018749237,
"learning_rate": 1.5015709255538258e-05,
"loss": 1.2636,
"step": 630
},
{
"epoch": 1.06,
"grad_norm": 0.165618896484375,
"learning_rate": 1.5000000000000002e-05,
"loss": 1.2023,
"step": 631
},
{
"epoch": 1.06,
"grad_norm": 0.16244085133075714,
"learning_rate": 1.4984274275148316e-05,
"loss": 1.2001,
"step": 632
},
{
"epoch": 1.06,
"grad_norm": 0.1721733659505844,
"learning_rate": 1.4968532132781573e-05,
"loss": 1.1674,
"step": 633
},
{
"epoch": 1.07,
"grad_norm": 0.17082570493221283,
"learning_rate": 1.4952773624752237e-05,
"loss": 1.1891,
"step": 634
},
{
"epoch": 1.07,
"grad_norm": 0.17394006252288818,
"learning_rate": 1.4936998802966667e-05,
"loss": 1.0953,
"step": 635
},
{
"epoch": 1.07,
"grad_norm": 0.1760803908109665,
"learning_rate": 1.4921207719384956e-05,
"loss": 1.2588,
"step": 636
},
{
"epoch": 1.07,
"grad_norm": 0.17880435287952423,
"learning_rate": 1.4905400426020769e-05,
"loss": 1.2336,
"step": 637
},
{
"epoch": 1.07,
"grad_norm": 0.16551633179187775,
"learning_rate": 1.4889576974941158e-05,
"loss": 1.1925,
"step": 638
},
{
"epoch": 1.07,
"grad_norm": 0.1722973734140396,
"learning_rate": 1.4873737418266398e-05,
"loss": 1.2335,
"step": 639
},
{
"epoch": 1.08,
"grad_norm": 0.17228730022907257,
"learning_rate": 1.4857881808169814e-05,
"loss": 1.1684,
"step": 640
},
{
"epoch": 1.08,
"grad_norm": 0.1721390336751938,
"learning_rate": 1.484201019687761e-05,
"loss": 1.2434,
"step": 641
},
{
"epoch": 1.08,
"grad_norm": 0.16736550629138947,
"learning_rate": 1.4826122636668692e-05,
"loss": 1.253,
"step": 642
},
{
"epoch": 1.08,
"grad_norm": 0.17385610938072205,
"learning_rate": 1.4810219179874502e-05,
"loss": 1.2442,
"step": 643
},
{
"epoch": 1.08,
"grad_norm": 0.3095041513442993,
"learning_rate": 1.4794299878878844e-05,
"loss": 1.2175,
"step": 644
},
{
"epoch": 1.08,
"grad_norm": 0.17718347907066345,
"learning_rate": 1.4778364786117708e-05,
"loss": 1.2667,
"step": 645
},
{
"epoch": 1.09,
"grad_norm": 0.17453229427337646,
"learning_rate": 1.47624139540791e-05,
"loss": 1.2254,
"step": 646
},
{
"epoch": 1.09,
"grad_norm": 0.6119077205657959,
"learning_rate": 1.474644743530288e-05,
"loss": 1.2668,
"step": 647
},
{
"epoch": 1.09,
"grad_norm": 0.17594890296459198,
"learning_rate": 1.4730465282380557e-05,
"loss": 1.241,
"step": 648
},
{
"epoch": 1.09,
"grad_norm": 0.18338783085346222,
"learning_rate": 1.4714467547955152e-05,
"loss": 1.226,
"step": 649
},
{
"epoch": 1.09,
"grad_norm": 0.1668035387992859,
"learning_rate": 1.4698454284721003e-05,
"loss": 1.2486,
"step": 650
},
{
"epoch": 1.09,
"grad_norm": 0.17815056443214417,
"learning_rate": 1.468242554542361e-05,
"loss": 1.211,
"step": 651
},
{
"epoch": 1.1,
"grad_norm": 0.17628586292266846,
"learning_rate": 1.466638138285943e-05,
"loss": 1.219,
"step": 652
},
{
"epoch": 1.1,
"grad_norm": 0.17057868838310242,
"learning_rate": 1.4650321849875732e-05,
"loss": 1.1731,
"step": 653
},
{
"epoch": 1.1,
"grad_norm": 0.168094664812088,
"learning_rate": 1.4634246999370415e-05,
"loss": 1.1997,
"step": 654
},
{
"epoch": 1.1,
"grad_norm": 0.19705113768577576,
"learning_rate": 1.4618156884291832e-05,
"loss": 1.186,
"step": 655
},
{
"epoch": 1.1,
"grad_norm": 0.1883486658334732,
"learning_rate": 1.4602051557638604e-05,
"loss": 1.2599,
"step": 656
},
{
"epoch": 1.1,
"grad_norm": 0.1927536576986313,
"learning_rate": 1.4585931072459474e-05,
"loss": 1.2226,
"step": 657
},
{
"epoch": 1.11,
"grad_norm": 0.17612691223621368,
"learning_rate": 1.4569795481853102e-05,
"loss": 1.2138,
"step": 658
},
{
"epoch": 1.11,
"grad_norm": 0.18257959187030792,
"learning_rate": 1.455364483896791e-05,
"loss": 1.3126,
"step": 659
},
{
"epoch": 1.11,
"grad_norm": 0.1727372258901596,
"learning_rate": 1.4537479197001898e-05,
"loss": 1.1718,
"step": 660
},
{
"epoch": 1.11,
"grad_norm": 0.18891093134880066,
"learning_rate": 1.4521298609202468e-05,
"loss": 1.2535,
"step": 661
},
{
"epoch": 1.11,
"grad_norm": 0.170741468667984,
"learning_rate": 1.4505103128866255e-05,
"loss": 1.2126,
"step": 662
},
{
"epoch": 1.11,
"grad_norm": 0.1732681393623352,
"learning_rate": 1.4488892809338952e-05,
"loss": 1.1921,
"step": 663
},
{
"epoch": 1.12,
"grad_norm": 0.18771351873874664,
"learning_rate": 1.4472667704015125e-05,
"loss": 1.2052,
"step": 664
},
{
"epoch": 1.12,
"grad_norm": 0.1787664145231247,
"learning_rate": 1.4456427866338034e-05,
"loss": 1.164,
"step": 665
},
{
"epoch": 1.12,
"grad_norm": 0.18160726130008698,
"learning_rate": 1.4440173349799483e-05,
"loss": 1.2136,
"step": 666
},
{
"epoch": 1.12,
"grad_norm": 0.1752696931362152,
"learning_rate": 1.4423904207939616e-05,
"loss": 1.2399,
"step": 667
},
{
"epoch": 1.12,
"grad_norm": 0.17932309210300446,
"learning_rate": 1.4407620494346755e-05,
"loss": 1.1854,
"step": 668
},
{
"epoch": 1.12,
"grad_norm": 0.1716100573539734,
"learning_rate": 1.4391322262657206e-05,
"loss": 1.2125,
"step": 669
},
{
"epoch": 1.13,
"grad_norm": 0.1736980676651001,
"learning_rate": 1.4375009566555119e-05,
"loss": 1.2046,
"step": 670
},
{
"epoch": 1.13,
"grad_norm": 0.18325597047805786,
"learning_rate": 1.4358682459772266e-05,
"loss": 1.1586,
"step": 671
},
{
"epoch": 1.13,
"grad_norm": 0.16865943372249603,
"learning_rate": 1.43423409960879e-05,
"loss": 1.2141,
"step": 672
},
{
"epoch": 1.13,
"grad_norm": 0.1696828305721283,
"learning_rate": 1.4325985229328557e-05,
"loss": 1.1698,
"step": 673
},
{
"epoch": 1.13,
"grad_norm": 0.18146204948425293,
"learning_rate": 1.4309615213367884e-05,
"loss": 1.2123,
"step": 674
},
{
"epoch": 1.13,
"grad_norm": 0.16776145994663239,
"learning_rate": 1.4293231002126472e-05,
"loss": 1.1712,
"step": 675
},
{
"epoch": 1.14,
"grad_norm": 0.2249058187007904,
"learning_rate": 1.4276832649571657e-05,
"loss": 1.2904,
"step": 676
},
{
"epoch": 1.14,
"grad_norm": 0.2575734853744507,
"learning_rate": 1.4260420209717363e-05,
"loss": 1.1487,
"step": 677
},
{
"epoch": 1.14,
"grad_norm": 0.1715114563703537,
"learning_rate": 1.424399373662391e-05,
"loss": 1.2264,
"step": 678
},
{
"epoch": 1.14,
"grad_norm": 0.17243024706840515,
"learning_rate": 1.4227553284397853e-05,
"loss": 1.2332,
"step": 679
},
{
"epoch": 1.14,
"grad_norm": 0.7421213984489441,
"learning_rate": 1.4211098907191778e-05,
"loss": 1.2128,
"step": 680
},
{
"epoch": 1.14,
"grad_norm": 0.1757201850414276,
"learning_rate": 1.4194630659204145e-05,
"loss": 1.2164,
"step": 681
},
{
"epoch": 1.15,
"grad_norm": 0.21629256010055542,
"learning_rate": 1.4178148594679098e-05,
"loss": 1.2803,
"step": 682
},
{
"epoch": 1.15,
"grad_norm": 0.4631064236164093,
"learning_rate": 1.41616527679063e-05,
"loss": 1.181,
"step": 683
},
{
"epoch": 1.15,
"grad_norm": 0.17093178629875183,
"learning_rate": 1.4145143233220741e-05,
"loss": 1.2279,
"step": 684
},
{
"epoch": 1.15,
"grad_norm": 0.17908430099487305,
"learning_rate": 1.4128620045002552e-05,
"loss": 1.2607,
"step": 685
},
{
"epoch": 1.15,
"grad_norm": 0.1770493984222412,
"learning_rate": 1.4112083257676857e-05,
"loss": 1.2513,
"step": 686
},
{
"epoch": 1.15,
"grad_norm": 0.1774083524942398,
"learning_rate": 1.4095532925713553e-05,
"loss": 1.2763,
"step": 687
},
{
"epoch": 1.16,
"grad_norm": 0.17406463623046875,
"learning_rate": 1.4078969103627167e-05,
"loss": 1.244,
"step": 688
},
{
"epoch": 1.16,
"grad_norm": 0.1874096840620041,
"learning_rate": 1.4062391845976653e-05,
"loss": 1.2294,
"step": 689
},
{
"epoch": 1.16,
"grad_norm": 0.18565060198307037,
"learning_rate": 1.4045801207365222e-05,
"loss": 1.2483,
"step": 690
},
{
"epoch": 1.16,
"grad_norm": 0.20687486231327057,
"learning_rate": 1.4029197242440158e-05,
"loss": 1.2301,
"step": 691
},
{
"epoch": 1.16,
"grad_norm": 0.1744285523891449,
"learning_rate": 1.401258000589264e-05,
"loss": 1.1752,
"step": 692
},
{
"epoch": 1.16,
"grad_norm": 0.17503005266189575,
"learning_rate": 1.3995949552457568e-05,
"loss": 1.1985,
"step": 693
},
{
"epoch": 1.17,
"grad_norm": 0.174129918217659,
"learning_rate": 1.397930593691337e-05,
"loss": 1.2038,
"step": 694
},
{
"epoch": 1.17,
"grad_norm": 0.16887657344341278,
"learning_rate": 1.3962649214081828e-05,
"loss": 1.2517,
"step": 695
},
{
"epoch": 1.17,
"grad_norm": 0.1817820817232132,
"learning_rate": 1.3945979438827903e-05,
"loss": 1.2252,
"step": 696
},
{
"epoch": 1.17,
"grad_norm": 0.19161197543144226,
"learning_rate": 1.3929296666059545e-05,
"loss": 1.2213,
"step": 697
},
{
"epoch": 1.17,
"grad_norm": 0.1906740814447403,
"learning_rate": 1.3912600950727511e-05,
"loss": 1.2635,
"step": 698
},
{
"epoch": 1.17,
"grad_norm": 0.1750497668981552,
"learning_rate": 1.3895892347825205e-05,
"loss": 1.1946,
"step": 699
},
{
"epoch": 1.18,
"grad_norm": 0.18113519251346588,
"learning_rate": 1.3879170912388467e-05,
"loss": 1.2198,
"step": 700
},
{
"epoch": 1.18,
"grad_norm": 0.17936843633651733,
"learning_rate": 1.3862436699495401e-05,
"loss": 1.2095,
"step": 701
},
{
"epoch": 1.18,
"grad_norm": 0.18390525877475739,
"learning_rate": 1.3845689764266217e-05,
"loss": 1.2253,
"step": 702
},
{
"epoch": 1.18,
"grad_norm": 0.17131981253623962,
"learning_rate": 1.3828930161863015e-05,
"loss": 1.2132,
"step": 703
},
{
"epoch": 1.18,
"grad_norm": 0.21815598011016846,
"learning_rate": 1.3812157947489629e-05,
"loss": 1.1864,
"step": 704
},
{
"epoch": 1.18,
"grad_norm": 0.285260409116745,
"learning_rate": 1.3795373176391423e-05,
"loss": 1.2513,
"step": 705
},
{
"epoch": 1.19,
"grad_norm": 0.17937158048152924,
"learning_rate": 1.3778575903855131e-05,
"loss": 1.2771,
"step": 706
},
{
"epoch": 1.19,
"grad_norm": 0.17071524262428284,
"learning_rate": 1.3761766185208663e-05,
"loss": 1.2378,
"step": 707
},
{
"epoch": 1.19,
"grad_norm": 0.17919889092445374,
"learning_rate": 1.3744944075820926e-05,
"loss": 1.259,
"step": 708
},
{
"epoch": 1.19,
"grad_norm": 0.18037040531635284,
"learning_rate": 1.3728109631101631e-05,
"loss": 1.2428,
"step": 709
},
{
"epoch": 1.19,
"grad_norm": 0.1809706836938858,
"learning_rate": 1.3711262906501136e-05,
"loss": 1.193,
"step": 710
},
{
"epoch": 1.19,
"grad_norm": 0.1728934645652771,
"learning_rate": 1.3694403957510235e-05,
"loss": 1.2377,
"step": 711
},
{
"epoch": 1.2,
"grad_norm": 0.1798473298549652,
"learning_rate": 1.3677532839659993e-05,
"loss": 1.2693,
"step": 712
},
{
"epoch": 1.2,
"grad_norm": 0.20100121200084686,
"learning_rate": 1.3660649608521551e-05,
"loss": 1.1694,
"step": 713
},
{
"epoch": 1.2,
"grad_norm": 0.1727704554796219,
"learning_rate": 1.3643754319705956e-05,
"loss": 1.2541,
"step": 714
},
{
"epoch": 1.2,
"grad_norm": 0.17781519889831543,
"learning_rate": 1.3626847028863967e-05,
"loss": 1.1603,
"step": 715
},
{
"epoch": 1.2,
"grad_norm": 0.17592477798461914,
"learning_rate": 1.3609927791685889e-05,
"loss": 1.2208,
"step": 716
},
{
"epoch": 1.2,
"grad_norm": 0.1709340363740921,
"learning_rate": 1.3592996663901354e-05,
"loss": 1.2067,
"step": 717
},
{
"epoch": 1.21,
"grad_norm": 0.17522557079792023,
"learning_rate": 1.3576053701279175e-05,
"loss": 1.1681,
"step": 718
},
{
"epoch": 1.21,
"grad_norm": 0.17879725992679596,
"learning_rate": 1.3559098959627148e-05,
"loss": 1.1957,
"step": 719
},
{
"epoch": 1.21,
"grad_norm": 0.18600985407829285,
"learning_rate": 1.3542132494791865e-05,
"loss": 1.2807,
"step": 720
},
{
"epoch": 1.21,
"grad_norm": 0.17699839174747467,
"learning_rate": 1.3525154362658521e-05,
"loss": 1.24,
"step": 721
},
{
"epoch": 1.21,
"grad_norm": 0.17192897200584412,
"learning_rate": 1.3508164619150763e-05,
"loss": 1.1199,
"step": 722
},
{
"epoch": 1.21,
"grad_norm": 0.18780237436294556,
"learning_rate": 1.349116332023047e-05,
"loss": 1.2228,
"step": 723
},
{
"epoch": 1.22,
"grad_norm": 0.19598455727100372,
"learning_rate": 1.3474150521897584e-05,
"loss": 1.2424,
"step": 724
},
{
"epoch": 1.22,
"grad_norm": 0.18084251880645752,
"learning_rate": 1.345712628018993e-05,
"loss": 1.2508,
"step": 725
},
{
"epoch": 1.22,
"grad_norm": 0.17511381208896637,
"learning_rate": 1.3440090651183017e-05,
"loss": 1.2108,
"step": 726
},
{
"epoch": 1.22,
"grad_norm": 0.18235406279563904,
"learning_rate": 1.3423043690989874e-05,
"loss": 1.2105,
"step": 727
},
{
"epoch": 1.22,
"grad_norm": 0.174601748585701,
"learning_rate": 1.3405985455760843e-05,
"loss": 1.1501,
"step": 728
},
{
"epoch": 1.22,
"grad_norm": 0.17258746922016144,
"learning_rate": 1.3388916001683412e-05,
"loss": 1.1968,
"step": 729
},
{
"epoch": 1.23,
"grad_norm": 0.18527741730213165,
"learning_rate": 1.3371835384982014e-05,
"loss": 1.2005,
"step": 730
},
{
"epoch": 1.23,
"grad_norm": 0.18602068722248077,
"learning_rate": 1.335474366191786e-05,
"loss": 1.2503,
"step": 731
},
{
"epoch": 1.23,
"grad_norm": 0.17807281017303467,
"learning_rate": 1.3337640888788734e-05,
"loss": 1.2328,
"step": 732
},
{
"epoch": 1.23,
"grad_norm": 0.18027430772781372,
"learning_rate": 1.3320527121928829e-05,
"loss": 1.1623,
"step": 733
},
{
"epoch": 1.23,
"grad_norm": 0.18117040395736694,
"learning_rate": 1.3303402417708532e-05,
"loss": 1.3049,
"step": 734
},
{
"epoch": 1.23,
"grad_norm": 0.17273138463497162,
"learning_rate": 1.328626683253428e-05,
"loss": 1.276,
"step": 735
},
{
"epoch": 1.24,
"grad_norm": 0.18830521404743195,
"learning_rate": 1.3269120422848332e-05,
"loss": 1.2633,
"step": 736
},
{
"epoch": 1.24,
"grad_norm": 0.225563183426857,
"learning_rate": 1.3251963245128602e-05,
"loss": 1.2619,
"step": 737
},
{
"epoch": 1.24,
"grad_norm": 0.17559683322906494,
"learning_rate": 1.3234795355888488e-05,
"loss": 1.2249,
"step": 738
},
{
"epoch": 1.24,
"grad_norm": 0.17287300527095795,
"learning_rate": 1.321761681167665e-05,
"loss": 1.2516,
"step": 739
},
{
"epoch": 1.24,
"grad_norm": 0.17839746177196503,
"learning_rate": 1.3200427669076858e-05,
"loss": 1.183,
"step": 740
},
{
"epoch": 1.24,
"grad_norm": 0.185985267162323,
"learning_rate": 1.3183227984707788e-05,
"loss": 1.2423,
"step": 741
},
{
"epoch": 1.25,
"grad_norm": 0.17377838492393494,
"learning_rate": 1.316601781522284e-05,
"loss": 1.2184,
"step": 742
},
{
"epoch": 1.25,
"grad_norm": 0.21033476293087006,
"learning_rate": 1.3148797217309945e-05,
"loss": 1.1849,
"step": 743
},
{
"epoch": 1.25,
"grad_norm": 0.17651298642158508,
"learning_rate": 1.3131566247691387e-05,
"loss": 1.2037,
"step": 744
},
{
"epoch": 1.25,
"grad_norm": 0.17238590121269226,
"learning_rate": 1.3114324963123624e-05,
"loss": 1.1511,
"step": 745
},
{
"epoch": 1.25,
"grad_norm": 0.17908811569213867,
"learning_rate": 1.3097073420397068e-05,
"loss": 1.1719,
"step": 746
},
{
"epoch": 1.25,
"grad_norm": 0.18808238208293915,
"learning_rate": 1.3079811676335933e-05,
"loss": 1.1895,
"step": 747
},
{
"epoch": 1.26,
"grad_norm": 0.17913520336151123,
"learning_rate": 1.3062539787798042e-05,
"loss": 1.2519,
"step": 748
},
{
"epoch": 1.26,
"grad_norm": 0.18027973175048828,
"learning_rate": 1.3045257811674618e-05,
"loss": 1.2937,
"step": 749
},
{
"epoch": 1.26,
"grad_norm": 0.19399555027484894,
"learning_rate": 1.3027965804890118e-05,
"loss": 1.1962,
"step": 750
},
{
"epoch": 1.26,
"grad_norm": 0.17674273252487183,
"learning_rate": 1.3010663824402039e-05,
"loss": 1.203,
"step": 751
},
{
"epoch": 1.26,
"grad_norm": 0.1814504712820053,
"learning_rate": 1.2993351927200729e-05,
"loss": 1.2322,
"step": 752
},
{
"epoch": 1.27,
"grad_norm": 0.18903951346874237,
"learning_rate": 1.2976030170309199e-05,
"loss": 1.1964,
"step": 753
},
{
"epoch": 1.27,
"grad_norm": 0.1802082657814026,
"learning_rate": 1.2958698610782939e-05,
"loss": 1.2374,
"step": 754
},
{
"epoch": 1.27,
"grad_norm": 0.18641608953475952,
"learning_rate": 1.2941357305709724e-05,
"loss": 1.217,
"step": 755
},
{
"epoch": 1.27,
"grad_norm": 0.1775893121957779,
"learning_rate": 1.2924006312209434e-05,
"loss": 1.2446,
"step": 756
},
{
"epoch": 1.27,
"grad_norm": 0.21701885759830475,
"learning_rate": 1.2906645687433858e-05,
"loss": 1.1952,
"step": 757
},
{
"epoch": 1.27,
"grad_norm": 1.1721103191375732,
"learning_rate": 1.288927548856651e-05,
"loss": 1.3315,
"step": 758
},
{
"epoch": 1.28,
"grad_norm": 0.19379550218582153,
"learning_rate": 1.2871895772822442e-05,
"loss": 1.26,
"step": 759
},
{
"epoch": 1.28,
"grad_norm": 0.18671070039272308,
"learning_rate": 1.2854506597448049e-05,
"loss": 1.2145,
"step": 760
},
{
"epoch": 1.28,
"grad_norm": 0.1993437558412552,
"learning_rate": 1.2837108019720885e-05,
"loss": 1.2384,
"step": 761
},
{
"epoch": 1.28,
"grad_norm": 0.17678485810756683,
"learning_rate": 1.2819700096949484e-05,
"loss": 1.2354,
"step": 762
},
{
"epoch": 1.28,
"grad_norm": 0.17472977936267853,
"learning_rate": 1.2802282886473143e-05,
"loss": 1.2305,
"step": 763
},
{
"epoch": 1.28,
"grad_norm": 0.1764778047800064,
"learning_rate": 1.2784856445661764e-05,
"loss": 1.1744,
"step": 764
},
{
"epoch": 1.29,
"grad_norm": 0.17622295022010803,
"learning_rate": 1.2767420831915658e-05,
"loss": 1.2082,
"step": 765
},
{
"epoch": 1.29,
"grad_norm": 0.17645548284053802,
"learning_rate": 1.2749976102665336e-05,
"loss": 1.1781,
"step": 766
},
{
"epoch": 1.29,
"grad_norm": 0.18209517002105713,
"learning_rate": 1.2732522315371338e-05,
"loss": 1.266,
"step": 767
},
{
"epoch": 1.29,
"grad_norm": 0.20055533945560455,
"learning_rate": 1.271505952752405e-05,
"loss": 1.1906,
"step": 768
},
{
"epoch": 1.29,
"grad_norm": 0.1801963597536087,
"learning_rate": 1.2697587796643496e-05,
"loss": 1.1623,
"step": 769
},
{
"epoch": 1.29,
"grad_norm": 0.17526021599769592,
"learning_rate": 1.2680107180279148e-05,
"loss": 1.2486,
"step": 770
},
{
"epoch": 1.3,
"grad_norm": 0.18920443952083588,
"learning_rate": 1.266261773600977e-05,
"loss": 1.1747,
"step": 771
},
{
"epoch": 1.3,
"grad_norm": 0.18233636021614075,
"learning_rate": 1.2645119521443183e-05,
"loss": 1.2689,
"step": 772
},
{
"epoch": 1.3,
"grad_norm": 0.19687601923942566,
"learning_rate": 1.2627612594216105e-05,
"loss": 1.2542,
"step": 773
},
{
"epoch": 1.3,
"grad_norm": 0.22582928836345673,
"learning_rate": 1.261009701199395e-05,
"loss": 1.1682,
"step": 774
},
{
"epoch": 1.3,
"grad_norm": 0.1888149231672287,
"learning_rate": 1.2592572832470637e-05,
"loss": 1.1977,
"step": 775
},
{
"epoch": 1.3,
"grad_norm": 0.17129340767860413,
"learning_rate": 1.2575040113368413e-05,
"loss": 1.1688,
"step": 776
},
{
"epoch": 1.31,
"grad_norm": 0.20140545070171356,
"learning_rate": 1.2557498912437642e-05,
"loss": 1.2166,
"step": 777
},
{
"epoch": 1.31,
"grad_norm": 0.22548937797546387,
"learning_rate": 1.2539949287456634e-05,
"loss": 1.2232,
"step": 778
},
{
"epoch": 1.31,
"grad_norm": 0.18302060663700104,
"learning_rate": 1.2522391296231444e-05,
"loss": 1.1677,
"step": 779
},
{
"epoch": 1.31,
"grad_norm": 0.1751432567834854,
"learning_rate": 1.2504824996595684e-05,
"loss": 1.1852,
"step": 780
},
{
"epoch": 1.31,
"grad_norm": 0.18371136486530304,
"learning_rate": 1.248725044641033e-05,
"loss": 1.1432,
"step": 781
},
{
"epoch": 1.31,
"grad_norm": 0.1811445653438568,
"learning_rate": 1.2469667703563543e-05,
"loss": 1.2098,
"step": 782
},
{
"epoch": 1.32,
"grad_norm": 0.1764684021472931,
"learning_rate": 1.2452076825970458e-05,
"loss": 1.2012,
"step": 783
},
{
"epoch": 1.32,
"grad_norm": 0.18252627551555634,
"learning_rate": 1.2434477871573014e-05,
"loss": 1.2345,
"step": 784
},
{
"epoch": 1.32,
"grad_norm": 0.18287749588489532,
"learning_rate": 1.2416870898339748e-05,
"loss": 1.208,
"step": 785
},
{
"epoch": 1.32,
"grad_norm": 0.17731842398643494,
"learning_rate": 1.2399255964265613e-05,
"loss": 1.184,
"step": 786
},
{
"epoch": 1.32,
"grad_norm": 0.17872479557991028,
"learning_rate": 1.2381633127371784e-05,
"loss": 1.2549,
"step": 787
},
{
"epoch": 1.32,
"grad_norm": 0.17636586725711823,
"learning_rate": 1.2364002445705463e-05,
"loss": 1.1565,
"step": 788
},
{
"epoch": 1.33,
"grad_norm": 0.17690224945545197,
"learning_rate": 1.2346363977339698e-05,
"loss": 1.2265,
"step": 789
},
{
"epoch": 1.33,
"grad_norm": 0.17399299144744873,
"learning_rate": 1.2328717780373178e-05,
"loss": 1.1855,
"step": 790
},
{
"epoch": 1.33,
"grad_norm": 0.18190020322799683,
"learning_rate": 1.2311063912930054e-05,
"loss": 1.2854,
"step": 791
},
{
"epoch": 1.33,
"grad_norm": 0.1864079087972641,
"learning_rate": 1.2293402433159744e-05,
"loss": 1.2185,
"step": 792
},
{
"epoch": 1.33,
"grad_norm": 0.1937561333179474,
"learning_rate": 1.2275733399236734e-05,
"loss": 1.1948,
"step": 793
},
{
"epoch": 1.33,
"grad_norm": 0.18610996007919312,
"learning_rate": 1.22580568693604e-05,
"loss": 1.2336,
"step": 794
},
{
"epoch": 1.34,
"grad_norm": 0.18320509791374207,
"learning_rate": 1.2240372901754798e-05,
"loss": 1.3031,
"step": 795
},
{
"epoch": 1.34,
"grad_norm": 0.18615087866783142,
"learning_rate": 1.2222681554668489e-05,
"loss": 1.1994,
"step": 796
},
{
"epoch": 1.34,
"grad_norm": 0.18076477944850922,
"learning_rate": 1.2204982886374348e-05,
"loss": 1.1994,
"step": 797
},
{
"epoch": 1.34,
"grad_norm": 0.19473476707935333,
"learning_rate": 1.2187276955169356e-05,
"loss": 1.1846,
"step": 798
},
{
"epoch": 1.34,
"grad_norm": 0.2395877242088318,
"learning_rate": 1.2169563819374413e-05,
"loss": 1.1453,
"step": 799
},
{
"epoch": 1.34,
"grad_norm": 0.18047839403152466,
"learning_rate": 1.2151843537334163e-05,
"loss": 1.1209,
"step": 800
},
{
"epoch": 1.0,
"grad_norm": 0.19994132220745087,
"learning_rate": 1.2134116167416777e-05,
"loss": 1.1086,
"step": 801
},
{
"epoch": 1.0,
"grad_norm": 0.17648063600063324,
"learning_rate": 1.211638176801378e-05,
"loss": 1.1259,
"step": 802
},
{
"epoch": 1.01,
"grad_norm": 0.19430503249168396,
"learning_rate": 1.209864039753985e-05,
"loss": 1.1437,
"step": 803
},
{
"epoch": 1.01,
"grad_norm": 0.17653770744800568,
"learning_rate": 1.208089211443262e-05,
"loss": 1.0962,
"step": 804
},
{
"epoch": 1.01,
"grad_norm": 0.20566996932029724,
"learning_rate": 1.20631369771525e-05,
"loss": 1.1442,
"step": 805
},
{
"epoch": 1.01,
"grad_norm": 0.18861688673496246,
"learning_rate": 1.2045375044182477e-05,
"loss": 1.0776,
"step": 806
},
{
"epoch": 1.01,
"grad_norm": 0.20388789474964142,
"learning_rate": 1.2027606374027916e-05,
"loss": 1.0745,
"step": 807
},
{
"epoch": 1.01,
"grad_norm": 0.19004268944263458,
"learning_rate": 1.2009831025216379e-05,
"loss": 1.1103,
"step": 808
},
{
"epoch": 1.02,
"grad_norm": 0.21997405588626862,
"learning_rate": 1.1992049056297418e-05,
"loss": 1.0926,
"step": 809
},
{
"epoch": 1.02,
"grad_norm": 0.19315621256828308,
"learning_rate": 1.1974260525842408e-05,
"loss": 1.1291,
"step": 810
},
{
"epoch": 1.02,
"grad_norm": 0.32171598076820374,
"learning_rate": 1.1956465492444316e-05,
"loss": 1.1349,
"step": 811
},
{
"epoch": 1.02,
"grad_norm": 0.19857493042945862,
"learning_rate": 1.1938664014717538e-05,
"loss": 1.1059,
"step": 812
},
{
"epoch": 1.02,
"grad_norm": 0.27881693840026855,
"learning_rate": 1.1920856151297705e-05,
"loss": 1.1333,
"step": 813
},
{
"epoch": 1.02,
"grad_norm": 0.2011502981185913,
"learning_rate": 1.1903041960841473e-05,
"loss": 1.1394,
"step": 814
},
{
"epoch": 1.03,
"grad_norm": 0.1957554966211319,
"learning_rate": 1.1885221502026331e-05,
"loss": 1.0156,
"step": 815
},
{
"epoch": 1.03,
"grad_norm": 0.21186351776123047,
"learning_rate": 1.186739483355043e-05,
"loss": 1.1461,
"step": 816
},
{
"epoch": 1.03,
"grad_norm": 0.2122260183095932,
"learning_rate": 1.1849562014132367e-05,
"loss": 1.029,
"step": 817
},
{
"epoch": 1.03,
"grad_norm": 0.24291783571243286,
"learning_rate": 1.1831723102511002e-05,
"loss": 1.1428,
"step": 818
},
{
"epoch": 1.03,
"grad_norm": 0.3922155499458313,
"learning_rate": 1.1813878157445253e-05,
"loss": 1.1503,
"step": 819
},
{
"epoch": 1.03,
"grad_norm": 0.208229199051857,
"learning_rate": 1.1796027237713927e-05,
"loss": 1.1213,
"step": 820
},
{
"epoch": 1.04,
"grad_norm": 0.21276305615901947,
"learning_rate": 1.1778170402115502e-05,
"loss": 1.0765,
"step": 821
},
{
"epoch": 1.04,
"grad_norm": 0.21143099665641785,
"learning_rate": 1.1760307709467938e-05,
"loss": 1.078,
"step": 822
},
{
"epoch": 1.04,
"grad_norm": 0.24757124483585358,
"learning_rate": 1.1742439218608492e-05,
"loss": 1.1392,
"step": 823
},
{
"epoch": 1.04,
"grad_norm": 0.23540030419826508,
"learning_rate": 1.1724564988393523e-05,
"loss": 1.0784,
"step": 824
},
{
"epoch": 1.04,
"grad_norm": 0.21749743819236755,
"learning_rate": 1.1706685077698288e-05,
"loss": 1.1815,
"step": 825
},
{
"epoch": 1.04,
"grad_norm": 0.2104983776807785,
"learning_rate": 1.168879954541676e-05,
"loss": 1.09,
"step": 826
},
{
"epoch": 1.05,
"grad_norm": 0.2709580957889557,
"learning_rate": 1.1670908450461422e-05,
"loss": 1.0589,
"step": 827
},
{
"epoch": 1.05,
"grad_norm": 0.20893564820289612,
"learning_rate": 1.165301185176309e-05,
"loss": 1.109,
"step": 828
},
{
"epoch": 1.05,
"grad_norm": 0.22274000942707062,
"learning_rate": 1.1635109808270696e-05,
"loss": 1.0345,
"step": 829
},
{
"epoch": 1.05,
"grad_norm": 0.289896160364151,
"learning_rate": 1.1617202378951124e-05,
"loss": 1.1328,
"step": 830
},
{
"epoch": 1.05,
"grad_norm": 0.2315712571144104,
"learning_rate": 1.1599289622788976e-05,
"loss": 1.1496,
"step": 831
},
{
"epoch": 1.05,
"grad_norm": 0.2189183086156845,
"learning_rate": 1.1581371598786413e-05,
"loss": 1.063,
"step": 832
},
{
"epoch": 1.06,
"grad_norm": 0.23269696533679962,
"learning_rate": 1.1563448365962951e-05,
"loss": 1.108,
"step": 833
},
{
"epoch": 1.06,
"grad_norm": 0.22079916298389435,
"learning_rate": 1.1545519983355255e-05,
"loss": 1.1076,
"step": 834
},
{
"epoch": 1.06,
"grad_norm": 0.2187114953994751,
"learning_rate": 1.152758651001695e-05,
"loss": 1.1034,
"step": 835
},
{
"epoch": 1.06,
"grad_norm": 0.25479793548583984,
"learning_rate": 1.1509648005018445e-05,
"loss": 1.1252,
"step": 836
},
{
"epoch": 1.06,
"grad_norm": 0.22512087225914001,
"learning_rate": 1.1491704527446706e-05,
"loss": 1.0329,
"step": 837
},
{
"epoch": 1.06,
"grad_norm": 0.2142249047756195,
"learning_rate": 1.1473756136405086e-05,
"loss": 1.084,
"step": 838
},
{
"epoch": 1.07,
"grad_norm": 0.21227596700191498,
"learning_rate": 1.1455802891013119e-05,
"loss": 1.0162,
"step": 839
},
{
"epoch": 1.07,
"grad_norm": 0.21206368505954742,
"learning_rate": 1.143784485040633e-05,
"loss": 1.0252,
"step": 840
},
{
"epoch": 1.07,
"grad_norm": 0.22016692161560059,
"learning_rate": 1.1419882073736042e-05,
"loss": 1.1123,
"step": 841
},
{
"epoch": 1.07,
"grad_norm": 0.23406410217285156,
"learning_rate": 1.140191462016917e-05,
"loss": 1.1437,
"step": 842
},
{
"epoch": 1.07,
"grad_norm": 0.22070090472698212,
"learning_rate": 1.1383942548888046e-05,
"loss": 1.0439,
"step": 843
},
{
"epoch": 1.07,
"grad_norm": 0.25555887818336487,
"learning_rate": 1.1365965919090196e-05,
"loss": 1.0804,
"step": 844
},
{
"epoch": 1.08,
"grad_norm": 0.21243128180503845,
"learning_rate": 1.1347984789988175e-05,
"loss": 1.0437,
"step": 845
},
{
"epoch": 1.08,
"grad_norm": 0.22087867558002472,
"learning_rate": 1.1329999220809354e-05,
"loss": 1.1799,
"step": 846
},
{
"epoch": 1.08,
"grad_norm": 0.2302551418542862,
"learning_rate": 1.131200927079573e-05,
"loss": 1.1045,
"step": 847
},
{
"epoch": 1.08,
"grad_norm": 0.21491187810897827,
"learning_rate": 1.129401499920372e-05,
"loss": 1.0882,
"step": 848
},
{
"epoch": 1.08,
"grad_norm": 0.22292113304138184,
"learning_rate": 1.1276016465303989e-05,
"loss": 1.1081,
"step": 849
},
{
"epoch": 1.08,
"grad_norm": 0.2183646559715271,
"learning_rate": 1.1258013728381241e-05,
"loss": 1.142,
"step": 850
},
{
"epoch": 1.09,
"grad_norm": 0.23271261155605316,
"learning_rate": 1.1240006847734012e-05,
"loss": 1.1016,
"step": 851
},
{
"epoch": 1.09,
"grad_norm": 0.23348087072372437,
"learning_rate": 1.1221995882674494e-05,
"loss": 1.1689,
"step": 852
},
{
"epoch": 1.09,
"grad_norm": 0.22539764642715454,
"learning_rate": 1.1203980892528339e-05,
"loss": 1.0686,
"step": 853
},
{
"epoch": 1.09,
"grad_norm": 0.22574321925640106,
"learning_rate": 1.1185961936634446e-05,
"loss": 1.1275,
"step": 854
},
{
"epoch": 1.09,
"grad_norm": 0.22307804226875305,
"learning_rate": 1.1167939074344782e-05,
"loss": 1.1294,
"step": 855
},
{
"epoch": 1.09,
"grad_norm": 0.2302752584218979,
"learning_rate": 1.1149912365024182e-05,
"loss": 1.0557,
"step": 856
},
{
"epoch": 1.1,
"grad_norm": 0.22006703913211823,
"learning_rate": 1.1131881868050151e-05,
"loss": 1.0894,
"step": 857
},
{
"epoch": 1.1,
"grad_norm": 0.21575981378555298,
"learning_rate": 1.1113847642812669e-05,
"loss": 1.0884,
"step": 858
},
{
"epoch": 1.1,
"grad_norm": 0.2194596529006958,
"learning_rate": 1.1095809748714002e-05,
"loss": 1.0356,
"step": 859
},
{
"epoch": 1.1,
"grad_norm": 0.2199021875858307,
"learning_rate": 1.107776824516849e-05,
"loss": 1.0905,
"step": 860
},
{
"epoch": 1.1,
"grad_norm": 0.2257576435804367,
"learning_rate": 1.1059723191602368e-05,
"loss": 1.1408,
"step": 861
},
{
"epoch": 1.1,
"grad_norm": 0.21841183304786682,
"learning_rate": 1.1041674647453569e-05,
"loss": 1.075,
"step": 862
},
{
"epoch": 1.11,
"grad_norm": 0.22252121567726135,
"learning_rate": 1.102362267217152e-05,
"loss": 1.1138,
"step": 863
},
{
"epoch": 1.11,
"grad_norm": 0.2398969680070877,
"learning_rate": 1.1005567325216946e-05,
"loss": 1.1331,
"step": 864
},
{
"epoch": 1.11,
"grad_norm": 0.21763458847999573,
"learning_rate": 1.098750866606168e-05,
"loss": 1.0558,
"step": 865
},
{
"epoch": 1.11,
"grad_norm": 0.24076849222183228,
"learning_rate": 1.0969446754188466e-05,
"loss": 1.1461,
"step": 866
},
{
"epoch": 1.11,
"grad_norm": 0.23264744877815247,
"learning_rate": 1.0951381649090766e-05,
"loss": 1.0944,
"step": 867
},
{
"epoch": 1.11,
"grad_norm": 0.22494088113307953,
"learning_rate": 1.0933313410272547e-05,
"loss": 1.0656,
"step": 868
},
{
"epoch": 1.12,
"grad_norm": 0.24088388681411743,
"learning_rate": 1.0915242097248114e-05,
"loss": 1.0812,
"step": 869
},
{
"epoch": 1.12,
"grad_norm": 0.21584591269493103,
"learning_rate": 1.0897167769541888e-05,
"loss": 1.0318,
"step": 870
},
{
"epoch": 1.12,
"grad_norm": 0.21594910323619843,
"learning_rate": 1.0879090486688221e-05,
"loss": 1.0855,
"step": 871
},
{
"epoch": 1.12,
"grad_norm": 0.2234317660331726,
"learning_rate": 1.0861010308231206e-05,
"loss": 1.1345,
"step": 872
},
{
"epoch": 1.12,
"grad_norm": 0.21741139888763428,
"learning_rate": 1.0842927293724461e-05,
"loss": 1.0099,
"step": 873
},
{
"epoch": 1.12,
"grad_norm": 0.23724225163459778,
"learning_rate": 1.0824841502730958e-05,
"loss": 1.1176,
"step": 874
},
{
"epoch": 1.13,
"grad_norm": 0.22442878782749176,
"learning_rate": 1.0806752994822807e-05,
"loss": 1.092,
"step": 875
},
{
"epoch": 1.13,
"grad_norm": 0.2207866907119751,
"learning_rate": 1.0788661829581069e-05,
"loss": 1.0059,
"step": 876
},
{
"epoch": 1.13,
"grad_norm": 0.29180410504341125,
"learning_rate": 1.0770568066595557e-05,
"loss": 1.1039,
"step": 877
},
{
"epoch": 1.13,
"grad_norm": 0.2219434380531311,
"learning_rate": 1.0752471765464643e-05,
"loss": 1.0736,
"step": 878
},
{
"epoch": 1.13,
"grad_norm": 0.2247844636440277,
"learning_rate": 1.0734372985795062e-05,
"loss": 1.049,
"step": 879
},
{
"epoch": 1.13,
"grad_norm": 0.22250817716121674,
"learning_rate": 1.0716271787201704e-05,
"loss": 1.0894,
"step": 880
},
{
"epoch": 1.14,
"grad_norm": 0.22135023772716522,
"learning_rate": 1.069816822930743e-05,
"loss": 1.1142,
"step": 881
},
{
"epoch": 1.14,
"grad_norm": 0.22517652809619904,
"learning_rate": 1.0680062371742878e-05,
"loss": 1.0578,
"step": 882
},
{
"epoch": 1.14,
"grad_norm": 0.22929422557353973,
"learning_rate": 1.066195427414626e-05,
"loss": 1.0888,
"step": 883
},
{
"epoch": 1.14,
"grad_norm": 0.22495953738689423,
"learning_rate": 1.0643843996163154e-05,
"loss": 1.0843,
"step": 884
},
{
"epoch": 1.14,
"grad_norm": 0.26695767045021057,
"learning_rate": 1.0625731597446333e-05,
"loss": 1.1115,
"step": 885
},
{
"epoch": 1.14,
"grad_norm": 0.2314533144235611,
"learning_rate": 1.0607617137655554e-05,
"loss": 1.1238,
"step": 886
},
{
"epoch": 1.15,
"grad_norm": 0.23202592134475708,
"learning_rate": 1.0589500676457358e-05,
"loss": 1.0729,
"step": 887
},
{
"epoch": 1.15,
"grad_norm": 0.24274666607379913,
"learning_rate": 1.057138227352488e-05,
"loss": 1.1326,
"step": 888
},
{
"epoch": 1.15,
"grad_norm": 0.22568246722221375,
"learning_rate": 1.055326198853765e-05,
"loss": 1.0958,
"step": 889
},
{
"epoch": 1.15,
"grad_norm": 0.24162302911281586,
"learning_rate": 1.0535139881181404e-05,
"loss": 1.129,
"step": 890
},
{
"epoch": 1.15,
"grad_norm": 0.26246607303619385,
"learning_rate": 1.051701601114787e-05,
"loss": 1.1259,
"step": 891
},
{
"epoch": 1.15,
"grad_norm": 0.23852334916591644,
"learning_rate": 1.0498890438134587e-05,
"loss": 1.144,
"step": 892
},
{
"epoch": 1.16,
"grad_norm": 0.2319112867116928,
"learning_rate": 1.0480763221844705e-05,
"loss": 1.1057,
"step": 893
},
{
"epoch": 1.16,
"grad_norm": 0.22893072664737701,
"learning_rate": 1.0462634421986786e-05,
"loss": 1.1167,
"step": 894
},
{
"epoch": 1.16,
"grad_norm": 0.23033589124679565,
"learning_rate": 1.0444504098274608e-05,
"loss": 1.0821,
"step": 895
},
{
"epoch": 1.16,
"grad_norm": 0.6265642046928406,
"learning_rate": 1.042637231042697e-05,
"loss": 1.1138,
"step": 896
},
{
"epoch": 1.16,
"grad_norm": 0.23570573329925537,
"learning_rate": 1.0408239118167483e-05,
"loss": 1.0668,
"step": 897
},
{
"epoch": 1.16,
"grad_norm": 0.22059451043605804,
"learning_rate": 1.0390104581224401e-05,
"loss": 1.1016,
"step": 898
},
{
"epoch": 1.17,
"grad_norm": 0.23044194281101227,
"learning_rate": 1.0371968759330393e-05,
"loss": 1.0724,
"step": 899
},
{
"epoch": 1.17,
"grad_norm": 0.23537249863147736,
"learning_rate": 1.0353831712222369e-05,
"loss": 1.1254,
"step": 900
},
{
"epoch": 1.17,
"grad_norm": 0.24956373870372772,
"learning_rate": 1.0335693499641272e-05,
"loss": 1.0539,
"step": 901
},
{
"epoch": 1.17,
"grad_norm": 0.23396573960781097,
"learning_rate": 1.0317554181331882e-05,
"loss": 1.1686,
"step": 902
},
{
"epoch": 1.17,
"grad_norm": 0.23241214454174042,
"learning_rate": 1.0299413817042623e-05,
"loss": 1.1003,
"step": 903
},
{
"epoch": 1.17,
"grad_norm": 0.2384105920791626,
"learning_rate": 1.0281272466525365e-05,
"loss": 1.0673,
"step": 904
},
{
"epoch": 1.18,
"grad_norm": 0.24274693429470062,
"learning_rate": 1.0263130189535225e-05,
"loss": 1.059,
"step": 905
},
{
"epoch": 1.18,
"grad_norm": 0.23455826938152313,
"learning_rate": 1.0244987045830373e-05,
"loss": 1.1422,
"step": 906
},
{
"epoch": 1.18,
"grad_norm": 0.22994031012058258,
"learning_rate": 1.0226843095171828e-05,
"loss": 1.0705,
"step": 907
},
{
"epoch": 1.18,
"grad_norm": 0.2689706087112427,
"learning_rate": 1.020869839732328e-05,
"loss": 1.1133,
"step": 908
},
{
"epoch": 1.18,
"grad_norm": 0.23585017025470734,
"learning_rate": 1.0190553012050868e-05,
"loss": 1.0709,
"step": 909
},
{
"epoch": 1.18,
"grad_norm": 0.2313101440668106,
"learning_rate": 1.0172406999122998e-05,
"loss": 1.114,
"step": 910
},
{
"epoch": 1.19,
"grad_norm": 0.24704484641551971,
"learning_rate": 1.0154260418310148e-05,
"loss": 1.1461,
"step": 911
},
{
"epoch": 1.19,
"grad_norm": 0.23091839253902435,
"learning_rate": 1.0136113329384662e-05,
"loss": 1.127,
"step": 912
},
{
"epoch": 1.19,
"grad_norm": 0.23046812415122986,
"learning_rate": 1.0117965792120558e-05,
"loss": 1.1183,
"step": 913
},
{
"epoch": 1.19,
"grad_norm": 0.23749960958957672,
"learning_rate": 1.0099817866293336e-05,
"loss": 1.123,
"step": 914
},
{
"epoch": 1.19,
"grad_norm": 0.22944402694702148,
"learning_rate": 1.0081669611679768e-05,
"loss": 1.0679,
"step": 915
},
{
"epoch": 1.19,
"grad_norm": 0.22915644943714142,
"learning_rate": 1.0063521088057713e-05,
"loss": 1.1386,
"step": 916
},
{
"epoch": 1.2,
"grad_norm": 0.35416677594184875,
"learning_rate": 1.0045372355205916e-05,
"loss": 1.1181,
"step": 917
},
{
"epoch": 1.2,
"grad_norm": 0.23419159650802612,
"learning_rate": 1.0027223472903811e-05,
"loss": 1.0764,
"step": 918
},
{
"epoch": 1.2,
"grad_norm": 0.23797288537025452,
"learning_rate": 1.0009074500931324e-05,
"loss": 1.1363,
"step": 919
},
{
"epoch": 1.2,
"grad_norm": 0.22536231577396393,
"learning_rate": 9.990925499068676e-06,
"loss": 1.0138,
"step": 920
},
{
"epoch": 1.2,
"grad_norm": 0.23488272726535797,
"learning_rate": 9.972776527096192e-06,
"loss": 1.1134,
"step": 921
},
{
"epoch": 1.2,
"grad_norm": 0.23040106892585754,
"learning_rate": 9.95462764479409e-06,
"loss": 1.0688,
"step": 922
},
{
"epoch": 1.21,
"grad_norm": 0.22860872745513916,
"learning_rate": 9.936478911942292e-06,
"loss": 1.0538,
"step": 923
},
{
"epoch": 1.21,
"grad_norm": 0.23579947650432587,
"learning_rate": 9.918330388320235e-06,
"loss": 1.1139,
"step": 924
},
{
"epoch": 1.21,
"grad_norm": 0.23558510839939117,
"learning_rate": 9.900182133706664e-06,
"loss": 1.1438,
"step": 925
},
{
"epoch": 1.21,
"grad_norm": 0.29158398509025574,
"learning_rate": 9.882034207879443e-06,
"loss": 1.0796,
"step": 926
},
{
"epoch": 1.21,
"grad_norm": 0.24667367339134216,
"learning_rate": 9.863886670615341e-06,
"loss": 1.0123,
"step": 927
},
{
"epoch": 1.22,
"grad_norm": 0.2342100292444229,
"learning_rate": 9.845739581689854e-06,
"loss": 1.1214,
"step": 928
},
{
"epoch": 1.22,
"grad_norm": 0.25424760580062866,
"learning_rate": 9.827593000877006e-06,
"loss": 1.1295,
"step": 929
},
{
"epoch": 1.22,
"grad_norm": 0.23327143490314484,
"learning_rate": 9.809446987949137e-06,
"loss": 1.0927,
"step": 930
},
{
"epoch": 1.22,
"grad_norm": 0.22679513692855835,
"learning_rate": 9.791301602676722e-06,
"loss": 1.0826,
"step": 931
},
{
"epoch": 1.22,
"grad_norm": 0.23058003187179565,
"learning_rate": 9.773156904828177e-06,
"loss": 1.0583,
"step": 932
},
{
"epoch": 1.22,
"grad_norm": 0.2467791885137558,
"learning_rate": 9.755012954169632e-06,
"loss": 1.0698,
"step": 933
},
{
"epoch": 1.23,
"grad_norm": 0.23092246055603027,
"learning_rate": 9.736869810464777e-06,
"loss": 1.065,
"step": 934
},
{
"epoch": 1.23,
"grad_norm": 0.24469321966171265,
"learning_rate": 9.71872753347464e-06,
"loss": 1.0921,
"step": 935
},
{
"epoch": 1.23,
"grad_norm": 0.24068665504455566,
"learning_rate": 9.700586182957378e-06,
"loss": 1.1057,
"step": 936
},
{
"epoch": 1.23,
"grad_norm": 0.23825204372406006,
"learning_rate": 9.68244581866812e-06,
"loss": 1.1011,
"step": 937
},
{
"epoch": 1.23,
"grad_norm": 0.2711021602153778,
"learning_rate": 9.664306500358733e-06,
"loss": 1.0842,
"step": 938
},
{
"epoch": 1.23,
"grad_norm": 0.23717324435710907,
"learning_rate": 9.646168287777633e-06,
"loss": 1.1811,
"step": 939
},
{
"epoch": 1.24,
"grad_norm": 0.23125453293323517,
"learning_rate": 9.628031240669609e-06,
"loss": 1.1428,
"step": 940
},
{
"epoch": 1.24,
"grad_norm": 0.2401907593011856,
"learning_rate": 9.6098954187756e-06,
"loss": 1.164,
"step": 941
},
{
"epoch": 1.24,
"grad_norm": 0.2449384182691574,
"learning_rate": 9.59176088183252e-06,
"loss": 1.0876,
"step": 942
},
{
"epoch": 1.24,
"grad_norm": 0.23211224377155304,
"learning_rate": 9.573627689573036e-06,
"loss": 1.1429,
"step": 943
},
{
"epoch": 1.24,
"grad_norm": 0.25469231605529785,
"learning_rate": 9.555495901725392e-06,
"loss": 1.11,
"step": 944
},
{
"epoch": 1.24,
"grad_norm": 0.47985512018203735,
"learning_rate": 9.537365578013217e-06,
"loss": 1.0603,
"step": 945
},
{
"epoch": 1.25,
"grad_norm": 0.24345599114894867,
"learning_rate": 9.519236778155298e-06,
"loss": 1.1349,
"step": 946
},
{
"epoch": 1.25,
"grad_norm": 0.22641409933567047,
"learning_rate": 9.501109561865415e-06,
"loss": 1.094,
"step": 947
},
{
"epoch": 1.25,
"grad_norm": 0.23288355767726898,
"learning_rate": 9.482983988852137e-06,
"loss": 1.085,
"step": 948
},
{
"epoch": 1.25,
"grad_norm": 0.23492491245269775,
"learning_rate": 9.464860118818599e-06,
"loss": 1.0604,
"step": 949
},
{
"epoch": 1.25,
"grad_norm": 0.23956015706062317,
"learning_rate": 9.446738011462351e-06,
"loss": 1.0588,
"step": 950
}
],
"logging_steps": 1.0,
"max_steps": 1785,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 50,
"total_flos": 5.19510810627712e+19,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}