{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 8.994283294516197,
"eval_steps": 500,
"global_step": 5310,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.01693838661867457,
"grad_norm": 5.0625,
"learning_rate": 1.8832391713747647e-07,
"loss": 2.0482,
"step": 10
},
{
"epoch": 0.03387677323734914,
"grad_norm": 5.59375,
"learning_rate": 3.7664783427495294e-07,
"loss": 2.069,
"step": 20
},
{
"epoch": 0.050815159856023716,
"grad_norm": 5.96875,
"learning_rate": 5.649717514124295e-07,
"loss": 2.0861,
"step": 30
},
{
"epoch": 0.06775354647469828,
"grad_norm": 5.65625,
"learning_rate": 7.532956685499059e-07,
"loss": 2.0413,
"step": 40
},
{
"epoch": 0.08469193309337286,
"grad_norm": 16.125,
"learning_rate": 9.416195856873824e-07,
"loss": 2.0095,
"step": 50
},
{
"epoch": 0.10163031971204743,
"grad_norm": 17.75,
"learning_rate": 1.129943502824859e-06,
"loss": 2.0333,
"step": 60
},
{
"epoch": 0.118568706330722,
"grad_norm": 14.3125,
"learning_rate": 1.3182674199623351e-06,
"loss": 2.0062,
"step": 70
},
{
"epoch": 0.13550709294939656,
"grad_norm": 15.5625,
"learning_rate": 1.5065913370998117e-06,
"loss": 1.9655,
"step": 80
},
{
"epoch": 0.15244547956807114,
"grad_norm": 14.4375,
"learning_rate": 1.6949152542372882e-06,
"loss": 1.9698,
"step": 90
},
{
"epoch": 0.16938386618674572,
"grad_norm": 3.4375,
"learning_rate": 1.8832391713747648e-06,
"loss": 1.9211,
"step": 100
},
{
"epoch": 0.18632225280542028,
"grad_norm": 2.703125,
"learning_rate": 2.071563088512241e-06,
"loss": 1.9071,
"step": 110
},
{
"epoch": 0.20326063942409486,
"grad_norm": 3.453125,
"learning_rate": 2.259887005649718e-06,
"loss": 1.8766,
"step": 120
},
{
"epoch": 0.22019902604276942,
"grad_norm": 2.9375,
"learning_rate": 2.448210922787194e-06,
"loss": 1.8211,
"step": 130
},
{
"epoch": 0.237137412661444,
"grad_norm": 3.859375,
"learning_rate": 2.6365348399246702e-06,
"loss": 1.7678,
"step": 140
},
{
"epoch": 0.2540757992801186,
"grad_norm": 2.515625,
"learning_rate": 2.8248587570621473e-06,
"loss": 1.6984,
"step": 150
},
{
"epoch": 0.2710141858987931,
"grad_norm": 2.671875,
"learning_rate": 3.0131826741996235e-06,
"loss": 1.6916,
"step": 160
},
{
"epoch": 0.2879525725174677,
"grad_norm": 2.515625,
"learning_rate": 3.2015065913371e-06,
"loss": 1.7083,
"step": 170
},
{
"epoch": 0.3048909591361423,
"grad_norm": 2.609375,
"learning_rate": 3.3898305084745763e-06,
"loss": 1.7021,
"step": 180
},
{
"epoch": 0.32182934575481686,
"grad_norm": 3.109375,
"learning_rate": 3.5781544256120534e-06,
"loss": 1.654,
"step": 190
},
{
"epoch": 0.33876773237349145,
"grad_norm": 1.875,
"learning_rate": 3.7664783427495296e-06,
"loss": 1.6125,
"step": 200
},
{
"epoch": 0.355706118992166,
"grad_norm": 1.9765625,
"learning_rate": 3.954802259887006e-06,
"loss": 1.6243,
"step": 210
},
{
"epoch": 0.37264450561084056,
"grad_norm": 2.046875,
"learning_rate": 4.143126177024482e-06,
"loss": 1.6311,
"step": 220
},
{
"epoch": 0.38958289222951514,
"grad_norm": 2.375,
"learning_rate": 4.331450094161959e-06,
"loss": 1.6088,
"step": 230
},
{
"epoch": 0.4065212788481897,
"grad_norm": 1.9765625,
"learning_rate": 4.519774011299436e-06,
"loss": 1.6126,
"step": 240
},
{
"epoch": 0.42345966546686425,
"grad_norm": 2.0,
"learning_rate": 4.7080979284369114e-06,
"loss": 1.5862,
"step": 250
},
{
"epoch": 0.44039805208553884,
"grad_norm": 1.9296875,
"learning_rate": 4.896421845574388e-06,
"loss": 1.5869,
"step": 260
},
{
"epoch": 0.4573364387042134,
"grad_norm": 2.03125,
"learning_rate": 5.084745762711865e-06,
"loss": 1.568,
"step": 270
},
{
"epoch": 0.474274825322888,
"grad_norm": 2.234375,
"learning_rate": 5.2730696798493405e-06,
"loss": 1.4917,
"step": 280
},
{
"epoch": 0.4912132119415626,
"grad_norm": 3.0,
"learning_rate": 5.461393596986818e-06,
"loss": 1.5614,
"step": 290
},
{
"epoch": 0.5081515985602372,
"grad_norm": 2.09375,
"learning_rate": 5.6497175141242946e-06,
"loss": 1.5684,
"step": 300
},
{
"epoch": 0.5250899851789117,
"grad_norm": 1.765625,
"learning_rate": 5.83804143126177e-06,
"loss": 1.5933,
"step": 310
},
{
"epoch": 0.5420283717975862,
"grad_norm": 1.953125,
"learning_rate": 6.026365348399247e-06,
"loss": 1.5864,
"step": 320
},
{
"epoch": 0.5589667584162609,
"grad_norm": 1.9296875,
"learning_rate": 6.2146892655367244e-06,
"loss": 1.539,
"step": 330
},
{
"epoch": 0.5759051450349354,
"grad_norm": 2.125,
"learning_rate": 6.4030131826742e-06,
"loss": 1.4972,
"step": 340
},
{
"epoch": 0.59284353165361,
"grad_norm": 1.765625,
"learning_rate": 6.591337099811677e-06,
"loss": 1.5136,
"step": 350
},
{
"epoch": 0.6097819182722846,
"grad_norm": 1.921875,
"learning_rate": 6.779661016949153e-06,
"loss": 1.5159,
"step": 360
},
{
"epoch": 0.6267203048909591,
"grad_norm": 1.8125,
"learning_rate": 6.96798493408663e-06,
"loss": 1.4829,
"step": 370
},
{
"epoch": 0.6436586915096337,
"grad_norm": 1.9296875,
"learning_rate": 7.156308851224107e-06,
"loss": 1.4972,
"step": 380
},
{
"epoch": 0.6605970781283083,
"grad_norm": 2.140625,
"learning_rate": 7.3446327683615825e-06,
"loss": 1.4115,
"step": 390
},
{
"epoch": 0.6775354647469829,
"grad_norm": 2.296875,
"learning_rate": 7.532956685499059e-06,
"loss": 1.5167,
"step": 400
},
{
"epoch": 0.6944738513656574,
"grad_norm": 1.96875,
"learning_rate": 7.721280602636536e-06,
"loss": 1.4496,
"step": 410
},
{
"epoch": 0.711412237984332,
"grad_norm": 1.7734375,
"learning_rate": 7.909604519774012e-06,
"loss": 1.4258,
"step": 420
},
{
"epoch": 0.7283506246030066,
"grad_norm": 3.28125,
"learning_rate": 8.097928436911489e-06,
"loss": 1.498,
"step": 430
},
{
"epoch": 0.7452890112216811,
"grad_norm": 2.078125,
"learning_rate": 8.286252354048965e-06,
"loss": 1.4588,
"step": 440
},
{
"epoch": 0.7622273978403558,
"grad_norm": 2.03125,
"learning_rate": 8.47457627118644e-06,
"loss": 1.4085,
"step": 450
},
{
"epoch": 0.7791657844590303,
"grad_norm": 1.9765625,
"learning_rate": 8.662900188323918e-06,
"loss": 1.4479,
"step": 460
},
{
"epoch": 0.7961041710777048,
"grad_norm": 2.25,
"learning_rate": 8.851224105461394e-06,
"loss": 1.4711,
"step": 470
},
{
"epoch": 0.8130425576963795,
"grad_norm": 2.078125,
"learning_rate": 9.039548022598871e-06,
"loss": 1.4004,
"step": 480
},
{
"epoch": 0.829980944315054,
"grad_norm": 2.09375,
"learning_rate": 9.227871939736347e-06,
"loss": 1.3785,
"step": 490
},
{
"epoch": 0.8469193309337285,
"grad_norm": 1.765625,
"learning_rate": 9.416195856873823e-06,
"loss": 1.3784,
"step": 500
},
{
"epoch": 0.8469193309337285,
"eval_loss": 1.401244044303894,
"eval_runtime": 21.2893,
"eval_samples_per_second": 24.66,
"eval_steps_per_second": 24.66,
"step": 500
},
{
"epoch": 0.8638577175524031,
"grad_norm": 1.96875,
"learning_rate": 9.6045197740113e-06,
"loss": 1.4401,
"step": 510
},
{
"epoch": 0.8807961041710777,
"grad_norm": 1.6640625,
"learning_rate": 9.792843691148776e-06,
"loss": 1.4191,
"step": 520
},
{
"epoch": 0.8977344907897523,
"grad_norm": 1.6015625,
"learning_rate": 9.981167608286254e-06,
"loss": 1.3972,
"step": 530
},
{
"epoch": 0.9146728774084268,
"grad_norm": 1.7421875,
"learning_rate": 9.999912491660088e-06,
"loss": 1.4051,
"step": 540
},
{
"epoch": 0.9316112640271014,
"grad_norm": 1.8984375,
"learning_rate": 9.999609997627497e-06,
"loss": 1.3855,
"step": 550
},
{
"epoch": 0.948549650645776,
"grad_norm": 1.765625,
"learning_rate": 9.999091450621204e-06,
"loss": 1.4033,
"step": 560
},
{
"epoch": 0.9654880372644505,
"grad_norm": 1.640625,
"learning_rate": 9.99835687304968e-06,
"loss": 1.4429,
"step": 570
},
{
"epoch": 0.9824264238831252,
"grad_norm": 1.8515625,
"learning_rate": 9.997406296656937e-06,
"loss": 1.3925,
"step": 580
},
{
"epoch": 0.9993648105017997,
"grad_norm": 1.78125,
"learning_rate": 9.996239762521152e-06,
"loss": 1.4055,
"step": 590
},
{
"epoch": 1.0163031971204743,
"grad_norm": 2.203125,
"learning_rate": 9.994857321052887e-06,
"loss": 1.2929,
"step": 600
},
{
"epoch": 1.0332415837391489,
"grad_norm": 1.8203125,
"learning_rate": 9.993259031992921e-06,
"loss": 1.2752,
"step": 610
},
{
"epoch": 1.0501799703578234,
"grad_norm": 1.59375,
"learning_rate": 9.991444964409665e-06,
"loss": 1.3491,
"step": 620
},
{
"epoch": 1.067118356976498,
"grad_norm": 1.8515625,
"learning_rate": 9.989415196696169e-06,
"loss": 1.3204,
"step": 630
},
{
"epoch": 1.0840567435951725,
"grad_norm": 1.625,
"learning_rate": 9.98716981656675e-06,
"loss": 1.3425,
"step": 640
},
{
"epoch": 1.1009951302138472,
"grad_norm": 1.921875,
"learning_rate": 9.98470892105318e-06,
"loss": 1.3116,
"step": 650
},
{
"epoch": 1.1179335168325217,
"grad_norm": 1.8203125,
"learning_rate": 9.982032616500516e-06,
"loss": 1.307,
"step": 660
},
{
"epoch": 1.1348719034511963,
"grad_norm": 1.875,
"learning_rate": 9.979141018562494e-06,
"loss": 1.3132,
"step": 670
},
{
"epoch": 1.1518102900698708,
"grad_norm": 1.84375,
"learning_rate": 9.976034252196524e-06,
"loss": 1.3304,
"step": 680
},
{
"epoch": 1.1687486766885455,
"grad_norm": 1.6796875,
"learning_rate": 9.9727124516583e-06,
"loss": 1.347,
"step": 690
},
{
"epoch": 1.18568706330722,
"grad_norm": 1.734375,
"learning_rate": 9.969175760495997e-06,
"loss": 1.2844,
"step": 700
},
{
"epoch": 1.2026254499258946,
"grad_norm": 1.6953125,
"learning_rate": 9.965424331544064e-06,
"loss": 1.2891,
"step": 710
},
{
"epoch": 1.2195638365445691,
"grad_norm": 1.828125,
"learning_rate": 9.961458326916624e-06,
"loss": 1.3258,
"step": 720
},
{
"epoch": 1.2365022231632437,
"grad_norm": 1.609375,
"learning_rate": 9.95727791800046e-06,
"loss": 1.2944,
"step": 730
},
{
"epoch": 1.2534406097819182,
"grad_norm": 1.6171875,
"learning_rate": 9.952883285447622e-06,
"loss": 1.2602,
"step": 740
},
{
"epoch": 1.2703789964005927,
"grad_norm": 1.796875,
"learning_rate": 9.948274619167608e-06,
"loss": 1.2353,
"step": 750
},
{
"epoch": 1.2873173830192675,
"grad_norm": 1.6953125,
"learning_rate": 9.943452118319165e-06,
"loss": 1.2749,
"step": 760
},
{
"epoch": 1.304255769637942,
"grad_norm": 1.765625,
"learning_rate": 9.938415991301675e-06,
"loss": 1.2432,
"step": 770
},
{
"epoch": 1.3211941562566165,
"grad_norm": 1.625,
"learning_rate": 9.933166455746159e-06,
"loss": 1.2824,
"step": 780
},
{
"epoch": 1.3381325428752913,
"grad_norm": 1.5703125,
"learning_rate": 9.927703738505863e-06,
"loss": 1.2335,
"step": 790
},
{
"epoch": 1.3550709294939658,
"grad_norm": 1.5625,
"learning_rate": 9.922028075646465e-06,
"loss": 1.3065,
"step": 800
},
{
"epoch": 1.3720093161126403,
"grad_norm": 1.5703125,
"learning_rate": 9.91613971243586e-06,
"loss": 1.2443,
"step": 810
},
{
"epoch": 1.3889477027313148,
"grad_norm": 1.578125,
"learning_rate": 9.910038903333574e-06,
"loss": 1.2303,
"step": 820
},
{
"epoch": 1.4058860893499894,
"grad_norm": 1.90625,
"learning_rate": 9.90372591197976e-06,
"loss": 1.1844,
"step": 830
},
{
"epoch": 1.422824475968664,
"grad_norm": 1.53125,
"learning_rate": 9.89720101118381e-06,
"loss": 1.2035,
"step": 840
},
{
"epoch": 1.4397628625873384,
"grad_norm": 1.421875,
"learning_rate": 9.890464482912561e-06,
"loss": 1.2399,
"step": 850
},
{
"epoch": 1.4567012492060132,
"grad_norm": 1.578125,
"learning_rate": 9.88351661827811e-06,
"loss": 1.227,
"step": 860
},
{
"epoch": 1.4736396358246877,
"grad_norm": 1.484375,
"learning_rate": 9.876357717525245e-06,
"loss": 1.2589,
"step": 870
},
{
"epoch": 1.4905780224433622,
"grad_norm": 1.84375,
"learning_rate": 9.86898809001845e-06,
"loss": 1.1935,
"step": 880
},
{
"epoch": 1.507516409062037,
"grad_norm": 1.5,
"learning_rate": 9.861408054228556e-06,
"loss": 1.2177,
"step": 890
},
{
"epoch": 1.5244547956807115,
"grad_norm": 1.3828125,
"learning_rate": 9.853617937718966e-06,
"loss": 1.2285,
"step": 900
},
{
"epoch": 1.541393182299386,
"grad_norm": 1.3515625,
"learning_rate": 9.845618077131508e-06,
"loss": 1.226,
"step": 910
},
{
"epoch": 1.5583315689180606,
"grad_norm": 1.421875,
"learning_rate": 9.837408818171884e-06,
"loss": 1.2536,
"step": 920
},
{
"epoch": 1.575269955536735,
"grad_norm": 1.2578125,
"learning_rate": 9.82899051559472e-06,
"loss": 1.2041,
"step": 930
},
{
"epoch": 1.5922083421554096,
"grad_norm": 1.515625,
"learning_rate": 9.820363533188257e-06,
"loss": 1.2279,
"step": 940
},
{
"epoch": 1.6091467287740842,
"grad_norm": 1.2421875,
"learning_rate": 9.811528243758612e-06,
"loss": 1.1829,
"step": 950
},
{
"epoch": 1.6260851153927587,
"grad_norm": 1.6015625,
"learning_rate": 9.802485029113682e-06,
"loss": 1.1698,
"step": 960
},
{
"epoch": 1.6430235020114334,
"grad_norm": 1.234375,
"learning_rate": 9.793234280046627e-06,
"loss": 1.2212,
"step": 970
},
{
"epoch": 1.659961888630108,
"grad_norm": 1.234375,
"learning_rate": 9.783776396318998e-06,
"loss": 1.1377,
"step": 980
},
{
"epoch": 1.6769002752487827,
"grad_norm": 1.3203125,
"learning_rate": 9.77411178664346e-06,
"loss": 1.2086,
"step": 990
},
{
"epoch": 1.6938386618674572,
"grad_norm": 1.3125,
"learning_rate": 9.76424086866612e-06,
"loss": 1.1764,
"step": 1000
},
{
"epoch": 1.6938386618674572,
"eval_loss": 1.2227357625961304,
"eval_runtime": 20.8763,
"eval_samples_per_second": 25.148,
"eval_steps_per_second": 25.148,
"step": 1000
},
{
"epoch": 1.7107770484861318,
"grad_norm": 1.2421875,
"learning_rate": 9.754164068948482e-06,
"loss": 1.1537,
"step": 1010
},
{
"epoch": 1.7277154351048063,
"grad_norm": 1.453125,
"learning_rate": 9.743881822949025e-06,
"loss": 1.2405,
"step": 1020
},
{
"epoch": 1.7446538217234808,
"grad_norm": 1.1875,
"learning_rate": 9.733394575004373e-06,
"loss": 1.2403,
"step": 1030
},
{
"epoch": 1.7615922083421554,
"grad_norm": 1.40625,
"learning_rate": 9.722702778310096e-06,
"loss": 1.152,
"step": 1040
},
{
"epoch": 1.7785305949608299,
"grad_norm": 1.265625,
"learning_rate": 9.711806894901124e-06,
"loss": 1.1596,
"step": 1050
},
{
"epoch": 1.7954689815795044,
"grad_norm": 1.25,
"learning_rate": 9.700707395631787e-06,
"loss": 1.1555,
"step": 1060
},
{
"epoch": 1.8124073681981792,
"grad_norm": 1.171875,
"learning_rate": 9.689404760155465e-06,
"loss": 1.1611,
"step": 1070
},
{
"epoch": 1.8293457548168537,
"grad_norm": 1.640625,
"learning_rate": 9.677899476903857e-06,
"loss": 1.1727,
"step": 1080
},
{
"epoch": 1.8462841414355282,
"grad_norm": 1.2265625,
"learning_rate": 9.666192043065875e-06,
"loss": 1.1526,
"step": 1090
},
{
"epoch": 1.863222528054203,
"grad_norm": 1.265625,
"learning_rate": 9.654282964566159e-06,
"loss": 1.1677,
"step": 1100
},
{
"epoch": 1.8801609146728775,
"grad_norm": 1.1953125,
"learning_rate": 9.642172756043217e-06,
"loss": 1.1363,
"step": 1110
},
{
"epoch": 1.897099301291552,
"grad_norm": 1.1953125,
"learning_rate": 9.629861940827181e-06,
"loss": 1.1216,
"step": 1120
},
{
"epoch": 1.9140376879102265,
"grad_norm": 1.2109375,
"learning_rate": 9.617351050917197e-06,
"loss": 1.1432,
"step": 1130
},
{
"epoch": 1.930976074528901,
"grad_norm": 1.0859375,
"learning_rate": 9.604640626958428e-06,
"loss": 1.1714,
"step": 1140
},
{
"epoch": 1.9479144611475756,
"grad_norm": 1.21875,
"learning_rate": 9.591731218218694e-06,
"loss": 1.1999,
"step": 1150
},
{
"epoch": 1.9648528477662501,
"grad_norm": 1.078125,
"learning_rate": 9.57862338256474e-06,
"loss": 1.1692,
"step": 1160
},
{
"epoch": 1.9817912343849249,
"grad_norm": 1.125,
"learning_rate": 9.565317686438122e-06,
"loss": 1.1193,
"step": 1170
},
{
"epoch": 1.9987296210035994,
"grad_norm": 0.92578125,
"learning_rate": 9.551814704830734e-06,
"loss": 1.1596,
"step": 1180
},
{
"epoch": 2.015668007622274,
"grad_norm": 0.96875,
"learning_rate": 9.538115021259956e-06,
"loss": 1.0715,
"step": 1190
},
{
"epoch": 2.0326063942409487,
"grad_norm": 0.9375,
"learning_rate": 9.52421922774344e-06,
"loss": 1.0312,
"step": 1200
},
{
"epoch": 2.049544780859623,
"grad_norm": 0.9609375,
"learning_rate": 9.510127924773529e-06,
"loss": 1.0574,
"step": 1210
},
{
"epoch": 2.0664831674782977,
"grad_norm": 0.95703125,
"learning_rate": 9.4958417212913e-06,
"loss": 1.0862,
"step": 1220
},
{
"epoch": 2.0834215540969723,
"grad_norm": 0.96875,
"learning_rate": 9.481361234660258e-06,
"loss": 1.0776,
"step": 1230
},
{
"epoch": 2.100359940715647,
"grad_norm": 0.95703125,
"learning_rate": 9.466687090639652e-06,
"loss": 1.088,
"step": 1240
},
{
"epoch": 2.1172983273343213,
"grad_norm": 0.9296875,
"learning_rate": 9.451819923357435e-06,
"loss": 1.1108,
"step": 1250
},
{
"epoch": 2.134236713952996,
"grad_norm": 0.9921875,
"learning_rate": 9.436760375282858e-06,
"loss": 1.0786,
"step": 1260
},
{
"epoch": 2.1511751005716704,
"grad_norm": 0.87890625,
"learning_rate": 9.421509097198715e-06,
"loss": 1.0269,
"step": 1270
},
{
"epoch": 2.168113487190345,
"grad_norm": 1.171875,
"learning_rate": 9.406066748173208e-06,
"loss": 1.1046,
"step": 1280
},
{
"epoch": 2.18505187380902,
"grad_norm": 0.9140625,
"learning_rate": 9.390433995531473e-06,
"loss": 1.0651,
"step": 1290
},
{
"epoch": 2.2019902604276944,
"grad_norm": 0.91796875,
"learning_rate": 9.374611514826746e-06,
"loss": 1.0212,
"step": 1300
},
{
"epoch": 2.218928647046369,
"grad_norm": 1.03125,
"learning_rate": 9.358599989811158e-06,
"loss": 1.0786,
"step": 1310
},
{
"epoch": 2.2358670336650435,
"grad_norm": 1.046875,
"learning_rate": 9.3424001124062e-06,
"loss": 1.0125,
"step": 1320
},
{
"epoch": 2.252805420283718,
"grad_norm": 0.90234375,
"learning_rate": 9.326012582672814e-06,
"loss": 1.0756,
"step": 1330
},
{
"epoch": 2.2697438069023925,
"grad_norm": 1.015625,
"learning_rate": 9.309438108781143e-06,
"loss": 1.1165,
"step": 1340
},
{
"epoch": 2.286682193521067,
"grad_norm": 0.9609375,
"learning_rate": 9.29267740697993e-06,
"loss": 1.037,
"step": 1350
},
{
"epoch": 2.3036205801397416,
"grad_norm": 1.25,
"learning_rate": 9.275731201565564e-06,
"loss": 1.03,
"step": 1360
},
{
"epoch": 2.320558966758416,
"grad_norm": 0.93359375,
"learning_rate": 9.258600224850777e-06,
"loss": 1.0461,
"step": 1370
},
{
"epoch": 2.337497353377091,
"grad_norm": 1.2421875,
"learning_rate": 9.24128521713301e-06,
"loss": 1.0065,
"step": 1380
},
{
"epoch": 2.3544357399957656,
"grad_norm": 1.0234375,
"learning_rate": 9.223786926662404e-06,
"loss": 1.0661,
"step": 1390
},
{
"epoch": 2.37137412661444,
"grad_norm": 1.0078125,
"learning_rate": 9.20610610960948e-06,
"loss": 1.0764,
"step": 1400
},
{
"epoch": 2.3883125132331147,
"grad_norm": 1.015625,
"learning_rate": 9.188243530032458e-06,
"loss": 1.0448,
"step": 1410
},
{
"epoch": 2.405250899851789,
"grad_norm": 0.984375,
"learning_rate": 9.170199959844232e-06,
"loss": 1.0695,
"step": 1420
},
{
"epoch": 2.4221892864704637,
"grad_norm": 1.015625,
"learning_rate": 9.15197617877902e-06,
"loss": 0.9995,
"step": 1430
},
{
"epoch": 2.4391276730891382,
"grad_norm": 1.015625,
"learning_rate": 9.133572974358669e-06,
"loss": 1.0307,
"step": 1440
},
{
"epoch": 2.4560660597078128,
"grad_norm": 1.1328125,
"learning_rate": 9.114991141858619e-06,
"loss": 1.0004,
"step": 1450
},
{
"epoch": 2.4730044463264873,
"grad_norm": 1.390625,
"learning_rate": 9.096231484273535e-06,
"loss": 1.0553,
"step": 1460
},
{
"epoch": 2.489942832945162,
"grad_norm": 1.1328125,
"learning_rate": 9.077294812282615e-06,
"loss": 1.0481,
"step": 1470
},
{
"epoch": 2.5068812195638364,
"grad_norm": 1.203125,
"learning_rate": 9.058181944214547e-06,
"loss": 1.0438,
"step": 1480
},
{
"epoch": 2.523819606182511,
"grad_norm": 1.0234375,
"learning_rate": 9.038893706012153e-06,
"loss": 1.0458,
"step": 1490
},
{
"epoch": 2.5407579928011854,
"grad_norm": 1.4765625,
"learning_rate": 9.01943093119669e-06,
"loss": 0.9808,
"step": 1500
},
{
"epoch": 2.5407579928011854,
"eval_loss": 1.1499994993209839,
"eval_runtime": 21.1383,
"eval_samples_per_second": 24.836,
"eval_steps_per_second": 24.836,
"step": 1500
},
{
"epoch": 2.5576963794198604,
"grad_norm": 1.4609375,
"learning_rate": 8.999794460831836e-06,
"loss": 1.0539,
"step": 1510
},
{
"epoch": 2.574634766038535,
"grad_norm": 1.6640625,
"learning_rate": 8.979985143487343e-06,
"loss": 1.046,
"step": 1520
},
{
"epoch": 2.5915731526572094,
"grad_norm": 1.4140625,
"learning_rate": 8.960003835202369e-06,
"loss": 1.0188,
"step": 1530
},
{
"epoch": 2.608511539275884,
"grad_norm": 1.453125,
"learning_rate": 8.939851399448478e-06,
"loss": 1.0423,
"step": 1540
},
{
"epoch": 2.6254499258945585,
"grad_norm": 2.921875,
"learning_rate": 8.919528707092333e-06,
"loss": 1.085,
"step": 1550
},
{
"epoch": 2.642388312513233,
"grad_norm": 2.859375,
"learning_rate": 8.899036636358058e-06,
"loss": 1.0252,
"step": 1560
},
{
"epoch": 2.6593266991319076,
"grad_norm": 3.125,
"learning_rate": 8.878376072789292e-06,
"loss": 1.0246,
"step": 1570
},
{
"epoch": 2.6762650857505825,
"grad_norm": 2.78125,
"learning_rate": 8.857547909210913e-06,
"loss": 1.0358,
"step": 1580
},
{
"epoch": 2.693203472369257,
"grad_norm": 2.59375,
"learning_rate": 8.836553045690462e-06,
"loss": 1.0746,
"step": 1590
},
{
"epoch": 2.7101418589879316,
"grad_norm": 3.96875,
"learning_rate": 8.81539238949925e-06,
"loss": 1.0806,
"step": 1600
},
{
"epoch": 2.727080245606606,
"grad_norm": 4.15625,
"learning_rate": 8.794066855073136e-06,
"loss": 0.9917,
"step": 1610
},
{
"epoch": 2.7440186322252806,
"grad_norm": 4.5625,
"learning_rate": 8.772577363973032e-06,
"loss": 1.0071,
"step": 1620
},
{
"epoch": 2.760957018843955,
"grad_norm": 3.25,
"learning_rate": 8.750924844845065e-06,
"loss": 1.0139,
"step": 1630
},
{
"epoch": 2.7778954054626297,
"grad_norm": 4.25,
"learning_rate": 8.729110233380445e-06,
"loss": 1.0684,
"step": 1640
},
{
"epoch": 2.7948337920813042,
"grad_norm": 9.125,
"learning_rate": 8.707134472275042e-06,
"loss": 0.9606,
"step": 1650
},
{
"epoch": 2.8117721786999788,
"grad_norm": 7.78125,
"learning_rate": 8.684998511188634e-06,
"loss": 1.0529,
"step": 1660
},
{
"epoch": 2.8287105653186533,
"grad_norm": 7.34375,
"learning_rate": 8.662703306703878e-06,
"loss": 1.0939,
"step": 1670
},
{
"epoch": 2.845648951937328,
"grad_norm": 8.375,
"learning_rate": 8.640249822284975e-06,
"loss": 1.0322,
"step": 1680
},
{
"epoch": 2.8625873385560023,
"grad_norm": 7.40625,
"learning_rate": 8.617639028236021e-06,
"loss": 1.0303,
"step": 1690
},
{
"epoch": 2.879525725174677,
"grad_norm": 2.625,
"learning_rate": 8.59487190165909e-06,
"loss": 1.0173,
"step": 1700
},
{
"epoch": 2.896464111793352,
"grad_norm": 2.328125,
"learning_rate": 8.571949426412002e-06,
"loss": 1.0218,
"step": 1710
},
{
"epoch": 2.9134024984120264,
"grad_norm": 2.140625,
"learning_rate": 8.54887259306581e-06,
"loss": 1.0029,
"step": 1720
},
{
"epoch": 2.930340885030701,
"grad_norm": 2.28125,
"learning_rate": 8.525642398861997e-06,
"loss": 1.0295,
"step": 1730
},
{
"epoch": 2.9472792716493754,
"grad_norm": 2.5,
"learning_rate": 8.50225984766937e-06,
"loss": 1.0432,
"step": 1740
},
{
"epoch": 2.96421765826805,
"grad_norm": 2.0625,
"learning_rate": 8.478725949940687e-06,
"loss": 1.026,
"step": 1750
},
{
"epoch": 2.9811560448867245,
"grad_norm": 2.265625,
"learning_rate": 8.455041722668993e-06,
"loss": 1.0523,
"step": 1760
},
{
"epoch": 2.998094431505399,
"grad_norm": 1.8671875,
"learning_rate": 8.43120818934367e-06,
"loss": 1.0287,
"step": 1770
},
{
"epoch": 3.0150328181240735,
"grad_norm": 2.109375,
"learning_rate": 8.407226379906197e-06,
"loss": 0.9998,
"step": 1780
},
{
"epoch": 3.031971204742748,
"grad_norm": 2.140625,
"learning_rate": 8.383097330705664e-06,
"loss": 1.0157,
"step": 1790
},
{
"epoch": 3.048909591361423,
"grad_norm": 1.8828125,
"learning_rate": 8.358822084453964e-06,
"loss": 0.9514,
"step": 1800
},
{
"epoch": 3.0658479779800976,
"grad_norm": 1.796875,
"learning_rate": 8.334401690180755e-06,
"loss": 1.0163,
"step": 1810
},
{
"epoch": 3.082786364598772,
"grad_norm": 1.8828125,
"learning_rate": 8.30983720318811e-06,
"loss": 0.969,
"step": 1820
},
{
"epoch": 3.0997247512174466,
"grad_norm": 1.8203125,
"learning_rate": 8.285129685004922e-06,
"loss": 0.9769,
"step": 1830
},
{
"epoch": 3.116663137836121,
"grad_norm": 2.09375,
"learning_rate": 8.260280203341026e-06,
"loss": 0.9997,
"step": 1840
},
{
"epoch": 3.1336015244547957,
"grad_norm": 1.828125,
"learning_rate": 8.235289832041066e-06,
"loss": 0.9577,
"step": 1850
},
{
"epoch": 3.15053991107347,
"grad_norm": 2.4375,
"learning_rate": 8.210159651038081e-06,
"loss": 0.9457,
"step": 1860
},
{
"epoch": 3.1674782976921447,
"grad_norm": 1.7734375,
"learning_rate": 8.184890746306848e-06,
"loss": 0.9381,
"step": 1870
},
{
"epoch": 3.1844166843108193,
"grad_norm": 1.96875,
"learning_rate": 8.15948420981694e-06,
"loss": 0.9709,
"step": 1880
},
{
"epoch": 3.201355070929494,
"grad_norm": 1.8125,
"learning_rate": 8.133941139485551e-06,
"loss": 0.9769,
"step": 1890
},
{
"epoch": 3.2182934575481683,
"grad_norm": 1.875,
"learning_rate": 8.108262639130038e-06,
"loss": 0.9064,
"step": 1900
},
{
"epoch": 3.2352318441668433,
"grad_norm": 1.71875,
"learning_rate": 8.082449818420227e-06,
"loss": 0.9944,
"step": 1910
},
{
"epoch": 3.252170230785518,
"grad_norm": 2.125,
"learning_rate": 8.056503792830458e-06,
"loss": 0.9484,
"step": 1920
},
{
"epoch": 3.2691086174041923,
"grad_norm": 1.8046875,
"learning_rate": 8.030425683591391e-06,
"loss": 0.9666,
"step": 1930
},
{
"epoch": 3.286047004022867,
"grad_norm": 1.890625,
"learning_rate": 8.004216617641532e-06,
"loss": 0.9871,
"step": 1940
},
{
"epoch": 3.3029853906415414,
"grad_norm": 1.765625,
"learning_rate": 7.977877727578556e-06,
"loss": 0.9462,
"step": 1950
},
{
"epoch": 3.319923777260216,
"grad_norm": 2.09375,
"learning_rate": 7.951410151610343e-06,
"loss": 0.9509,
"step": 1960
},
{
"epoch": 3.3368621638788905,
"grad_norm": 1.7734375,
"learning_rate": 7.92481503350581e-06,
"loss": 0.9299,
"step": 1970
},
{
"epoch": 3.353800550497565,
"grad_norm": 1.734375,
"learning_rate": 7.898093522545471e-06,
"loss": 0.9824,
"step": 1980
},
{
"epoch": 3.3707389371162395,
"grad_norm": 1.78125,
"learning_rate": 7.871246773471779e-06,
"loss": 0.9336,
"step": 1990
},
{
"epoch": 3.3876773237349145,
"grad_norm": 2.015625,
"learning_rate": 7.844275946439216e-06,
"loss": 0.9778,
"step": 2000
},
{
"epoch": 3.3876773237349145,
"eval_loss": 1.1204777956008911,
"eval_runtime": 20.947,
"eval_samples_per_second": 25.063,
"eval_steps_per_second": 25.063,
"step": 2000
},
{
"epoch": 3.404615710353589,
"grad_norm": 2.359375,
"learning_rate": 7.817182206964177e-06,
"loss": 0.9817,
"step": 2010
},
{
"epoch": 3.4215540969722635,
"grad_norm": 2.0625,
"learning_rate": 7.789966725874578e-06,
"loss": 0.9942,
"step": 2020
},
{
"epoch": 3.438492483590938,
"grad_norm": 1.734375,
"learning_rate": 7.76263067925928e-06,
"loss": 0.9783,
"step": 2030
},
{
"epoch": 3.4554308702096126,
"grad_norm": 1.796875,
"learning_rate": 7.735175248417255e-06,
"loss": 0.9551,
"step": 2040
},
{
"epoch": 3.472369256828287,
"grad_norm": 2.046875,
"learning_rate": 7.707601619806547e-06,
"loss": 0.9592,
"step": 2050
},
{
"epoch": 3.4893076434469616,
"grad_norm": 2.40625,
"learning_rate": 7.679910984992988e-06,
"loss": 0.902,
"step": 2060
},
{
"epoch": 3.506246030065636,
"grad_norm": 2.203125,
"learning_rate": 7.652104540598712e-06,
"loss": 0.9763,
"step": 2070
},
{
"epoch": 3.5231844166843107,
"grad_norm": 2.0625,
"learning_rate": 7.6241834882504516e-06,
"loss": 0.9522,
"step": 2080
},
{
"epoch": 3.5401228033029852,
"grad_norm": 2.140625,
"learning_rate": 7.596149034527594e-06,
"loss": 0.992,
"step": 2090
},
{
"epoch": 3.5570611899216598,
"grad_norm": 1.859375,
"learning_rate": 7.5680023909100545e-06,
"loss": 0.9366,
"step": 2100
},
{
"epoch": 3.5739995765403343,
"grad_norm": 1.7421875,
"learning_rate": 7.539744773725916e-06,
"loss": 0.9888,
"step": 2110
},
{
"epoch": 3.5909379631590093,
"grad_norm": 1.71875,
"learning_rate": 7.511377404098876e-06,
"loss": 0.9801,
"step": 2120
},
{
"epoch": 3.607876349777684,
"grad_norm": 2.0,
"learning_rate": 7.482901507895463e-06,
"loss": 0.9589,
"step": 2130
},
{
"epoch": 3.6248147363963583,
"grad_norm": 3.65625,
"learning_rate": 7.454318315672074e-06,
"loss": 0.9926,
"step": 2140
},
{
"epoch": 3.641753123015033,
"grad_norm": 1.859375,
"learning_rate": 7.425629062621784e-06,
"loss": 0.9448,
"step": 2150
},
{
"epoch": 3.6586915096337074,
"grad_norm": 2.140625,
"learning_rate": 7.396834988520989e-06,
"loss": 0.9507,
"step": 2160
},
{
"epoch": 3.675629896252382,
"grad_norm": 2.125,
"learning_rate": 7.36793733767581e-06,
"loss": 0.9719,
"step": 2170
},
{
"epoch": 3.6925682828710564,
"grad_norm": 2.34375,
"learning_rate": 7.338937358868334e-06,
"loss": 0.976,
"step": 2180
},
{
"epoch": 3.709506669489731,
"grad_norm": 2.28125,
"learning_rate": 7.309836305302638e-06,
"loss": 0.9431,
"step": 2190
},
{
"epoch": 3.726445056108406,
"grad_norm": 1.734375,
"learning_rate": 7.2806354345506525e-06,
"loss": 0.9833,
"step": 2200
},
{
"epoch": 3.7433834427270805,
"grad_norm": 1.859375,
"learning_rate": 7.251336008497793e-06,
"loss": 0.9356,
"step": 2210
},
{
"epoch": 3.760321829345755,
"grad_norm": 1.8203125,
"learning_rate": 7.221939293288445e-06,
"loss": 0.9911,
"step": 2220
},
{
"epoch": 3.7772602159644295,
"grad_norm": 1.8671875,
"learning_rate": 7.192446559271247e-06,
"loss": 0.9569,
"step": 2230
},
{
"epoch": 3.794198602583104,
"grad_norm": 1.875,
"learning_rate": 7.162859080944182e-06,
"loss": 0.9907,
"step": 2240
},
{
"epoch": 3.8111369892017786,
"grad_norm": 1.6484375,
"learning_rate": 7.133178136899522e-06,
"loss": 0.9672,
"step": 2250
},
{
"epoch": 3.828075375820453,
"grad_norm": 2.359375,
"learning_rate": 7.103405009768554e-06,
"loss": 0.9754,
"step": 2260
},
{
"epoch": 3.8450137624391276,
"grad_norm": 1.7421875,
"learning_rate": 7.073540986166166e-06,
"loss": 0.9548,
"step": 2270
},
{
"epoch": 3.861952149057802,
"grad_norm": 1.7421875,
"learning_rate": 7.0435873566352434e-06,
"loss": 0.9682,
"step": 2280
},
{
"epoch": 3.8788905356764767,
"grad_norm": 1.8515625,
"learning_rate": 7.013545415590897e-06,
"loss": 0.8956,
"step": 2290
},
{
"epoch": 3.895828922295151,
"grad_norm": 1.96875,
"learning_rate": 6.983416461264527e-06,
"loss": 0.9314,
"step": 2300
},
{
"epoch": 3.9127673089138257,
"grad_norm": 1.890625,
"learning_rate": 6.953201795647728e-06,
"loss": 0.9101,
"step": 2310
},
{
"epoch": 3.9297056955325003,
"grad_norm": 1.921875,
"learning_rate": 6.922902724436016e-06,
"loss": 0.9463,
"step": 2320
},
{
"epoch": 3.9466440821511752,
"grad_norm": 1.8984375,
"learning_rate": 6.892520556972404e-06,
"loss": 0.9443,
"step": 2330
},
{
"epoch": 3.9635824687698498,
"grad_norm": 1.8125,
"learning_rate": 6.86205660619083e-06,
"loss": 0.9371,
"step": 2340
},
{
"epoch": 3.9805208553885243,
"grad_norm": 1.9375,
"learning_rate": 6.8315121885594105e-06,
"loss": 0.95,
"step": 2350
},
{
"epoch": 3.997459242007199,
"grad_norm": 1.9140625,
"learning_rate": 6.800888624023552e-06,
"loss": 0.9412,
"step": 2360
},
{
"epoch": 4.014397628625874,
"grad_norm": 1.59375,
"learning_rate": 6.770187235948916e-06,
"loss": 0.8627,
"step": 2370
},
{
"epoch": 4.031336015244548,
"grad_norm": 1.671875,
"learning_rate": 6.739409351064225e-06,
"loss": 0.8663,
"step": 2380
},
{
"epoch": 4.048274401863223,
"grad_norm": 1.8046875,
"learning_rate": 6.708556299403935e-06,
"loss": 0.8087,
"step": 2390
},
{
"epoch": 4.065212788481897,
"grad_norm": 1.7578125,
"learning_rate": 6.6776294142507535e-06,
"loss": 0.8501,
"step": 2400
},
{
"epoch": 4.082151175100572,
"grad_norm": 1.6328125,
"learning_rate": 6.646630032078024e-06,
"loss": 0.854,
"step": 2410
},
{
"epoch": 4.099089561719246,
"grad_norm": 1.5,
"learning_rate": 6.615559492491981e-06,
"loss": 0.884,
"step": 2420
},
{
"epoch": 4.116027948337921,
"grad_norm": 1.890625,
"learning_rate": 6.584419138173846e-06,
"loss": 0.8693,
"step": 2430
},
{
"epoch": 4.1329663349565955,
"grad_norm": 1.625,
"learning_rate": 6.553210314821814e-06,
"loss": 0.8397,
"step": 2440
},
{
"epoch": 4.14990472157527,
"grad_norm": 1.7265625,
"learning_rate": 6.521934371092901e-06,
"loss": 0.8355,
"step": 2450
},
{
"epoch": 4.1668431081939445,
"grad_norm": 1.5,
"learning_rate": 6.490592658544661e-06,
"loss": 0.8784,
"step": 2460
},
{
"epoch": 4.183781494812619,
"grad_norm": 1.484375,
"learning_rate": 6.459186531576772e-06,
"loss": 0.8902,
"step": 2470
},
{
"epoch": 4.200719881431294,
"grad_norm": 1.640625,
"learning_rate": 6.427717347372523e-06,
"loss": 0.8175,
"step": 2480
},
{
"epoch": 4.217658268049968,
"grad_norm": 1.7109375,
"learning_rate": 6.396186465840152e-06,
"loss": 0.8717,
"step": 2490
},
{
"epoch": 4.234596654668643,
"grad_norm": 1.5234375,
"learning_rate": 6.364595249554084e-06,
"loss": 0.8815,
"step": 2500
},
{
"epoch": 4.234596654668643,
"eval_loss": 1.09398353099823,
"eval_runtime": 20.9341,
"eval_samples_per_second": 25.079,
"eval_steps_per_second": 25.079,
"step": 2500
},
{
"epoch": 4.251535041287317,
"grad_norm": 1.4296875,
"learning_rate": 6.332945063696049e-06,
"loss": 0.8658,
"step": 2510
},
{
"epoch": 4.268473427905992,
"grad_norm": 1.3828125,
"learning_rate": 6.3012372759960825e-06,
"loss": 0.8465,
"step": 2520
},
{
"epoch": 4.285411814524666,
"grad_norm": 1.5234375,
"learning_rate": 6.26947325667343e-06,
"loss": 0.8707,
"step": 2530
},
{
"epoch": 4.302350201143341,
"grad_norm": 1.4140625,
"learning_rate": 6.237654378377324e-06,
"loss": 0.8921,
"step": 2540
},
{
"epoch": 4.319288587762015,
"grad_norm": 1.5390625,
"learning_rate": 6.2057820161276715e-06,
"loss": 0.8703,
"step": 2550
},
{
"epoch": 4.33622697438069,
"grad_norm": 1.2734375,
"learning_rate": 6.173857547255634e-06,
"loss": 0.8467,
"step": 2560
},
{
"epoch": 4.353165360999364,
"grad_norm": 1.578125,
"learning_rate": 6.141882351344106e-06,
"loss": 0.8581,
"step": 2570
},
{
"epoch": 4.37010374761804,
"grad_norm": 1.4609375,
"learning_rate": 6.1098578101680986e-06,
"loss": 0.8617,
"step": 2580
},
{
"epoch": 4.387042134236714,
"grad_norm": 1.3046875,
"learning_rate": 6.077785307635027e-06,
"loss": 0.8082,
"step": 2590
},
{
"epoch": 4.403980520855389,
"grad_norm": 1.34375,
"learning_rate": 6.045666229724909e-06,
"loss": 0.8048,
"step": 2600
},
{
"epoch": 4.420918907474063,
"grad_norm": 1.4453125,
"learning_rate": 6.013501964430468e-06,
"loss": 0.8537,
"step": 2610
},
{
"epoch": 4.437857294092738,
"grad_norm": 1.3359375,
"learning_rate": 5.981293901697153e-06,
"loss": 0.8683,
"step": 2620
},
{
"epoch": 4.454795680711412,
"grad_norm": 1.1953125,
"learning_rate": 5.9490434333630735e-06,
"loss": 0.8599,
"step": 2630
},
{
"epoch": 4.471734067330087,
"grad_norm": 1.3203125,
"learning_rate": 5.916751953098854e-06,
"loss": 0.8811,
"step": 2640
},
{
"epoch": 4.4886724539487615,
"grad_norm": 1.21875,
"learning_rate": 5.884420856347406e-06,
"loss": 0.8891,
"step": 2650
},
{
"epoch": 4.505610840567436,
"grad_norm": 1.3515625,
"learning_rate": 5.852051540263628e-06,
"loss": 0.8634,
"step": 2660
},
{
"epoch": 4.5225492271861105,
"grad_norm": 1.28125,
"learning_rate": 5.819645403654027e-06,
"loss": 0.842,
"step": 2670
},
{
"epoch": 4.539487613804785,
"grad_norm": 1.6484375,
"learning_rate": 5.787203846916273e-06,
"loss": 0.8485,
"step": 2680
},
{
"epoch": 4.55642600042346,
"grad_norm": 1.265625,
"learning_rate": 5.754728271978675e-06,
"loss": 0.883,
"step": 2690
},
{
"epoch": 4.573364387042134,
"grad_norm": 1.2265625,
"learning_rate": 5.722220082239608e-06,
"loss": 0.8457,
"step": 2700
},
{
"epoch": 4.590302773660809,
"grad_norm": 1.1640625,
"learning_rate": 5.689680682506861e-06,
"loss": 0.8999,
"step": 2710
},
{
"epoch": 4.607241160279483,
"grad_norm": 1.171875,
"learning_rate": 5.657111478936926e-06,
"loss": 0.8391,
"step": 2720
},
{
"epoch": 4.624179546898158,
"grad_norm": 1.2265625,
"learning_rate": 5.6245138789742444e-06,
"loss": 0.8313,
"step": 2730
},
{
"epoch": 4.641117933516832,
"grad_norm": 1.1640625,
"learning_rate": 5.591889291290373e-06,
"loss": 0.8709,
"step": 2740
},
{
"epoch": 4.658056320135507,
"grad_norm": 0.9921875,
"learning_rate": 5.559239125723119e-06,
"loss": 0.8507,
"step": 2750
},
{
"epoch": 4.674994706754182,
"grad_norm": 0.96484375,
"learning_rate": 5.5265647932156066e-06,
"loss": 0.9057,
"step": 2760
},
{
"epoch": 4.691933093372857,
"grad_norm": 1.1171875,
"learning_rate": 5.493867705755312e-06,
"loss": 0.8386,
"step": 2770
},
{
"epoch": 4.708871479991531,
"grad_norm": 1.0,
"learning_rate": 5.461149276313046e-06,
"loss": 0.9338,
"step": 2780
},
{
"epoch": 4.725809866610206,
"grad_norm": 1.0234375,
"learning_rate": 5.428410918781885e-06,
"loss": 0.8176,
"step": 2790
},
{
"epoch": 4.74274825322888,
"grad_norm": 1.046875,
"learning_rate": 5.395654047916083e-06,
"loss": 0.8609,
"step": 2800
},
{
"epoch": 4.759686639847555,
"grad_norm": 1.09375,
"learning_rate": 5.362880079269921e-06,
"loss": 0.8494,
"step": 2810
},
{
"epoch": 4.776625026466229,
"grad_norm": 1.1953125,
"learning_rate": 5.330090429136553e-06,
"loss": 0.8254,
"step": 2820
},
{
"epoch": 4.793563413084904,
"grad_norm": 1.2890625,
"learning_rate": 5.297286514486785e-06,
"loss": 0.8689,
"step": 2830
},
{
"epoch": 4.810501799703578,
"grad_norm": 1.125,
"learning_rate": 5.26446975290785e-06,
"loss": 0.8483,
"step": 2840
},
{
"epoch": 4.827440186322253,
"grad_norm": 0.98046875,
"learning_rate": 5.23164156254215e-06,
"loss": 0.8436,
"step": 2850
},
{
"epoch": 4.844378572940927,
"grad_norm": 0.91796875,
"learning_rate": 5.19880336202597e-06,
"loss": 0.8584,
"step": 2860
},
{
"epoch": 4.861316959559602,
"grad_norm": 1.2265625,
"learning_rate": 5.165956570428173e-06,
"loss": 0.8122,
"step": 2870
},
{
"epoch": 4.8782553461782765,
"grad_norm": 1.0703125,
"learning_rate": 5.133102607188875e-06,
"loss": 0.8705,
"step": 2880
},
{
"epoch": 4.895193732796951,
"grad_norm": 1.34375,
"learning_rate": 5.100242892058108e-06,
"loss": 0.8788,
"step": 2890
},
{
"epoch": 4.9121321194156256,
"grad_norm": 1.015625,
"learning_rate": 5.067378845034468e-06,
"loss": 0.8663,
"step": 2900
},
{
"epoch": 4.9290705060343,
"grad_norm": 1.046875,
"learning_rate": 5.034511886303746e-06,
"loss": 0.8706,
"step": 2910
},
{
"epoch": 4.946008892652975,
"grad_norm": 1.1171875,
"learning_rate": 5.00164343617756e-06,
"loss": 0.845,
"step": 2920
},
{
"epoch": 4.962947279271649,
"grad_norm": 1.0078125,
"learning_rate": 4.968774915031982e-06,
"loss": 0.8165,
"step": 2930
},
{
"epoch": 4.979885665890324,
"grad_norm": 1.3125,
"learning_rate": 4.9359077432461455e-06,
"loss": 0.8719,
"step": 2940
},
{
"epoch": 4.996824052508998,
"grad_norm": 0.88671875,
"learning_rate": 4.903043341140879e-06,
"loss": 0.8579,
"step": 2950
},
{
"epoch": 5.013762439127673,
"grad_norm": 0.921875,
"learning_rate": 4.870183128917318e-06,
"loss": 0.7986,
"step": 2960
},
{
"epoch": 5.030700825746347,
"grad_norm": 1.140625,
"learning_rate": 4.837328526595539e-06,
"loss": 0.8119,
"step": 2970
},
{
"epoch": 5.047639212365023,
"grad_norm": 0.97265625,
"learning_rate": 4.804480953953193e-06,
"loss": 0.8013,
"step": 2980
},
{
"epoch": 5.064577598983697,
"grad_norm": 0.9296875,
"learning_rate": 4.771641830464146e-06,
"loss": 0.7954,
"step": 2990
},
{
"epoch": 5.081515985602372,
"grad_norm": 1.046875,
"learning_rate": 4.738812575237148e-06,
"loss": 0.8159,
"step": 3000
},
{
"epoch": 5.081515985602372,
"eval_loss": 1.0747902393341064,
"eval_runtime": 20.9737,
"eval_samples_per_second": 25.031,
"eval_steps_per_second": 25.031,
"step": 3000
},
{
"epoch": 5.098454372221046,
"grad_norm": 1.0625,
"learning_rate": 4.705994606954497e-06,
"loss": 0.7888,
"step": 3010
},
{
"epoch": 5.115392758839721,
"grad_norm": 1.09375,
"learning_rate": 4.673189343810747e-06,
"loss": 0.8088,
"step": 3020
},
{
"epoch": 5.132331145458395,
"grad_norm": 1.0,
"learning_rate": 4.640398203451402e-06,
"loss": 0.7762,
"step": 3030
},
{
"epoch": 5.14926953207707,
"grad_norm": 1.078125,
"learning_rate": 4.607622602911668e-06,
"loss": 0.8108,
"step": 3040
},
{
"epoch": 5.166207918695744,
"grad_norm": 1.1171875,
"learning_rate": 4.574863958555215e-06,
"loss": 0.7981,
"step": 3050
},
{
"epoch": 5.183146305314419,
"grad_norm": 1.1953125,
"learning_rate": 4.542123686012969e-06,
"loss": 0.8295,
"step": 3060
},
{
"epoch": 5.200084691933093,
"grad_norm": 1.1875,
"learning_rate": 4.509403200121936e-06,
"loss": 0.7986,
"step": 3070
},
{
"epoch": 5.217023078551768,
"grad_norm": 1.1875,
"learning_rate": 4.47670391486406e-06,
"loss": 0.792,
"step": 3080
},
{
"epoch": 5.2339614651704425,
"grad_norm": 1.1484375,
"learning_rate": 4.44402724330513e-06,
"loss": 0.8072,
"step": 3090
},
{
"epoch": 5.250899851789117,
"grad_norm": 1.3203125,
"learning_rate": 4.4113745975337e-06,
"loss": 0.7877,
"step": 3100
},
{
"epoch": 5.2678382384077915,
"grad_norm": 1.4921875,
"learning_rate": 4.378747388600075e-06,
"loss": 0.7823,
"step": 3110
},
{
"epoch": 5.284776625026466,
"grad_norm": 1.40625,
"learning_rate": 4.346147026455334e-06,
"loss": 0.7957,
"step": 3120
},
{
"epoch": 5.301715011645141,
"grad_norm": 1.390625,
"learning_rate": 4.313574919890402e-06,
"loss": 0.7562,
"step": 3130
},
{
"epoch": 5.318653398263815,
"grad_norm": 1.3671875,
"learning_rate": 4.281032476475165e-06,
"loss": 0.7981,
"step": 3140
},
{
"epoch": 5.33559178488249,
"grad_norm": 2.265625,
"learning_rate": 4.248521102497649e-06,
"loss": 0.7806,
"step": 3150
},
{
"epoch": 5.352530171501164,
"grad_norm": 2.578125,
"learning_rate": 4.216042202903246e-06,
"loss": 0.848,
"step": 3160
},
{
"epoch": 5.369468558119839,
"grad_norm": 2.171875,
"learning_rate": 4.183597181234001e-06,
"loss": 0.7903,
"step": 3170
},
{
"epoch": 5.386406944738514,
"grad_norm": 2.515625,
"learning_rate": 4.151187439567961e-06,
"loss": 0.7914,
"step": 3180
},
{
"epoch": 5.403345331357189,
"grad_norm": 2.25,
"learning_rate": 4.118814378458582e-06,
"loss": 0.7983,
"step": 3190
},
{
"epoch": 5.420283717975863,
"grad_norm": 3.09375,
"learning_rate": 4.086479396874206e-06,
"loss": 0.7606,
"step": 3200
},
{
"epoch": 5.437222104594538,
"grad_norm": 2.828125,
"learning_rate": 4.054183892137611e-06,
"loss": 0.7947,
"step": 3210
},
{
"epoch": 5.454160491213212,
"grad_norm": 4.375,
"learning_rate": 4.021929259865624e-06,
"loss": 0.7764,
"step": 3220
},
{
"epoch": 5.471098877831887,
"grad_norm": 4.46875,
"learning_rate": 3.989716893908807e-06,
"loss": 0.7829,
"step": 3230
},
{
"epoch": 5.488037264450561,
"grad_norm": 3.5625,
"learning_rate": 3.9575481862912335e-06,
"loss": 0.7811,
"step": 3240
},
{
"epoch": 5.504975651069236,
"grad_norm": 8.125,
"learning_rate": 3.925424527150321e-06,
"loss": 0.804,
"step": 3250
},
{
"epoch": 5.52191403768791,
"grad_norm": 7.96875,
"learning_rate": 3.893347304676766e-06,
"loss": 0.7975,
"step": 3260
},
{
"epoch": 5.538852424306585,
"grad_norm": 7.6875,
"learning_rate": 3.861317905054551e-06,
"loss": 0.7812,
"step": 3270
},
{
"epoch": 5.555790810925259,
"grad_norm": 8.0625,
"learning_rate": 3.8293377124010476e-06,
"loss": 0.8092,
"step": 3280
},
{
"epoch": 5.572729197543934,
"grad_norm": 8.0625,
"learning_rate": 3.797408108707193e-06,
"loss": 0.8141,
"step": 3290
},
{
"epoch": 5.5896675841626084,
"grad_norm": 2.125,
"learning_rate": 3.7655304737777775e-06,
"loss": 0.815,
"step": 3300
},
{
"epoch": 5.606605970781283,
"grad_norm": 2.109375,
"learning_rate": 3.733706185171819e-06,
"loss": 0.7505,
"step": 3310
},
{
"epoch": 5.6235443573999575,
"grad_norm": 1.96875,
"learning_rate": 3.701936618143025e-06,
"loss": 0.7535,
"step": 3320
},
{
"epoch": 5.640482744018632,
"grad_norm": 2.46875,
"learning_rate": 3.6702231455803662e-06,
"loss": 0.7703,
"step": 3330
},
{
"epoch": 5.657421130637307,
"grad_norm": 2.15625,
"learning_rate": 3.6385671379487496e-06,
"loss": 0.7493,
"step": 3340
},
{
"epoch": 5.674359517255981,
"grad_norm": 1.875,
"learning_rate": 3.606969963229798e-06,
"loss": 0.8281,
"step": 3350
},
{
"epoch": 5.691297903874656,
"grad_norm": 1.9140625,
"learning_rate": 3.5754329868627256e-06,
"loss": 0.8207,
"step": 3360
},
{
"epoch": 5.70823629049333,
"grad_norm": 2.046875,
"learning_rate": 3.5439575716853414e-06,
"loss": 0.8263,
"step": 3370
},
{
"epoch": 5.725174677112005,
"grad_norm": 2.015625,
"learning_rate": 3.5125450778751453e-06,
"loss": 0.8106,
"step": 3380
},
{
"epoch": 5.742113063730679,
"grad_norm": 2.125,
"learning_rate": 3.4811968628905646e-06,
"loss": 0.7993,
"step": 3390
},
{
"epoch": 5.759051450349354,
"grad_norm": 1.84375,
"learning_rate": 3.4499142814122766e-06,
"loss": 0.7784,
"step": 3400
},
{
"epoch": 5.775989836968029,
"grad_norm": 1.7890625,
"learning_rate": 3.418698685284676e-06,
"loss": 0.7762,
"step": 3410
},
{
"epoch": 5.792928223586704,
"grad_norm": 1.8984375,
"learning_rate": 3.387551423457456e-06,
"loss": 0.7905,
"step": 3420
},
{
"epoch": 5.809866610205378,
"grad_norm": 1.9765625,
"learning_rate": 3.356473841927313e-06,
"loss": 0.8273,
"step": 3430
},
{
"epoch": 5.826804996824053,
"grad_norm": 2.046875,
"learning_rate": 3.3254672836797814e-06,
"loss": 0.7962,
"step": 3440
},
{
"epoch": 5.843743383442727,
"grad_norm": 2.03125,
"learning_rate": 3.2945330886311964e-06,
"loss": 0.8146,
"step": 3450
},
{
"epoch": 5.860681770061402,
"grad_norm": 1.9765625,
"learning_rate": 3.263672593570797e-06,
"loss": 0.8032,
"step": 3460
},
{
"epoch": 5.877620156680076,
"grad_norm": 1.84375,
"learning_rate": 3.23288713210295e-06,
"loss": 0.7997,
"step": 3470
},
{
"epoch": 5.894558543298751,
"grad_norm": 1.8828125,
"learning_rate": 3.2021780345895216e-06,
"loss": 0.8009,
"step": 3480
},
{
"epoch": 5.911496929917425,
"grad_norm": 1.953125,
"learning_rate": 3.1715466280923924e-06,
"loss": 0.8302,
"step": 3490
},
{
"epoch": 5.9284353165361,
"grad_norm": 2.359375,
"learning_rate": 3.1409942363161037e-06,
"loss": 0.8317,
"step": 3500
},
{
"epoch": 5.9284353165361,
"eval_loss": 1.0828534364700317,
"eval_runtime": 21.0141,
"eval_samples_per_second": 24.983,
"eval_steps_per_second": 24.983,
"step": 3500
},
{
"epoch": 5.945373703154774,
"grad_norm": 2.125,
"learning_rate": 3.1105221795506584e-06,
"loss": 0.8026,
"step": 3510
},
{
"epoch": 5.962312089773449,
"grad_norm": 2.046875,
"learning_rate": 3.080131774614464e-06,
"loss": 0.8209,
"step": 3520
},
{
"epoch": 5.9792504763921235,
"grad_norm": 1.8515625,
"learning_rate": 3.049824334797431e-06,
"loss": 0.8086,
"step": 3530
},
{
"epoch": 5.996188863010798,
"grad_norm": 2.015625,
"learning_rate": 3.019601169804216e-06,
"loss": 0.7651,
"step": 3540
},
{
"epoch": 6.0131272496294725,
"grad_norm": 1.703125,
"learning_rate": 2.9894635856976268e-06,
"loss": 0.7734,
"step": 3550
},
{
"epoch": 6.030065636248147,
"grad_norm": 1.8671875,
"learning_rate": 2.959412884842181e-06,
"loss": 0.7799,
"step": 3560
},
{
"epoch": 6.047004022866822,
"grad_norm": 1.921875,
"learning_rate": 2.929450365847828e-06,
"loss": 0.7448,
"step": 3570
},
{
"epoch": 6.063942409485496,
"grad_norm": 2.0,
"learning_rate": 2.899577323513829e-06,
"loss": 0.7354,
"step": 3580
},
{
"epoch": 6.080880796104171,
"grad_norm": 2.109375,
"learning_rate": 2.8697950487728e-06,
"loss": 0.7585,
"step": 3590
},
{
"epoch": 6.097819182722846,
"grad_norm": 1.828125,
"learning_rate": 2.8401048286349353e-06,
"loss": 0.7635,
"step": 3600
},
{
"epoch": 6.114757569341521,
"grad_norm": 1.84375,
"learning_rate": 2.8105079461323818e-06,
"loss": 0.7781,
"step": 3610
},
{
"epoch": 6.131695955960195,
"grad_norm": 1.8359375,
"learning_rate": 2.7810056802637995e-06,
"loss": 0.7646,
"step": 3620
},
{
"epoch": 6.14863434257887,
"grad_norm": 1.96875,
"learning_rate": 2.751599305939082e-06,
"loss": 0.7657,
"step": 3630
},
{
"epoch": 6.165572729197544,
"grad_norm": 2.109375,
"learning_rate": 2.7222900939242787e-06,
"loss": 0.7932,
"step": 3640
},
{
"epoch": 6.182511115816219,
"grad_norm": 2.203125,
"learning_rate": 2.693079310786661e-06,
"loss": 0.7611,
"step": 3650
},
{
"epoch": 6.199449502434893,
"grad_norm": 2.1875,
"learning_rate": 2.6639682188400017e-06,
"loss": 0.7631,
"step": 3660
},
{
"epoch": 6.216387889053568,
"grad_norm": 2.140625,
"learning_rate": 2.6349580760900232e-06,
"loss": 0.7784,
"step": 3670
},
{
"epoch": 6.233326275672242,
"grad_norm": 2.234375,
"learning_rate": 2.6060501361800333e-06,
"loss": 0.7479,
"step": 3680
},
{
"epoch": 6.250264662290917,
"grad_norm": 2.171875,
"learning_rate": 2.57724564833675e-06,
"loss": 0.7367,
"step": 3690
},
{
"epoch": 6.267203048909591,
"grad_norm": 1.875,
"learning_rate": 2.5485458573163114e-06,
"loss": 0.7463,
"step": 3700
},
{
"epoch": 6.284141435528266,
"grad_norm": 1.953125,
"learning_rate": 2.519952003350499e-06,
"loss": 0.7557,
"step": 3710
},
{
"epoch": 6.30107982214694,
"grad_norm": 2.0,
"learning_rate": 2.4914653220931283e-06,
"loss": 0.8084,
"step": 3720
},
{
"epoch": 6.318018208765615,
"grad_norm": 1.875,
"learning_rate": 2.4630870445666576e-06,
"loss": 0.7493,
"step": 3730
},
{
"epoch": 6.3349565953842895,
"grad_norm": 2.15625,
"learning_rate": 2.434818397108993e-06,
"loss": 0.7959,
"step": 3740
},
{
"epoch": 6.351894982002964,
"grad_norm": 2.21875,
"learning_rate": 2.406660601320489e-06,
"loss": 0.7245,
"step": 3750
},
{
"epoch": 6.3688333686216385,
"grad_norm": 2.25,
"learning_rate": 2.378614874011162e-06,
"loss": 0.7637,
"step": 3760
},
{
"epoch": 6.385771755240313,
"grad_norm": 2.15625,
"learning_rate": 2.3506824271480987e-06,
"loss": 0.7702,
"step": 3770
},
{
"epoch": 6.402710141858988,
"grad_norm": 2.09375,
"learning_rate": 2.322864467803097e-06,
"loss": 0.7155,
"step": 3780
},
{
"epoch": 6.419648528477662,
"grad_norm": 2.015625,
"learning_rate": 2.2951621981004873e-06,
"loss": 0.7925,
"step": 3790
},
{
"epoch": 6.436586915096337,
"grad_norm": 1.96875,
"learning_rate": 2.2675768151651993e-06,
"loss": 0.7901,
"step": 3800
},
{
"epoch": 6.453525301715012,
"grad_norm": 2.09375,
"learning_rate": 2.240109511071014e-06,
"loss": 0.729,
"step": 3810
},
{
"epoch": 6.470463688333687,
"grad_norm": 2.109375,
"learning_rate": 2.2127614727890627e-06,
"loss": 0.7481,
"step": 3820
},
{
"epoch": 6.487402074952361,
"grad_norm": 1.90625,
"learning_rate": 2.1855338821365302e-06,
"loss": 0.7148,
"step": 3830
},
{
"epoch": 6.504340461571036,
"grad_norm": 1.8125,
"learning_rate": 2.158427915725573e-06,
"loss": 0.7748,
"step": 3840
},
{
"epoch": 6.52127884818971,
"grad_norm": 2.125,
"learning_rate": 2.131444744912492e-06,
"loss": 0.7601,
"step": 3850
},
{
"epoch": 6.538217234808385,
"grad_norm": 2.0625,
"learning_rate": 2.1045855357470944e-06,
"loss": 0.7397,
"step": 3860
},
{
"epoch": 6.555155621427059,
"grad_norm": 2.578125,
"learning_rate": 2.0778514489223207e-06,
"loss": 0.7072,
"step": 3870
},
{
"epoch": 6.572094008045734,
"grad_norm": 2.21875,
"learning_rate": 2.051243639724071e-06,
"loss": 0.7204,
"step": 3880
},
{
"epoch": 6.589032394664408,
"grad_norm": 2.15625,
"learning_rate": 2.024763257981296e-06,
"loss": 0.7579,
"step": 3890
},
{
"epoch": 6.605970781283083,
"grad_norm": 1.9765625,
"learning_rate": 1.9984114480162976e-06,
"loss": 0.7411,
"step": 3900
},
{
"epoch": 6.622909167901757,
"grad_norm": 2.03125,
"learning_rate": 1.9721893485952785e-06,
"loss": 0.7513,
"step": 3910
},
{
"epoch": 6.639847554520432,
"grad_norm": 2.0625,
"learning_rate": 1.9460980928791407e-06,
"loss": 0.7629,
"step": 3920
},
{
"epoch": 6.656785941139106,
"grad_norm": 1.96875,
"learning_rate": 1.9201388083745036e-06,
"loss": 0.767,
"step": 3930
},
{
"epoch": 6.673724327757781,
"grad_norm": 2.0625,
"learning_rate": 1.8943126168849947e-06,
"loss": 0.731,
"step": 3940
},
{
"epoch": 6.690662714376455,
"grad_norm": 1.78125,
"learning_rate": 1.868620634462756e-06,
"loss": 0.7335,
"step": 3950
},
{
"epoch": 6.70760110099513,
"grad_norm": 2.109375,
"learning_rate": 1.8430639713602317e-06,
"loss": 0.7791,
"step": 3960
},
{
"epoch": 6.7245394876138045,
"grad_norm": 1.9921875,
"learning_rate": 1.8176437319821789e-06,
"loss": 0.7231,
"step": 3970
},
{
"epoch": 6.741477874232479,
"grad_norm": 1.9375,
"learning_rate": 1.79236101483794e-06,
"loss": 0.7355,
"step": 3980
},
{
"epoch": 6.7584162608511535,
"grad_norm": 1.734375,
"learning_rate": 1.7672169124939843e-06,
"loss": 0.7737,
"step": 3990
},
{
"epoch": 6.775354647469829,
"grad_norm": 1.96875,
"learning_rate": 1.7422125115266785e-06,
"loss": 0.7269,
"step": 4000
},
{
"epoch": 6.775354647469829,
"eval_loss": 1.0812190771102905,
"eval_runtime": 21.7625,
"eval_samples_per_second": 24.124,
"eval_steps_per_second": 24.124,
"step": 4000
},
{
"epoch": 6.7922930340885035,
"grad_norm": 1.7890625,
"learning_rate": 1.7173488924753451e-06,
"loss": 0.7756,
"step": 4010
},
{
"epoch": 6.809231420707178,
"grad_norm": 1.6953125,
"learning_rate": 1.6926271297955549e-06,
"loss": 0.7597,
"step": 4020
},
{
"epoch": 6.8261698073258525,
"grad_norm": 1.90625,
"learning_rate": 1.668048291812709e-06,
"loss": 0.7463,
"step": 4030
},
{
"epoch": 6.843108193944527,
"grad_norm": 3.25,
"learning_rate": 1.6436134406758635e-06,
"loss": 0.7202,
"step": 4040
},
{
"epoch": 6.860046580563202,
"grad_norm": 1.6875,
"learning_rate": 1.6193236323118283e-06,
"loss": 0.7765,
"step": 4050
},
{
"epoch": 6.876984967181876,
"grad_norm": 1.84375,
"learning_rate": 1.5951799163795461e-06,
"loss": 0.7552,
"step": 4060
},
{
"epoch": 6.893923353800551,
"grad_norm": 1.6484375,
"learning_rate": 1.5711833362247204e-06,
"loss": 0.7565,
"step": 4070
},
{
"epoch": 6.910861740419225,
"grad_norm": 1.7265625,
"learning_rate": 1.547334928834739e-06,
"loss": 0.7342,
"step": 4080
},
{
"epoch": 6.9278001270379,
"grad_norm": 1.578125,
"learning_rate": 1.523635724793852e-06,
"loss": 0.7602,
"step": 4090
},
{
"epoch": 6.944738513656574,
"grad_norm": 1.6015625,
"learning_rate": 1.5000867482386478e-06,
"loss": 0.7839,
"step": 4100
},
{
"epoch": 6.961676900275249,
"grad_norm": 1.6171875,
"learning_rate": 1.4766890168137816e-06,
"loss": 0.7512,
"step": 4110
},
{
"epoch": 6.978615286893923,
"grad_norm": 1.6484375,
"learning_rate": 1.4534435416280135e-06,
"loss": 0.7715,
"step": 4120
},
{
"epoch": 6.995553673512598,
"grad_norm": 1.546875,
"learning_rate": 1.4303513272105057e-06,
"loss": 0.7924,
"step": 4130
},
{
"epoch": 7.012492060131272,
"grad_norm": 1.4453125,
"learning_rate": 1.4074133714674116e-06,
"loss": 0.7509,
"step": 4140
},
{
"epoch": 7.029430446749947,
"grad_norm": 1.3984375,
"learning_rate": 1.3846306656387603e-06,
"loss": 0.7349,
"step": 4150
},
{
"epoch": 7.046368833368621,
"grad_norm": 1.4765625,
"learning_rate": 1.3620041942556127e-06,
"loss": 0.7525,
"step": 4160
},
{
"epoch": 7.063307219987296,
"grad_norm": 1.34375,
"learning_rate": 1.339534935097524e-06,
"loss": 0.7515,
"step": 4170
},
{
"epoch": 7.0802456066059705,
"grad_norm": 1.4296875,
"learning_rate": 1.3172238591502794e-06,
"loss": 0.7433,
"step": 4180
},
{
"epoch": 7.097183993224645,
"grad_norm": 1.265625,
"learning_rate": 1.2950719305639464e-06,
"loss": 0.7416,
"step": 4190
},
{
"epoch": 7.1141223798433195,
"grad_norm": 1.3984375,
"learning_rate": 1.2730801066112024e-06,
"loss": 0.7115,
"step": 4200
},
{
"epoch": 7.131060766461994,
"grad_norm": 1.3203125,
"learning_rate": 1.251249337645965e-06,
"loss": 0.6892,
"step": 4210
},
{
"epoch": 7.1479991530806695,
"grad_norm": 1.3125,
"learning_rate": 1.229580567062334e-06,
"loss": 0.704,
"step": 4220
},
{
"epoch": 7.164937539699344,
"grad_norm": 1.3203125,
"learning_rate": 1.2080747312538082e-06,
"loss": 0.7021,
"step": 4230
},
{
"epoch": 7.1818759263180185,
"grad_norm": 1.3828125,
"learning_rate": 1.1867327595728373e-06,
"loss": 0.6669,
"step": 4240
},
{
"epoch": 7.198814312936693,
"grad_norm": 1.2890625,
"learning_rate": 1.1655555742906438e-06,
"loss": 0.7433,
"step": 4250
},
{
"epoch": 7.215752699555368,
"grad_norm": 1.3046875,
"learning_rate": 1.1445440905573857e-06,
"loss": 0.7294,
"step": 4260
},
{
"epoch": 7.232691086174042,
"grad_norm": 1.359375,
"learning_rate": 1.1236992163625933e-06,
"loss": 0.7121,
"step": 4270
},
{
"epoch": 7.249629472792717,
"grad_norm": 1.3046875,
"learning_rate": 1.1030218524959375e-06,
"loss": 0.6407,
"step": 4280
},
{
"epoch": 7.266567859411391,
"grad_norm": 1.515625,
"learning_rate": 1.082512892508309e-06,
"loss": 0.7426,
"step": 4290
},
{
"epoch": 7.283506246030066,
"grad_norm": 1.265625,
"learning_rate": 1.062173222673194e-06,
"loss": 0.7256,
"step": 4300
},
{
"epoch": 7.30044463264874,
"grad_norm": 1.140625,
"learning_rate": 1.0420037219483842e-06,
"loss": 0.7149,
"step": 4310
},
{
"epoch": 7.317383019267415,
"grad_norm": 1.171875,
"learning_rate": 1.0220052619379844e-06,
"loss": 0.7327,
"step": 4320
},
{
"epoch": 7.334321405886089,
"grad_norm": 1.2734375,
"learning_rate": 1.0021787068547568e-06,
"loss": 0.7728,
"step": 4330
},
{
"epoch": 7.351259792504764,
"grad_norm": 1.2890625,
"learning_rate": 9.825249134827664e-07,
"loss": 0.745,
"step": 4340
},
{
"epoch": 7.368198179123438,
"grad_norm": 1.046875,
"learning_rate": 9.630447311403623e-07,
"loss": 0.7625,
"step": 4350
},
{
"epoch": 7.385136565742113,
"grad_norm": 1.0703125,
"learning_rate": 9.437390016434711e-07,
"loss": 0.7297,
"step": 4360
},
{
"epoch": 7.402074952360787,
"grad_norm": 1.1015625,
"learning_rate": 9.246085592692183e-07,
"loss": 0.6938,
"step": 4370
},
{
"epoch": 7.419013338979462,
"grad_norm": 1.1015625,
"learning_rate": 9.056542307198823e-07,
"loss": 0.721,
"step": 4380
},
{
"epoch": 7.435951725598136,
"grad_norm": 1.0703125,
"learning_rate": 8.868768350871592e-07,
"loss": 0.712,
"step": 4390
},
{
"epoch": 7.452890112216811,
"grad_norm": 1.1640625,
"learning_rate": 8.682771838167775e-07,
"loss": 0.7684,
"step": 4400
},
{
"epoch": 7.469828498835486,
"grad_norm": 1.109375,
"learning_rate": 8.49856080673423e-07,
"loss": 0.723,
"step": 4410
},
{
"epoch": 7.486766885454161,
"grad_norm": 1.046875,
"learning_rate": 8.316143217060129e-07,
"loss": 0.7165,
"step": 4420
},
{
"epoch": 7.503705272072835,
"grad_norm": 1.0625,
"learning_rate": 8.135526952132877e-07,
"loss": 0.7471,
"step": 4430
},
{
"epoch": 7.52064365869151,
"grad_norm": 1.109375,
"learning_rate": 7.956719817097491e-07,
"loss": 0.7388,
"step": 4440
},
{
"epoch": 7.5375820453101845,
"grad_norm": 1.0078125,
"learning_rate": 7.779729538919351e-07,
"loss": 0.7165,
"step": 4450
},
{
"epoch": 7.554520431928859,
"grad_norm": 0.9609375,
"learning_rate": 7.6045637660502e-07,
"loss": 0.7349,
"step": 4460
},
{
"epoch": 7.5714588185475336,
"grad_norm": 1.171875,
"learning_rate": 7.4312300680977e-07,
"loss": 0.6813,
"step": 4470
},
{
"epoch": 7.588397205166208,
"grad_norm": 1.09375,
"learning_rate": 7.259735935498297e-07,
"loss": 0.7447,
"step": 4480
},
{
"epoch": 7.605335591784883,
"grad_norm": 0.9609375,
"learning_rate": 7.090088779193516e-07,
"loss": 0.6976,
"step": 4490
},
{
"epoch": 7.622273978403557,
"grad_norm": 0.98046875,
"learning_rate": 6.922295930309691e-07,
"loss": 0.7372,
"step": 4500
},
{
"epoch": 7.622273978403557,
"eval_loss": 1.0816632509231567,
"eval_runtime": 21.0317,
"eval_samples_per_second": 24.962,
"eval_steps_per_second": 24.962,
"step": 4500
},
{
"epoch": 7.639212365022232,
"grad_norm": 1.078125,
"learning_rate": 6.756364639841217e-07,
"loss": 0.703,
"step": 4510
},
{
"epoch": 7.656150751640906,
"grad_norm": 1.0390625,
"learning_rate": 6.592302078337132e-07,
"loss": 0.7121,
"step": 4520
},
{
"epoch": 7.673089138259581,
"grad_norm": 1.0078125,
"learning_rate": 6.430115335591291e-07,
"loss": 0.7669,
"step": 4530
},
{
"epoch": 7.690027524878255,
"grad_norm": 1.0234375,
"learning_rate": 6.26981142033602e-07,
"loss": 0.7015,
"step": 4540
},
{
"epoch": 7.70696591149693,
"grad_norm": 1.0390625,
"learning_rate": 6.111397259939128e-07,
"loss": 0.736,
"step": 4550
},
{
"epoch": 7.723904298115604,
"grad_norm": 1.0390625,
"learning_rate": 5.954879700104732e-07,
"loss": 0.7379,
"step": 4560
},
{
"epoch": 7.740842684734279,
"grad_norm": 1.09375,
"learning_rate": 5.800265504577202e-07,
"loss": 0.7687,
"step": 4570
},
{
"epoch": 7.757781071352953,
"grad_norm": 0.96875,
"learning_rate": 5.647561354849079e-07,
"loss": 0.7316,
"step": 4580
},
{
"epoch": 7.774719457971628,
"grad_norm": 1.03125,
"learning_rate": 5.496773849872183e-07,
"loss": 0.7466,
"step": 4590
},
{
"epoch": 7.791657844590302,
"grad_norm": 1.09375,
"learning_rate": 5.347909505772569e-07,
"loss": 0.734,
"step": 4600
},
{
"epoch": 7.808596231208977,
"grad_norm": 1.0546875,
"learning_rate": 5.200974755568849e-07,
"loss": 0.7497,
"step": 4610
},
{
"epoch": 7.8255346178276515,
"grad_norm": 1.046875,
"learning_rate": 5.055975948894226e-07,
"loss": 0.7562,
"step": 4620
},
{
"epoch": 7.842473004446326,
"grad_norm": 1.125,
"learning_rate": 4.912919351722151e-07,
"loss": 0.7017,
"step": 4630
},
{
"epoch": 7.859411391065001,
"grad_norm": 1.1328125,
"learning_rate": 4.771811146095451e-07,
"loss": 0.6976,
"step": 4640
},
{
"epoch": 7.876349777683676,
"grad_norm": 1.046875,
"learning_rate": 4.6326574298592666e-07,
"loss": 0.6964,
"step": 4650
},
{
"epoch": 7.8932881643023505,
"grad_norm": 1.203125,
"learning_rate": 4.49546421639746e-07,
"loss": 0.7239,
"step": 4660
},
{
"epoch": 7.910226550921025,
"grad_norm": 1.0625,
"learning_rate": 4.360237434372838e-07,
"loss": 0.7632,
"step": 4670
},
{
"epoch": 7.9271649375396995,
"grad_norm": 1.1640625,
"learning_rate": 4.226982927470874e-07,
"loss": 0.7184,
"step": 4680
},
{
"epoch": 7.944103324158374,
"grad_norm": 1.1484375,
"learning_rate": 4.0957064541472305e-07,
"loss": 0.7238,
"step": 4690
},
{
"epoch": 7.961041710777049,
"grad_norm": 1.421875,
"learning_rate": 3.966413687378895e-07,
"loss": 0.7361,
"step": 4700
},
{
"epoch": 7.977980097395723,
"grad_norm": 1.390625,
"learning_rate": 3.8391102144190175e-07,
"loss": 0.723,
"step": 4710
},
{
"epoch": 7.994918484014398,
"grad_norm": 1.4296875,
"learning_rate": 3.7138015365554834e-07,
"loss": 0.7064,
"step": 4720
},
{
"epoch": 8.011856870633073,
"grad_norm": 1.3125,
"learning_rate": 3.590493068873163e-07,
"loss": 0.7317,
"step": 4730
},
{
"epoch": 8.028795257251748,
"grad_norm": 1.3359375,
"learning_rate": 3.4691901400199234e-07,
"loss": 0.7102,
"step": 4740
},
{
"epoch": 8.045733643870422,
"grad_norm": 2.4375,
"learning_rate": 3.349897991976336e-07,
"loss": 0.681,
"step": 4750
},
{
"epoch": 8.062672030489097,
"grad_norm": 2.171875,
"learning_rate": 3.232621779829176e-07,
"loss": 0.6939,
"step": 4760
},
{
"epoch": 8.079610417107771,
"grad_norm": 2.140625,
"learning_rate": 3.1173665715486076e-07,
"loss": 0.7276,
"step": 4770
},
{
"epoch": 8.096548803726446,
"grad_norm": 2.09375,
"learning_rate": 3.0041373477692315e-07,
"loss": 0.6762,
"step": 4780
},
{
"epoch": 8.11348719034512,
"grad_norm": 2.171875,
"learning_rate": 2.8929390015748124e-07,
"loss": 0.7547,
"step": 4790
},
{
"epoch": 8.130425576963795,
"grad_norm": 3.71875,
"learning_rate": 2.7837763382868253e-07,
"loss": 0.716,
"step": 4800
},
{
"epoch": 8.14736396358247,
"grad_norm": 3.15625,
"learning_rate": 2.6766540752568514e-07,
"loss": 0.7306,
"step": 4810
},
{
"epoch": 8.164302350201144,
"grad_norm": 3.46875,
"learning_rate": 2.5715768416626486e-07,
"loss": 0.6856,
"step": 4820
},
{
"epoch": 8.181240736819818,
"grad_norm": 3.859375,
"learning_rate": 2.4685491783081715e-07,
"loss": 0.6798,
"step": 4830
},
{
"epoch": 8.198179123438493,
"grad_norm": 3.46875,
"learning_rate": 2.3675755374272857e-07,
"loss": 0.7652,
"step": 4840
},
{
"epoch": 8.215117510057167,
"grad_norm": 8.0,
"learning_rate": 2.268660282491436e-07,
"loss": 0.6881,
"step": 4850
},
{
"epoch": 8.232055896675842,
"grad_norm": 7.21875,
"learning_rate": 2.1718076880210325e-07,
"loss": 0.7532,
"step": 4860
},
{
"epoch": 8.248994283294516,
"grad_norm": 7.0625,
"learning_rate": 2.0770219394007362e-07,
"loss": 0.6872,
"step": 4870
},
{
"epoch": 8.265932669913191,
"grad_norm": 7.8125,
"learning_rate": 1.9843071326986264e-07,
"loss": 0.7416,
"step": 4880
},
{
"epoch": 8.282871056531866,
"grad_norm": 8.3125,
"learning_rate": 1.893667274489136e-07,
"loss": 0.703,
"step": 4890
},
{
"epoch": 8.29980944315054,
"grad_norm": 2.34375,
"learning_rate": 1.8051062816799913e-07,
"loss": 0.6915,
"step": 4900
},
{
"epoch": 8.316747829769215,
"grad_norm": 2.109375,
"learning_rate": 1.7186279813428518e-07,
"loss": 0.6801,
"step": 4910
},
{
"epoch": 8.333686216387889,
"grad_norm": 2.03125,
"learning_rate": 1.6342361105480098e-07,
"loss": 0.7441,
"step": 4920
},
{
"epoch": 8.350624603006564,
"grad_norm": 2.140625,
"learning_rate": 1.551934316202869e-07,
"loss": 0.7246,
"step": 4930
},
{
"epoch": 8.367562989625238,
"grad_norm": 2.140625,
"learning_rate": 1.471726154894304e-07,
"loss": 0.6718,
"step": 4940
},
{
"epoch": 8.384501376243913,
"grad_norm": 1.65625,
"learning_rate": 1.39361509273504e-07,
"loss": 0.709,
"step": 4950
},
{
"epoch": 8.401439762862587,
"grad_norm": 1.875,
"learning_rate": 1.3176045052138097e-07,
"loss": 0.7661,
"step": 4960
},
{
"epoch": 8.418378149481262,
"grad_norm": 2.359375,
"learning_rate": 1.2436976770495267e-07,
"loss": 0.7727,
"step": 4970
},
{
"epoch": 8.435316536099936,
"grad_norm": 1.703125,
"learning_rate": 1.1718978020492988e-07,
"loss": 0.802,
"step": 4980
},
{
"epoch": 8.45225492271861,
"grad_norm": 1.8046875,
"learning_rate": 1.1022079829704436e-07,
"loss": 0.7435,
"step": 4990
},
{
"epoch": 8.469193309337285,
"grad_norm": 1.703125,
"learning_rate": 1.0346312313864127e-07,
"loss": 0.7366,
"step": 5000
},
{
"epoch": 8.469193309337285,
"eval_loss": 1.0841984748840332,
"eval_runtime": 21.0727,
"eval_samples_per_second": 24.914,
"eval_steps_per_second": 24.914,
"step": 5000
},
{
"epoch": 8.48613169595596,
"grad_norm": 2.265625,
"learning_rate": 9.691704675565904e-08,
"loss": 0.7348,
"step": 5010
},
{
"epoch": 8.503070082574634,
"grad_norm": 1.953125,
"learning_rate": 9.058285203001837e-08,
"loss": 0.703,
"step": 5020
},
{
"epoch": 8.520008469193309,
"grad_norm": 1.796875,
"learning_rate": 8.446081268738971e-08,
"loss": 0.6796,
"step": 5030
},
{
"epoch": 8.536946855811983,
"grad_norm": 1.9140625,
"learning_rate": 7.855119328537109e-08,
"loss": 0.7424,
"step": 5040
},
{
"epoch": 8.553885242430658,
"grad_norm": 1.7890625,
"learning_rate": 7.285424920204953e-08,
"loss": 0.7379,
"step": 5050
},
{
"epoch": 8.570823629049332,
"grad_norm": 1.90625,
"learning_rate": 6.737022662497095e-08,
"loss": 0.7113,
"step": 5060
},
{
"epoch": 8.587762015668007,
"grad_norm": 1.9140625,
"learning_rate": 6.209936254049808e-08,
"loss": 0.698,
"step": 5070
},
{
"epoch": 8.604700402286682,
"grad_norm": 2.25,
"learning_rate": 5.704188472356764e-08,
"loss": 0.7441,
"step": 5080
},
{
"epoch": 8.621638788905356,
"grad_norm": 2.375,
"learning_rate": 5.219801172785255e-08,
"loss": 0.6628,
"step": 5090
},
{
"epoch": 8.63857717552403,
"grad_norm": 2.09375,
"learning_rate": 4.756795287631288e-08,
"loss": 0.7012,
"step": 5100
},
{
"epoch": 8.655515562142705,
"grad_norm": 2.375,
"learning_rate": 4.315190825215143e-08,
"loss": 0.7372,
"step": 5110
},
{
"epoch": 8.67245394876138,
"grad_norm": 1.8125,
"learning_rate": 3.895006869016837e-08,
"loss": 0.7353,
"step": 5120
},
{
"epoch": 8.689392335380056,
"grad_norm": 1.6484375,
"learning_rate": 3.496261576851345e-08,
"loss": 0.7127,
"step": 5130
},
{
"epoch": 8.706330721998729,
"grad_norm": 2.15625,
"learning_rate": 3.1189721800839453e-08,
"loss": 0.7462,
"step": 5140
},
{
"epoch": 8.723269108617405,
"grad_norm": 2.140625,
"learning_rate": 2.763154982885541e-08,
"loss": 0.7486,
"step": 5150
},
{
"epoch": 8.74020749523608,
"grad_norm": 1.9296875,
"learning_rate": 2.4288253615282754e-08,
"loss": 0.7136,
"step": 5160
},
{
"epoch": 8.757145881854754,
"grad_norm": 2.046875,
"learning_rate": 2.1159977637208452e-08,
"loss": 0.746,
"step": 5170
},
{
"epoch": 8.774084268473429,
"grad_norm": 1.90625,
"learning_rate": 1.824685707984164e-08,
"loss": 0.7379,
"step": 5180
},
{
"epoch": 8.791022655092103,
"grad_norm": 1.9375,
"learning_rate": 1.5549017830674416e-08,
"loss": 0.7561,
"step": 5190
},
{
"epoch": 8.807961041710778,
"grad_norm": 1.96875,
"learning_rate": 1.3066576474038416e-08,
"loss": 0.7722,
"step": 5200
},
{
"epoch": 8.824899428329452,
"grad_norm": 2.03125,
"learning_rate": 1.0799640286068836e-08,
"loss": 0.7318,
"step": 5210
},
{
"epoch": 8.841837814948127,
"grad_norm": 1.7890625,
"learning_rate": 8.748307230067587e-09,
"loss": 0.7358,
"step": 5220
},
{
"epoch": 8.858776201566801,
"grad_norm": 1.7734375,
"learning_rate": 6.912665952270581e-09,
"loss": 0.7051,
"step": 5230
},
{
"epoch": 8.875714588185476,
"grad_norm": 1.8203125,
"learning_rate": 5.29279577801689e-09,
"loss": 0.7542,
"step": 5240
},
{
"epoch": 8.89265297480415,
"grad_norm": 2.0625,
"learning_rate": 3.888766708319836e-09,
"loss": 0.7354,
"step": 5250
},
{
"epoch": 8.909591361422825,
"grad_norm": 2.1875,
"learning_rate": 2.700639416843287e-09,
"loss": 0.7226,
"step": 5260
},
{
"epoch": 8.9265297480415,
"grad_norm": 2.171875,
"learning_rate": 1.7284652472787655e-09,
"loss": 0.7387,
"step": 5270
},
{
"epoch": 8.943468134660174,
"grad_norm": 2.125,
"learning_rate": 9.722862111277708e-10,
"loss": 0.6839,
"step": 5280
},
{
"epoch": 8.960406521278848,
"grad_norm": 2.34375,
"learning_rate": 4.321349858843471e-10,
"loss": 0.7071,
"step": 5290
},
{
"epoch": 8.977344907897523,
"grad_norm": 1.9765625,
"learning_rate": 1.080349136250991e-10,
"loss": 0.7484,
"step": 5300
},
{
"epoch": 8.994283294516197,
"grad_norm": 1.9375,
"learning_rate": 0.0,
"loss": 0.7226,
"step": 5310
},
{
"epoch": 8.994283294516197,
"step": 5310,
"total_flos": 5.095460800932741e+17,
"train_loss": 0.9683849982173653,
"train_runtime": 5676.759,
"train_samples_per_second": 7.488,
"train_steps_per_second": 0.935
}
],
"logging_steps": 10,
"max_steps": 5310,
"num_input_tokens_seen": 0,
"num_train_epochs": 9,
"save_steps": 0,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 5.095460800932741e+17,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}