{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.0,
"eval_steps": 500,
"global_step": 19419,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.007724393635099644,
"grad_norm": 15.9375,
"learning_rate": 2.5746652935118437e-06,
"loss": 2.0265,
"step": 50
},
{
"epoch": 0.015448787270199289,
"grad_norm": 11.0,
"learning_rate": 5.149330587023687e-06,
"loss": 1.6081,
"step": 100
},
{
"epoch": 0.023173180905298935,
"grad_norm": 10.375,
"learning_rate": 7.723995880535532e-06,
"loss": 0.9443,
"step": 150
},
{
"epoch": 0.030897574540398577,
"grad_norm": 7.40625,
"learning_rate": 1.0298661174047375e-05,
"loss": 0.6997,
"step": 200
},
{
"epoch": 0.03862196817549823,
"grad_norm": 9.8125,
"learning_rate": 1.287332646755922e-05,
"loss": 0.6121,
"step": 250
},
{
"epoch": 0.04634636181059787,
"grad_norm": 4.90625,
"learning_rate": 1.5447991761071064e-05,
"loss": 0.6053,
"step": 300
},
{
"epoch": 0.05407075544569751,
"grad_norm": 4.84375,
"learning_rate": 1.8022657054582907e-05,
"loss": 0.4967,
"step": 350
},
{
"epoch": 0.061795149080797154,
"grad_norm": 2.578125,
"learning_rate": 2.059732234809475e-05,
"loss": 0.466,
"step": 400
},
{
"epoch": 0.0695195427158968,
"grad_norm": 3.515625,
"learning_rate": 2.3171987641606592e-05,
"loss": 0.4914,
"step": 450
},
{
"epoch": 0.07724393635099645,
"grad_norm": 1.78125,
"learning_rate": 2.574665293511844e-05,
"loss": 0.4543,
"step": 500
},
{
"epoch": 0.08496832998609609,
"grad_norm": 2.078125,
"learning_rate": 2.8321318228630278e-05,
"loss": 0.4135,
"step": 550
},
{
"epoch": 0.09269272362119574,
"grad_norm": 3.734375,
"learning_rate": 3.089598352214213e-05,
"loss": 0.4241,
"step": 600
},
{
"epoch": 0.10041711725629537,
"grad_norm": 2.671875,
"learning_rate": 3.3470648815653964e-05,
"loss": 0.4221,
"step": 650
},
{
"epoch": 0.10814151089139502,
"grad_norm": 3.953125,
"learning_rate": 3.604531410916581e-05,
"loss": 0.4454,
"step": 700
},
{
"epoch": 0.11586590452649467,
"grad_norm": 3.4375,
"learning_rate": 3.8619979402677656e-05,
"loss": 0.4157,
"step": 750
},
{
"epoch": 0.12359029816159431,
"grad_norm": 2.125,
"learning_rate": 4.11946446961895e-05,
"loss": 0.4039,
"step": 800
},
{
"epoch": 0.13131469179669397,
"grad_norm": 2.375,
"learning_rate": 4.376930998970134e-05,
"loss": 0.3939,
"step": 850
},
{
"epoch": 0.1390390854317936,
"grad_norm": 1.078125,
"learning_rate": 4.6343975283213184e-05,
"loss": 0.4059,
"step": 900
},
{
"epoch": 0.14676347906689324,
"grad_norm": 1.75,
"learning_rate": 4.891864057672503e-05,
"loss": 0.3746,
"step": 950
},
{
"epoch": 0.1544878727019929,
"grad_norm": 1.3671875,
"learning_rate": 5.149330587023688e-05,
"loss": 0.3894,
"step": 1000
},
{
"epoch": 0.16221226633709254,
"grad_norm": 3.734375,
"learning_rate": 5.406797116374872e-05,
"loss": 0.3776,
"step": 1050
},
{
"epoch": 0.16993665997219218,
"grad_norm": 5.21875,
"learning_rate": 5.6642636457260556e-05,
"loss": 0.3761,
"step": 1100
},
{
"epoch": 0.17766105360729184,
"grad_norm": 3.046875,
"learning_rate": 5.9217301750772405e-05,
"loss": 0.3685,
"step": 1150
},
{
"epoch": 0.18538544724239148,
"grad_norm": 2.921875,
"learning_rate": 6.179196704428426e-05,
"loss": 0.3576,
"step": 1200
},
{
"epoch": 0.1931098408774911,
"grad_norm": 1.765625,
"learning_rate": 6.436663233779608e-05,
"loss": 0.3846,
"step": 1250
},
{
"epoch": 0.20083423451259075,
"grad_norm": 2.296875,
"learning_rate": 6.694129763130793e-05,
"loss": 0.3429,
"step": 1300
},
{
"epoch": 0.2085586281476904,
"grad_norm": 1.953125,
"learning_rate": 6.951596292481978e-05,
"loss": 0.337,
"step": 1350
},
{
"epoch": 0.21628302178279005,
"grad_norm": 2.484375,
"learning_rate": 7.209062821833163e-05,
"loss": 0.385,
"step": 1400
},
{
"epoch": 0.22400741541788968,
"grad_norm": 1.3203125,
"learning_rate": 7.466529351184346e-05,
"loss": 0.3183,
"step": 1450
},
{
"epoch": 0.23173180905298935,
"grad_norm": 2.046875,
"learning_rate": 7.723995880535531e-05,
"loss": 0.3743,
"step": 1500
},
{
"epoch": 0.23945620268808898,
"grad_norm": 2.03125,
"learning_rate": 7.981462409886715e-05,
"loss": 0.3434,
"step": 1550
},
{
"epoch": 0.24718059632318862,
"grad_norm": 2.578125,
"learning_rate": 8.2389289392379e-05,
"loss": 0.311,
"step": 1600
},
{
"epoch": 0.25490498995828825,
"grad_norm": 3.765625,
"learning_rate": 8.496395468589084e-05,
"loss": 0.3344,
"step": 1650
},
{
"epoch": 0.26262938359338794,
"grad_norm": 1.8046875,
"learning_rate": 8.753861997940268e-05,
"loss": 0.3135,
"step": 1700
},
{
"epoch": 0.2703537772284876,
"grad_norm": 2.234375,
"learning_rate": 9.011328527291453e-05,
"loss": 0.3097,
"step": 1750
},
{
"epoch": 0.2780781708635872,
"grad_norm": 1.859375,
"learning_rate": 9.268795056642637e-05,
"loss": 0.3259,
"step": 1800
},
{
"epoch": 0.28580256449868685,
"grad_norm": 2.671875,
"learning_rate": 9.526261585993821e-05,
"loss": 0.3601,
"step": 1850
},
{
"epoch": 0.2935269581337865,
"grad_norm": 1.9296875,
"learning_rate": 9.783728115345005e-05,
"loss": 0.308,
"step": 1900
},
{
"epoch": 0.3012513517688861,
"grad_norm": 2.671875,
"learning_rate": 0.0001004119464469619,
"loss": 0.3259,
"step": 1950
},
{
"epoch": 0.3089757454039858,
"grad_norm": 1.125,
"learning_rate": 0.00010298661174047375,
"loss": 0.2879,
"step": 2000
},
{
"epoch": 0.31670013903908545,
"grad_norm": 2.484375,
"learning_rate": 0.0001055612770339856,
"loss": 0.3252,
"step": 2050
},
{
"epoch": 0.3244245326741851,
"grad_norm": 1.09375,
"learning_rate": 0.00010813594232749744,
"loss": 0.3302,
"step": 2100
},
{
"epoch": 0.3321489263092847,
"grad_norm": 1.21875,
"learning_rate": 0.00011071060762100927,
"loss": 0.313,
"step": 2150
},
{
"epoch": 0.33987331994438436,
"grad_norm": 2.125,
"learning_rate": 0.00011328527291452111,
"loss": 0.3088,
"step": 2200
},
{
"epoch": 0.347597713579484,
"grad_norm": 2.46875,
"learning_rate": 0.00011585993820803295,
"loss": 0.2967,
"step": 2250
},
{
"epoch": 0.3553221072145837,
"grad_norm": 2.375,
"learning_rate": 0.00011843460350154481,
"loss": 0.2821,
"step": 2300
},
{
"epoch": 0.3630465008496833,
"grad_norm": 3.40625,
"learning_rate": 0.00012100926879505665,
"loss": 0.3559,
"step": 2350
},
{
"epoch": 0.37077089448478295,
"grad_norm": 0.984375,
"learning_rate": 0.0001235839340885685,
"loss": 0.295,
"step": 2400
},
{
"epoch": 0.3784952881198826,
"grad_norm": 1.9296875,
"learning_rate": 0.00012615859938208034,
"loss": 0.3016,
"step": 2450
},
{
"epoch": 0.3862196817549822,
"grad_norm": 2.703125,
"learning_rate": 0.00012873326467559217,
"loss": 0.2975,
"step": 2500
},
{
"epoch": 0.39394407539008186,
"grad_norm": 1.5390625,
"learning_rate": 0.00013130792996910402,
"loss": 0.3172,
"step": 2550
},
{
"epoch": 0.4016684690251815,
"grad_norm": 1.7265625,
"learning_rate": 0.00013388259526261585,
"loss": 0.2923,
"step": 2600
},
{
"epoch": 0.4093928626602812,
"grad_norm": 1.8125,
"learning_rate": 0.0001364572605561277,
"loss": 0.2763,
"step": 2650
},
{
"epoch": 0.4171172562953808,
"grad_norm": 2.0625,
"learning_rate": 0.00013903192584963957,
"loss": 0.3312,
"step": 2700
},
{
"epoch": 0.42484164993048046,
"grad_norm": 1.765625,
"learning_rate": 0.0001416065911431514,
"loss": 0.3032,
"step": 2750
},
{
"epoch": 0.4325660435655801,
"grad_norm": 2.578125,
"learning_rate": 0.00014418125643666325,
"loss": 0.3055,
"step": 2800
},
{
"epoch": 0.44029043720067973,
"grad_norm": 3.28125,
"learning_rate": 0.00014675592173017508,
"loss": 0.3208,
"step": 2850
},
{
"epoch": 0.44801483083577937,
"grad_norm": 3.328125,
"learning_rate": 0.0001493305870236869,
"loss": 0.2917,
"step": 2900
},
{
"epoch": 0.45573922447087906,
"grad_norm": 1.6015625,
"learning_rate": 0.00015190525231719877,
"loss": 0.2998,
"step": 2950
},
{
"epoch": 0.4634636181059787,
"grad_norm": 1.2109375,
"learning_rate": 0.00015447991761071062,
"loss": 0.2823,
"step": 3000
},
{
"epoch": 0.4711880117410783,
"grad_norm": 2.265625,
"learning_rate": 0.00015705458290422245,
"loss": 0.3304,
"step": 3050
},
{
"epoch": 0.47891240537617796,
"grad_norm": 2.34375,
"learning_rate": 0.0001596292481977343,
"loss": 0.2956,
"step": 3100
},
{
"epoch": 0.4866367990112776,
"grad_norm": 2.03125,
"learning_rate": 0.00016220391349124614,
"loss": 0.2934,
"step": 3150
},
{
"epoch": 0.49436119264637723,
"grad_norm": 0.93359375,
"learning_rate": 0.000164778578784758,
"loss": 0.3031,
"step": 3200
},
{
"epoch": 0.5020855862814769,
"grad_norm": 3.046875,
"learning_rate": 0.00016735324407826982,
"loss": 0.2679,
"step": 3250
},
{
"epoch": 0.5098099799165765,
"grad_norm": 3.203125,
"learning_rate": 0.00016992790937178168,
"loss": 0.3182,
"step": 3300
},
{
"epoch": 0.5175343735516762,
"grad_norm": 0.94921875,
"learning_rate": 0.0001725025746652935,
"loss": 0.2888,
"step": 3350
},
{
"epoch": 0.5252587671867759,
"grad_norm": 4.1875,
"learning_rate": 0.00017507723995880537,
"loss": 0.326,
"step": 3400
},
{
"epoch": 0.5329831608218755,
"grad_norm": 3.09375,
"learning_rate": 0.0001776519052523172,
"loss": 0.3184,
"step": 3450
},
{
"epoch": 0.5407075544569752,
"grad_norm": 2.0625,
"learning_rate": 0.00018022657054582905,
"loss": 0.3011,
"step": 3500
},
{
"epoch": 0.5484319480920747,
"grad_norm": 0.796875,
"learning_rate": 0.0001828012358393409,
"loss": 0.309,
"step": 3550
},
{
"epoch": 0.5561563417271744,
"grad_norm": 1.546875,
"learning_rate": 0.00018537590113285274,
"loss": 0.2876,
"step": 3600
},
{
"epoch": 0.563880735362274,
"grad_norm": 2.828125,
"learning_rate": 0.00018795056642636457,
"loss": 0.3071,
"step": 3650
},
{
"epoch": 0.5716051289973737,
"grad_norm": 2.296875,
"learning_rate": 0.00019052523171987642,
"loss": 0.3237,
"step": 3700
},
{
"epoch": 0.5793295226324734,
"grad_norm": 0.77734375,
"learning_rate": 0.00019309989701338825,
"loss": 0.2923,
"step": 3750
},
{
"epoch": 0.587053916267573,
"grad_norm": 2.015625,
"learning_rate": 0.0001956745623069001,
"loss": 0.296,
"step": 3800
},
{
"epoch": 0.5947783099026727,
"grad_norm": 1.8828125,
"learning_rate": 0.00019824922760041197,
"loss": 0.2936,
"step": 3850
},
{
"epoch": 0.6025027035377722,
"grad_norm": 1.6484375,
"learning_rate": 0.0001999994765359982,
"loss": 0.3222,
"step": 3900
},
{
"epoch": 0.6102270971728719,
"grad_norm": 4.65625,
"learning_rate": 0.00019999109305729934,
"loss": 0.2641,
"step": 3950
},
{
"epoch": 0.6179514908079716,
"grad_norm": 3.1875,
"learning_rate": 0.0001999724866611328,
"loss": 0.2733,
"step": 4000
},
{
"epoch": 0.6256758844430712,
"grad_norm": 1.390625,
"learning_rate": 0.00019994365924978462,
"loss": 0.2864,
"step": 4050
},
{
"epoch": 0.6334002780781709,
"grad_norm": 3.15625,
"learning_rate": 0.00019990461377051967,
"loss": 0.2958,
"step": 4100
},
{
"epoch": 0.6411246717132705,
"grad_norm": 1.9609375,
"learning_rate": 0.0001998553542152807,
"loss": 0.2515,
"step": 4150
},
{
"epoch": 0.6488490653483702,
"grad_norm": 1.9609375,
"learning_rate": 0.00019979588562027995,
"loss": 0.3032,
"step": 4200
},
{
"epoch": 0.6565734589834697,
"grad_norm": 0.6484375,
"learning_rate": 0.0001997262140654843,
"loss": 0.2912,
"step": 4250
},
{
"epoch": 0.6642978526185694,
"grad_norm": 2.375,
"learning_rate": 0.0001996463466739938,
"loss": 0.2887,
"step": 4300
},
{
"epoch": 0.6720222462536691,
"grad_norm": 1.921875,
"learning_rate": 0.0001995562916113133,
"loss": 0.3176,
"step": 4350
},
{
"epoch": 0.6797466398887687,
"grad_norm": 1.1171875,
"learning_rate": 0.0001994560580845175,
"loss": 0.2614,
"step": 4400
},
{
"epoch": 0.6874710335238684,
"grad_norm": 1.1015625,
"learning_rate": 0.0001993456563413099,
"loss": 0.2594,
"step": 4450
},
{
"epoch": 0.695195427158968,
"grad_norm": 1.515625,
"learning_rate": 0.00019922509766897502,
"loss": 0.2836,
"step": 4500
},
{
"epoch": 0.7029198207940677,
"grad_norm": 3.125,
"learning_rate": 0.0001990943943932242,
"loss": 0.305,
"step": 4550
},
{
"epoch": 0.7106442144291674,
"grad_norm": 5.71875,
"learning_rate": 0.0001989535598769357,
"loss": 0.2652,
"step": 4600
},
{
"epoch": 0.718368608064267,
"grad_norm": 1.625,
"learning_rate": 0.00019880260851878835,
"loss": 0.2911,
"step": 4650
},
{
"epoch": 0.7260930016993666,
"grad_norm": 2.96875,
"learning_rate": 0.00019864155575178953,
"loss": 0.2663,
"step": 4700
},
{
"epoch": 0.7338173953344662,
"grad_norm": 4.40625,
"learning_rate": 0.0001984704180416973,
"loss": 0.2744,
"step": 4750
},
{
"epoch": 0.7415417889695659,
"grad_norm": 2.640625,
"learning_rate": 0.00019828921288533695,
"loss": 0.2956,
"step": 4800
},
{
"epoch": 0.7492661826046655,
"grad_norm": 2.828125,
"learning_rate": 0.00019809795880881216,
"loss": 0.2574,
"step": 4850
},
{
"epoch": 0.7569905762397652,
"grad_norm": 0.734375,
"learning_rate": 0.0001978966753656109,
"loss": 0.3144,
"step": 4900
},
{
"epoch": 0.7647149698748649,
"grad_norm": 2.09375,
"learning_rate": 0.00019768538313460647,
"loss": 0.2984,
"step": 4950
},
{
"epoch": 0.7724393635099644,
"grad_norm": 1.1328125,
"learning_rate": 0.00019746410371795334,
"loss": 0.2347,
"step": 5000
},
{
"epoch": 0.7801637571450641,
"grad_norm": 2.890625,
"learning_rate": 0.00019723285973887865,
"loss": 0.246,
"step": 5050
},
{
"epoch": 0.7878881507801637,
"grad_norm": 2.046875,
"learning_rate": 0.0001969916748393693,
"loss": 0.2651,
"step": 5100
},
{
"epoch": 0.7956125444152634,
"grad_norm": 1.03125,
"learning_rate": 0.00019674057367775487,
"loss": 0.2604,
"step": 5150
},
{
"epoch": 0.803336938050363,
"grad_norm": 3.296875,
"learning_rate": 0.00019647958192618644,
"loss": 0.2768,
"step": 5200
},
{
"epoch": 0.8110613316854627,
"grad_norm": 5.09375,
"learning_rate": 0.00019620872626801203,
"loss": 0.253,
"step": 5250
},
{
"epoch": 0.8187857253205624,
"grad_norm": 1.8359375,
"learning_rate": 0.00019592803439504855,
"loss": 0.259,
"step": 5300
},
{
"epoch": 0.826510118955662,
"grad_norm": 4.125,
"learning_rate": 0.00019563753500475054,
"loss": 0.2703,
"step": 5350
},
{
"epoch": 0.8342345125907616,
"grad_norm": 1.25,
"learning_rate": 0.00019533725779727635,
"loss": 0.2941,
"step": 5400
},
{
"epoch": 0.8419589062258612,
"grad_norm": 2.609375,
"learning_rate": 0.00019502723347245143,
"loss": 0.2706,
"step": 5450
},
{
"epoch": 0.8496832998609609,
"grad_norm": 3.453125,
"learning_rate": 0.0001947074937266299,
"loss": 0.3306,
"step": 5500
},
{
"epoch": 0.8574076934960606,
"grad_norm": 3.421875,
"learning_rate": 0.00019437807124945362,
"loss": 0.2738,
"step": 5550
},
{
"epoch": 0.8651320871311602,
"grad_norm": 2.46875,
"learning_rate": 0.00019403899972051045,
"loss": 0.2659,
"step": 5600
},
{
"epoch": 0.8728564807662599,
"grad_norm": 1.5234375,
"learning_rate": 0.0001936903138058906,
"loss": 0.2569,
"step": 5650
},
{
"epoch": 0.8805808744013595,
"grad_norm": 4.09375,
"learning_rate": 0.00019333204915464265,
"loss": 0.2517,
"step": 5700
},
{
"epoch": 0.8883052680364592,
"grad_norm": 2.703125,
"learning_rate": 0.0001929642423951286,
"loss": 0.2738,
"step": 5750
},
{
"epoch": 0.8960296616715587,
"grad_norm": 0.78515625,
"learning_rate": 0.0001925869311312793,
"loss": 0.2498,
"step": 5800
},
{
"epoch": 0.9037540553066584,
"grad_norm": 2.125,
"learning_rate": 0.00019220015393874978,
"loss": 0.231,
"step": 5850
},
{
"epoch": 0.9114784489417581,
"grad_norm": 1.171875,
"learning_rate": 0.0001918039503609753,
"loss": 0.23,
"step": 5900
},
{
"epoch": 0.9192028425768577,
"grad_norm": 1.546875,
"learning_rate": 0.0001913983609051286,
"loss": 0.2203,
"step": 5950
},
{
"epoch": 0.9269272362119574,
"grad_norm": 4.0625,
"learning_rate": 0.0001909834270379784,
"loss": 0.2902,
"step": 6000
},
{
"epoch": 0.934651629847057,
"grad_norm": 0.6171875,
"learning_rate": 0.00019055919118165002,
"loss": 0.2639,
"step": 6050
},
{
"epoch": 0.9423760234821567,
"grad_norm": 2.65625,
"learning_rate": 0.00019012569670928813,
"loss": 0.2503,
"step": 6100
},
{
"epoch": 0.9501004171172563,
"grad_norm": 1.1875,
"learning_rate": 0.00018968298794062242,
"loss": 0.2697,
"step": 6150
},
{
"epoch": 0.9578248107523559,
"grad_norm": 2.28125,
"learning_rate": 0.00018923111013743635,
"loss": 0.2265,
"step": 6200
},
{
"epoch": 0.9655492043874556,
"grad_norm": 2.109375,
"learning_rate": 0.00018877010949893975,
"loss": 0.2583,
"step": 6250
},
{
"epoch": 0.9732735980225552,
"grad_norm": 2.703125,
"learning_rate": 0.00018830003315704544,
"loss": 0.24,
"step": 6300
},
{
"epoch": 0.9809979916576549,
"grad_norm": 2.046875,
"learning_rate": 0.00018782092917155047,
"loss": 0.2824,
"step": 6350
},
{
"epoch": 0.9887223852927545,
"grad_norm": 2.34375,
"learning_rate": 0.00018733284652522285,
"loss": 0.2911,
"step": 6400
},
{
"epoch": 0.9964467789278542,
"grad_norm": 2.15625,
"learning_rate": 0.0001868358351187932,
"loss": 0.2468,
"step": 6450
},
{
"epoch": 1.0041711725629539,
"grad_norm": 1.375,
"learning_rate": 0.00018632994576585343,
"loss": 0.2142,
"step": 6500
},
{
"epoch": 1.0118955661980535,
"grad_norm": 1.1875,
"learning_rate": 0.0001858152301876613,
"loss": 0.2236,
"step": 6550
},
{
"epoch": 1.019619959833153,
"grad_norm": 1.0,
"learning_rate": 0.00018529174100785276,
"loss": 0.1858,
"step": 6600
},
{
"epoch": 1.0273443534682527,
"grad_norm": 1.0625,
"learning_rate": 0.0001847595317470616,
"loss": 0.1911,
"step": 6650
},
{
"epoch": 1.0350687471033524,
"grad_norm": 1.4296875,
"learning_rate": 0.00018421865681744786,
"loss": 0.2166,
"step": 6700
},
{
"epoch": 1.042793140738452,
"grad_norm": 2.0,
"learning_rate": 0.0001836691715171345,
"loss": 0.2184,
"step": 6750
},
{
"epoch": 1.0505175343735518,
"grad_norm": 0.8828125,
"learning_rate": 0.00018311113202455407,
"loss": 0.2082,
"step": 6800
},
{
"epoch": 1.0582419280086512,
"grad_norm": 1.125,
"learning_rate": 0.00018254459539270495,
"loss": 0.2122,
"step": 6850
},
{
"epoch": 1.065966321643751,
"grad_norm": 2.8125,
"learning_rate": 0.00018196961954331862,
"loss": 0.2062,
"step": 6900
},
{
"epoch": 1.0736907152788506,
"grad_norm": 2.046875,
"learning_rate": 0.00018138626326093742,
"loss": 0.203,
"step": 6950
},
{
"epoch": 1.0814151089139503,
"grad_norm": 1.8046875,
"learning_rate": 0.00018079458618690491,
"loss": 0.1923,
"step": 7000
},
{
"epoch": 1.0891395025490498,
"grad_norm": 1.390625,
"learning_rate": 0.00018019464881326803,
"loss": 0.1923,
"step": 7050
},
{
"epoch": 1.0968638961841495,
"grad_norm": 1.2734375,
"learning_rate": 0.00017958651247659257,
"loss": 0.1892,
"step": 7100
},
{
"epoch": 1.1045882898192492,
"grad_norm": 0.83984375,
"learning_rate": 0.00017897023935169214,
"loss": 0.2116,
"step": 7150
},
{
"epoch": 1.1123126834543489,
"grad_norm": 0.376953125,
"learning_rate": 0.0001783458924452717,
"loss": 0.1857,
"step": 7200
},
{
"epoch": 1.1200370770894486,
"grad_norm": 1.40625,
"learning_rate": 0.00017771353558948568,
"loss": 0.2102,
"step": 7250
},
{
"epoch": 1.1277614707245482,
"grad_norm": 1.203125,
"learning_rate": 0.00017707323343541198,
"loss": 0.2191,
"step": 7300
},
{
"epoch": 1.1354858643596477,
"grad_norm": 2.234375,
"learning_rate": 0.00017642505144644212,
"loss": 0.203,
"step": 7350
},
{
"epoch": 1.1432102579947474,
"grad_norm": 1.4140625,
"learning_rate": 0.00017576905589158844,
"loss": 0.2028,
"step": 7400
},
{
"epoch": 1.150934651629847,
"grad_norm": 1.203125,
"learning_rate": 0.00017510531383870882,
"loss": 0.2027,
"step": 7450
},
{
"epoch": 1.1586590452649468,
"grad_norm": 1.8984375,
"learning_rate": 0.00017443389314764977,
"loss": 0.2239,
"step": 7500
},
{
"epoch": 1.1663834389000463,
"grad_norm": 2.3125,
"learning_rate": 0.00017375486246330848,
"loss": 0.1946,
"step": 7550
},
{
"epoch": 1.174107832535146,
"grad_norm": 0.609375,
"learning_rate": 0.0001730682912086149,
"loss": 0.2018,
"step": 7600
},
{
"epoch": 1.1818322261702456,
"grad_norm": 1.3671875,
"learning_rate": 0.0001723742495774339,
"loss": 0.189,
"step": 7650
},
{
"epoch": 1.1895566198053453,
"grad_norm": 4.375,
"learning_rate": 0.00017167280852738878,
"loss": 0.2031,
"step": 7700
},
{
"epoch": 1.197281013440445,
"grad_norm": 3.453125,
"learning_rate": 0.00017096403977260666,
"loss": 0.2191,
"step": 7750
},
{
"epoch": 1.2050054070755445,
"grad_norm": 1.5390625,
"learning_rate": 0.0001702480157763867,
"loss": 0.211,
"step": 7800
},
{
"epoch": 1.2127298007106442,
"grad_norm": 0.51171875,
"learning_rate": 0.00016952480974379136,
"loss": 0.2126,
"step": 7850
},
{
"epoch": 1.2204541943457439,
"grad_norm": 1.0703125,
"learning_rate": 0.00016879449561416228,
"loss": 0.1914,
"step": 7900
},
{
"epoch": 1.2281785879808436,
"grad_norm": 0.8671875,
"learning_rate": 0.00016805714805356056,
"loss": 0.1786,
"step": 7950
},
{
"epoch": 1.235902981615943,
"grad_norm": 1.421875,
"learning_rate": 0.00016731284244713335,
"loss": 0.2282,
"step": 8000
},
{
"epoch": 1.2436273752510427,
"grad_norm": 2.953125,
"learning_rate": 0.00016656165489140632,
"loss": 0.1904,
"step": 8050
},
{
"epoch": 1.2513517688861424,
"grad_norm": 1.9140625,
"learning_rate": 0.0001658036621865039,
"loss": 0.1763,
"step": 8100
},
{
"epoch": 1.259076162521242,
"grad_norm": 1.515625,
"learning_rate": 0.0001650389418282973,
"loss": 0.2155,
"step": 8150
},
{
"epoch": 1.2668005561563418,
"grad_norm": 2.484375,
"learning_rate": 0.00016426757200048125,
"loss": 0.2114,
"step": 8200
},
{
"epoch": 1.2745249497914415,
"grad_norm": 2.71875,
"learning_rate": 0.00016348963156658105,
"loss": 0.1681,
"step": 8250
},
{
"epoch": 1.282249343426541,
"grad_norm": 1.4921875,
"learning_rate": 0.00016270520006188925,
"loss": 0.1818,
"step": 8300
},
{
"epoch": 1.2899737370616406,
"grad_norm": 1.734375,
"learning_rate": 0.00016191435768533457,
"loss": 0.1826,
"step": 8350
},
{
"epoch": 1.2976981306967403,
"grad_norm": 0.74609375,
"learning_rate": 0.000161117185291282,
"loss": 0.1847,
"step": 8400
},
{
"epoch": 1.30542252433184,
"grad_norm": 1.265625,
"learning_rate": 0.00016031376438126687,
"loss": 0.2101,
"step": 8450
},
{
"epoch": 1.3131469179669395,
"grad_norm": 1.0625,
"learning_rate": 0.00015950417709566182,
"loss": 0.193,
"step": 8500
},
{
"epoch": 1.3208713116020392,
"grad_norm": 1.0234375,
"learning_rate": 0.0001586885062052793,
"loss": 0.2035,
"step": 8550
},
{
"epoch": 1.3285957052371389,
"grad_norm": 1.2421875,
"learning_rate": 0.000157866835102909,
"loss": 0.1661,
"step": 8600
},
{
"epoch": 1.3363200988722386,
"grad_norm": 4.03125,
"learning_rate": 0.00015703924779479186,
"loss": 0.1882,
"step": 8650
},
{
"epoch": 1.3440444925073383,
"grad_norm": 0.7734375,
"learning_rate": 0.00015620582889203175,
"loss": 0.1959,
"step": 8700
},
{
"epoch": 1.351768886142438,
"grad_norm": 1.578125,
"learning_rate": 0.00015536666360194454,
"loss": 0.1594,
"step": 8750
},
{
"epoch": 1.3594932797775374,
"grad_norm": 1.890625,
"learning_rate": 0.00015452183771934696,
"loss": 0.1855,
"step": 8800
},
{
"epoch": 1.3672176734126371,
"grad_norm": 1.0078125,
"learning_rate": 0.00015367143761778503,
"loss": 0.1806,
"step": 8850
},
{
"epoch": 1.3749420670477368,
"grad_norm": 1.6796875,
"learning_rate": 0.00015281555024070323,
"loss": 0.1944,
"step": 8900
},
{
"epoch": 1.3826664606828363,
"grad_norm": 1.6171875,
"learning_rate": 0.0001519542630925557,
"loss": 0.1872,
"step": 8950
},
{
"epoch": 1.390390854317936,
"grad_norm": 1.1875,
"learning_rate": 0.00015108766422985994,
"loss": 0.1587,
"step": 9000
},
{
"epoch": 1.3981152479530357,
"grad_norm": 1.703125,
"learning_rate": 0.00015021584225219394,
"loss": 0.1686,
"step": 9050
},
{
"epoch": 1.4058396415881353,
"grad_norm": 0.63671875,
"learning_rate": 0.00014933888629313803,
"loss": 0.1766,
"step": 9100
},
{
"epoch": 1.413564035223235,
"grad_norm": 2.0,
"learning_rate": 0.00014845688601116194,
"loss": 0.1688,
"step": 9150
},
{
"epoch": 1.4212884288583347,
"grad_norm": 1.2109375,
"learning_rate": 0.00014756993158045837,
"loss": 0.175,
"step": 9200
},
{
"epoch": 1.4290128224934342,
"grad_norm": 1.578125,
"learning_rate": 0.0001466781136817236,
"loss": 0.1549,
"step": 9250
},
{
"epoch": 1.436737216128534,
"grad_norm": 1.546875,
"learning_rate": 0.00014578152349288648,
"loss": 0.1776,
"step": 9300
},
{
"epoch": 1.4444616097636336,
"grad_norm": 0.953125,
"learning_rate": 0.00014488025267978683,
"loss": 0.1887,
"step": 9350
},
{
"epoch": 1.4521860033987333,
"grad_norm": 0.94921875,
"learning_rate": 0.0001439743933868031,
"loss": 0.1854,
"step": 9400
},
{
"epoch": 1.4599103970338327,
"grad_norm": 0.79296875,
"learning_rate": 0.0001430640382274323,
"loss": 0.16,
"step": 9450
},
{
"epoch": 1.4676347906689324,
"grad_norm": 0.921875,
"learning_rate": 0.000142149280274821,
"loss": 0.1752,
"step": 9500
},
{
"epoch": 1.4753591843040321,
"grad_norm": 1.1171875,
"learning_rate": 0.00014123021305224978,
"loss": 0.1845,
"step": 9550
},
{
"epoch": 1.4830835779391318,
"grad_norm": 1.1640625,
"learning_rate": 0.00014030693052357164,
"loss": 0.1675,
"step": 9600
},
{
"epoch": 1.4908079715742315,
"grad_norm": 0.9453125,
"learning_rate": 0.00013937952708360508,
"loss": 0.1677,
"step": 9650
},
{
"epoch": 1.4985323652093312,
"grad_norm": 1.640625,
"learning_rate": 0.00013844809754848361,
"loss": 0.1817,
"step": 9700
},
{
"epoch": 1.5062567588444307,
"grad_norm": 1.5703125,
"learning_rate": 0.0001375127371459618,
"loss": 0.1588,
"step": 9750
},
{
"epoch": 1.5139811524795304,
"grad_norm": 3.328125,
"learning_rate": 0.00013657354150567924,
"loss": 0.1769,
"step": 9800
},
{
"epoch": 1.52170554611463,
"grad_norm": 1.7734375,
"learning_rate": 0.00013563060664938375,
"loss": 0.1604,
"step": 9850
},
{
"epoch": 1.5294299397497295,
"grad_norm": 1.1484375,
"learning_rate": 0.00013468402898111407,
"loss": 0.1549,
"step": 9900
},
{
"epoch": 1.5371543333848292,
"grad_norm": 1.8125,
"learning_rate": 0.00013373390527734387,
"loss": 0.1789,
"step": 9950
},
{
"epoch": 1.544878727019929,
"grad_norm": 0.5859375,
"learning_rate": 0.0001327803326770873,
"loss": 0.1651,
"step": 10000
},
{
"epoch": 1.5526031206550286,
"grad_norm": 1.4765625,
"learning_rate": 0.0001318234086719678,
"loss": 0.1789,
"step": 10050
},
{
"epoch": 1.5603275142901283,
"grad_norm": 2.125,
"learning_rate": 0.00013086323109625072,
"loss": 0.1774,
"step": 10100
},
{
"epoch": 1.568051907925228,
"grad_norm": 1.953125,
"learning_rate": 0.0001298998981168408,
"loss": 0.1604,
"step": 10150
},
{
"epoch": 1.5757763015603277,
"grad_norm": 2.890625,
"learning_rate": 0.0001289335082232459,
"loss": 0.1755,
"step": 10200
},
{
"epoch": 1.5835006951954271,
"grad_norm": 0.7578125,
"learning_rate": 0.0001279641602175075,
"loss": 0.1554,
"step": 10250
},
{
"epoch": 1.5912250888305268,
"grad_norm": 0.4609375,
"learning_rate": 0.00012699195320409938,
"loss": 0.1518,
"step": 10300
},
{
"epoch": 1.5989494824656263,
"grad_norm": 0.6484375,
"learning_rate": 0.00012601698657979527,
"loss": 0.1689,
"step": 10350
},
{
"epoch": 1.606673876100726,
"grad_norm": 0.6953125,
"learning_rate": 0.00012503936002350699,
"loss": 0.139,
"step": 10400
},
{
"epoch": 1.6143982697358257,
"grad_norm": 1.171875,
"learning_rate": 0.00012405917348609296,
"loss": 0.1465,
"step": 10450
},
{
"epoch": 1.6221226633709254,
"grad_norm": 1.6875,
"learning_rate": 0.0001230765271801399,
"loss": 0.1446,
"step": 10500
},
{
"epoch": 1.629847057006025,
"grad_norm": 0.9921875,
"learning_rate": 0.00012209152156971692,
"loss": 0.1393,
"step": 10550
},
{
"epoch": 1.6375714506411247,
"grad_norm": 1.1640625,
"learning_rate": 0.00012110425736010438,
"loss": 0.1418,
"step": 10600
},
{
"epoch": 1.6452958442762244,
"grad_norm": 0.52734375,
"learning_rate": 0.00012011483548749793,
"loss": 0.1552,
"step": 10650
},
{
"epoch": 1.653020237911324,
"grad_norm": 0.44140625,
"learning_rate": 0.00011912335710868905,
"loss": 0.1533,
"step": 10700
},
{
"epoch": 1.6607446315464236,
"grad_norm": 0.5625,
"learning_rate": 0.00011812992359072275,
"loss": 0.1625,
"step": 10750
},
{
"epoch": 1.6684690251815233,
"grad_norm": 2.6875,
"learning_rate": 0.00011713463650053423,
"loss": 0.1542,
"step": 10800
},
{
"epoch": 1.6761934188166228,
"grad_norm": 1.453125,
"learning_rate": 0.00011613759759456462,
"loss": 0.1306,
"step": 10850
},
{
"epoch": 1.6839178124517225,
"grad_norm": 0.89453125,
"learning_rate": 0.00011513890880835776,
"loss": 0.1784,
"step": 10900
},
{
"epoch": 1.6916422060868221,
"grad_norm": 2.46875,
"learning_rate": 0.00011413867224613843,
"loss": 0.1384,
"step": 10950
},
{
"epoch": 1.6993665997219218,
"grad_norm": 2.0,
"learning_rate": 0.00011313699017037327,
"loss": 0.1279,
"step": 11000
},
{
"epoch": 1.7070909933570215,
"grad_norm": 0.90234375,
"learning_rate": 0.00011213396499131581,
"loss": 0.1615,
"step": 11050
},
{
"epoch": 1.7148153869921212,
"grad_norm": 1.0,
"learning_rate": 0.00011112969925653618,
"loss": 0.1344,
"step": 11100
},
{
"epoch": 1.722539780627221,
"grad_norm": 0.50390625,
"learning_rate": 0.00011012429564043667,
"loss": 0.155,
"step": 11150
},
{
"epoch": 1.7302641742623204,
"grad_norm": 0.51953125,
"learning_rate": 0.00010911785693375478,
"loss": 0.1394,
"step": 11200
},
{
"epoch": 1.73798856789742,
"grad_norm": 0.88671875,
"learning_rate": 0.00010811048603305371,
"loss": 0.1499,
"step": 11250
},
{
"epoch": 1.7457129615325195,
"grad_norm": 1.2890625,
"learning_rate": 0.00010710228593020271,
"loss": 0.1247,
"step": 11300
},
{
"epoch": 1.7534373551676192,
"grad_norm": 4.0,
"learning_rate": 0.00010609335970184715,
"loss": 0.1259,
"step": 11350
},
{
"epoch": 1.761161748802719,
"grad_norm": 1.7421875,
"learning_rate": 0.00010508381049887029,
"loss": 0.1631,
"step": 11400
},
{
"epoch": 1.7688861424378186,
"grad_norm": 1.0390625,
"learning_rate": 0.00010407374153584716,
"loss": 0.1414,
"step": 11450
},
{
"epoch": 1.7766105360729183,
"grad_norm": 1.3125,
"learning_rate": 0.00010306325608049227,
"loss": 0.1276,
"step": 11500
},
{
"epoch": 1.784334929708018,
"grad_norm": 1.5625,
"learning_rate": 0.0001020524574431015,
"loss": 0.1586,
"step": 11550
},
{
"epoch": 1.7920593233431177,
"grad_norm": 2.734375,
"learning_rate": 0.00010104144896598997,
"loss": 0.1461,
"step": 11600
},
{
"epoch": 1.7997837169782174,
"grad_norm": 0.7734375,
"learning_rate": 0.00010003033401292642,
"loss": 0.1247,
"step": 11650
},
{
"epoch": 1.8075081106133168,
"grad_norm": 3.03125,
"learning_rate": 9.901921595856549e-05,
"loss": 0.1482,
"step": 11700
},
{
"epoch": 1.8152325042484165,
"grad_norm": 1.1796875,
"learning_rate": 9.800819817787906e-05,
"loss": 0.1543,
"step": 11750
},
{
"epoch": 1.822956897883516,
"grad_norm": 0.3828125,
"learning_rate": 9.699738403558688e-05,
"loss": 0.1474,
"step": 11800
},
{
"epoch": 1.8306812915186157,
"grad_norm": 2.46875,
"learning_rate": 9.59868768755894e-05,
"loss": 0.139,
"step": 11850
},
{
"epoch": 1.8384056851537154,
"grad_norm": 0.9921875,
"learning_rate": 9.497678001040152e-05,
"loss": 0.1368,
"step": 11900
},
{
"epoch": 1.846130078788815,
"grad_norm": 1.6640625,
"learning_rate": 9.396719671059039e-05,
"loss": 0.1191,
"step": 11950
},
{
"epoch": 1.8538544724239148,
"grad_norm": 1.0859375,
"learning_rate": 9.295823019421707e-05,
"loss": 0.1345,
"step": 12000
},
{
"epoch": 1.8615788660590145,
"grad_norm": 1.0,
"learning_rate": 9.194998361628373e-05,
"loss": 0.1333,
"step": 12050
},
{
"epoch": 1.8693032596941141,
"grad_norm": 2.40625,
"learning_rate": 9.094256005818736e-05,
"loss": 0.1419,
"step": 12100
},
{
"epoch": 1.8770276533292136,
"grad_norm": 2.765625,
"learning_rate": 8.993606251718072e-05,
"loss": 0.1546,
"step": 12150
},
{
"epoch": 1.8847520469643133,
"grad_norm": 3.109375,
"learning_rate": 8.893059389584226e-05,
"loss": 0.1493,
"step": 12200
},
{
"epoch": 1.892476440599413,
"grad_norm": 3.515625,
"learning_rate": 8.792625699155531e-05,
"loss": 0.1421,
"step": 12250
},
{
"epoch": 1.9002008342345125,
"grad_norm": 1.2421875,
"learning_rate": 8.692315448599864e-05,
"loss": 0.1322,
"step": 12300
},
{
"epoch": 1.9079252278696122,
"grad_norm": 0.8046875,
"learning_rate": 8.592138893464792e-05,
"loss": 0.136,
"step": 12350
},
{
"epoch": 1.9156496215047119,
"grad_norm": 2.21875,
"learning_rate": 8.492106275629117e-05,
"loss": 0.1334,
"step": 12400
},
{
"epoch": 1.9233740151398115,
"grad_norm": 1.15625,
"learning_rate": 8.392227822255723e-05,
"loss": 0.1314,
"step": 12450
},
{
"epoch": 1.9310984087749112,
"grad_norm": 0.390625,
"learning_rate": 8.292513744745992e-05,
"loss": 0.1123,
"step": 12500
},
{
"epoch": 1.938822802410011,
"grad_norm": 3.921875,
"learning_rate": 8.192974237695798e-05,
"loss": 0.1132,
"step": 12550
},
{
"epoch": 1.9465471960451106,
"grad_norm": 1.3984375,
"learning_rate": 8.093619477853233e-05,
"loss": 0.112,
"step": 12600
},
{
"epoch": 1.95427158968021,
"grad_norm": 1.4609375,
"learning_rate": 7.99445962307815e-05,
"loss": 0.1074,
"step": 12650
},
{
"epoch": 1.9619959833153098,
"grad_norm": 1.359375,
"learning_rate": 7.89550481130364e-05,
"loss": 0.1303,
"step": 12700
},
{
"epoch": 1.9697203769504092,
"grad_norm": 0.39453125,
"learning_rate": 7.796765159499558e-05,
"loss": 0.1272,
"step": 12750
},
{
"epoch": 1.977444770585509,
"grad_norm": 1.078125,
"learning_rate": 7.698250762638167e-05,
"loss": 0.1154,
"step": 12800
},
{
"epoch": 1.9851691642206086,
"grad_norm": 3.734375,
"learning_rate": 7.599971692662056e-05,
"loss": 0.1245,
"step": 12850
},
{
"epoch": 1.9928935578557083,
"grad_norm": 0.447265625,
"learning_rate": 7.501937997454392e-05,
"loss": 0.1121,
"step": 12900
},
{
"epoch": 2.000617951490808,
"grad_norm": 1.578125,
"learning_rate": 7.404159699811656e-05,
"loss": 0.1051,
"step": 12950
},
{
"epoch": 2.0083423451259077,
"grad_norm": 0.87109375,
"learning_rate": 7.306646796418912e-05,
"loss": 0.0843,
"step": 13000
},
{
"epoch": 2.0160667387610074,
"grad_norm": 0.8359375,
"learning_rate": 7.209409256827782e-05,
"loss": 0.0768,
"step": 13050
},
{
"epoch": 2.023791132396107,
"grad_norm": 0.98828125,
"learning_rate": 7.112457022437155e-05,
"loss": 0.0752,
"step": 13100
},
{
"epoch": 2.0315155260312068,
"grad_norm": 1.2734375,
"learning_rate": 7.015800005476818e-05,
"loss": 0.0776,
"step": 13150
},
{
"epoch": 2.039239919666306,
"grad_norm": 1.1015625,
"learning_rate": 6.919448087994023e-05,
"loss": 0.0857,
"step": 13200
},
{
"epoch": 2.0469643133014057,
"grad_norm": 2.609375,
"learning_rate": 6.823411120843188e-05,
"loss": 0.0769,
"step": 13250
},
{
"epoch": 2.0546887069365054,
"grad_norm": 0.80078125,
"learning_rate": 6.727698922678745e-05,
"loss": 0.0902,
"step": 13300
},
{
"epoch": 2.062413100571605,
"grad_norm": 0.796875,
"learning_rate": 6.632321278951299e-05,
"loss": 0.0841,
"step": 13350
},
{
"epoch": 2.070137494206705,
"grad_norm": 1.0546875,
"learning_rate": 6.537287940907194e-05,
"loss": 0.0771,
"step": 13400
},
{
"epoch": 2.0778618878418045,
"grad_norm": 0.62890625,
"learning_rate": 6.442608624591541e-05,
"loss": 0.0785,
"step": 13450
},
{
"epoch": 2.085586281476904,
"grad_norm": 1.609375,
"learning_rate": 6.348293009854896e-05,
"loss": 0.0795,
"step": 13500
},
{
"epoch": 2.093310675112004,
"grad_norm": 3.1875,
"learning_rate": 6.254350739363564e-05,
"loss": 0.0742,
"step": 13550
},
{
"epoch": 2.1010350687471036,
"grad_norm": 0.373046875,
"learning_rate": 6.160791417613805e-05,
"loss": 0.079,
"step": 13600
},
{
"epoch": 2.108759462382203,
"grad_norm": 0.74609375,
"learning_rate": 6.067624609949837e-05,
"loss": 0.0717,
"step": 13650
},
{
"epoch": 2.1164838560173025,
"grad_norm": 0.61328125,
"learning_rate": 5.974859841585925e-05,
"loss": 0.0743,
"step": 13700
},
{
"epoch": 2.124208249652402,
"grad_norm": 2.171875,
"learning_rate": 5.882506596632517e-05,
"loss": 0.0726,
"step": 13750
},
{
"epoch": 2.131932643287502,
"grad_norm": 0.94140625,
"learning_rate": 5.790574317126625e-05,
"loss": 0.0848,
"step": 13800
},
{
"epoch": 2.1396570369226016,
"grad_norm": 0.58203125,
"learning_rate": 5.699072402066468e-05,
"loss": 0.0738,
"step": 13850
},
{
"epoch": 2.1473814305577013,
"grad_norm": 1.3671875,
"learning_rate": 5.608010206450549e-05,
"loss": 0.0675,
"step": 13900
},
{
"epoch": 2.155105824192801,
"grad_norm": 2.5,
"learning_rate": 5.5173970403212174e-05,
"loss": 0.0886,
"step": 13950
},
{
"epoch": 2.1628302178279006,
"grad_norm": 1.2734375,
"learning_rate": 5.427242167812803e-05,
"loss": 0.0796,
"step": 14000
},
{
"epoch": 2.1705546114630003,
"grad_norm": 0.70703125,
"learning_rate": 5.337554806204497e-05,
"loss": 0.0626,
"step": 14050
},
{
"epoch": 2.1782790050980996,
"grad_norm": 1.3359375,
"learning_rate": 5.2483441249779755e-05,
"loss": 0.0686,
"step": 14100
},
{
"epoch": 2.1860033987331993,
"grad_norm": 0.6796875,
"learning_rate": 5.159619244879936e-05,
"loss": 0.07,
"step": 14150
},
{
"epoch": 2.193727792368299,
"grad_norm": 0.6640625,
"learning_rate": 5.07138923698959e-05,
"loss": 0.0739,
"step": 14200
},
{
"epoch": 2.2014521860033986,
"grad_norm": 1.0,
"learning_rate": 4.983663121791289e-05,
"loss": 0.0724,
"step": 14250
},
{
"epoch": 2.2091765796384983,
"grad_norm": 1.7578125,
"learning_rate": 4.8964498682522374e-05,
"loss": 0.071,
"step": 14300
},
{
"epoch": 2.216900973273598,
"grad_norm": 0.9296875,
"learning_rate": 4.80975839290556e-05,
"loss": 0.0729,
"step": 14350
},
{
"epoch": 2.2246253669086977,
"grad_norm": 2.453125,
"learning_rate": 4.723597558938672e-05,
"loss": 0.0758,
"step": 14400
},
{
"epoch": 2.2323497605437974,
"grad_norm": 1.0234375,
"learning_rate": 4.637976175287129e-05,
"loss": 0.0639,
"step": 14450
},
{
"epoch": 2.240074154178897,
"grad_norm": 1.90625,
"learning_rate": 4.552902995734003e-05,
"loss": 0.0739,
"step": 14500
},
{
"epoch": 2.247798547813997,
"grad_norm": 0.357421875,
"learning_rate": 4.468386718014934e-05,
"loss": 0.0634,
"step": 14550
},
{
"epoch": 2.2555229414490965,
"grad_norm": 1.296875,
"learning_rate": 4.384435982928871e-05,
"loss": 0.0658,
"step": 14600
},
{
"epoch": 2.2632473350841957,
"grad_norm": 2.59375,
"learning_rate": 4.30105937345466e-05,
"loss": 0.07,
"step": 14650
},
{
"epoch": 2.2709717287192954,
"grad_norm": 1.7578125,
"learning_rate": 4.218265413873519e-05,
"loss": 0.0671,
"step": 14700
},
{
"epoch": 2.278696122354395,
"grad_norm": 0.9765625,
"learning_rate": 4.136062568897556e-05,
"loss": 0.081,
"step": 14750
},
{
"epoch": 2.286420515989495,
"grad_norm": 0.50390625,
"learning_rate": 4.054459242804338e-05,
"loss": 0.0746,
"step": 14800
},
{
"epoch": 2.2941449096245945,
"grad_norm": 0.5703125,
"learning_rate": 3.973463778577634e-05,
"loss": 0.0755,
"step": 14850
},
{
"epoch": 2.301869303259694,
"grad_norm": 0.484375,
"learning_rate": 3.89308445705449e-05,
"loss": 0.0626,
"step": 14900
},
{
"epoch": 2.309593696894794,
"grad_norm": 0.60546875,
"learning_rate": 3.813329496078553e-05,
"loss": 0.0772,
"step": 14950
},
{
"epoch": 2.3173180905298936,
"grad_norm": 0.3984375,
"learning_rate": 3.734207049659937e-05,
"loss": 0.0705,
"step": 15000
},
{
"epoch": 2.3250424841649933,
"grad_norm": 0.482421875,
"learning_rate": 3.65572520714155e-05,
"loss": 0.0748,
"step": 15050
},
{
"epoch": 2.3327668778000925,
"grad_norm": 0.67578125,
"learning_rate": 3.577891992372066e-05,
"loss": 0.0687,
"step": 15100
},
{
"epoch": 2.340491271435192,
"grad_norm": 1.46875,
"learning_rate": 3.5007153628855546e-05,
"loss": 0.063,
"step": 15150
},
{
"epoch": 2.348215665070292,
"grad_norm": 0.73046875,
"learning_rate": 3.4242032090879536e-05,
"loss": 0.0662,
"step": 15200
},
{
"epoch": 2.3559400587053916,
"grad_norm": 0.5625,
"learning_rate": 3.3483633534503413e-05,
"loss": 0.0731,
"step": 15250
},
{
"epoch": 2.3636644523404913,
"grad_norm": 1.34375,
"learning_rate": 3.27320354970919e-05,
"loss": 0.0733,
"step": 15300
},
{
"epoch": 2.371388845975591,
"grad_norm": 0.77734375,
"learning_rate": 3.1987314820736257e-05,
"loss": 0.0645,
"step": 15350
},
{
"epoch": 2.3791132396106907,
"grad_norm": 0.515625,
"learning_rate": 3.12495476443983e-05,
"loss": 0.0822,
"step": 15400
},
{
"epoch": 2.3868376332457903,
"grad_norm": 0.486328125,
"learning_rate": 3.051880939612592e-05,
"loss": 0.0621,
"step": 15450
},
{
"epoch": 2.39456202688089,
"grad_norm": 1.4296875,
"learning_rate": 2.9795174785341384e-05,
"loss": 0.0744,
"step": 15500
},
{
"epoch": 2.4022864205159893,
"grad_norm": 0.4453125,
"learning_rate": 2.9078717795203482e-05,
"loss": 0.0648,
"step": 15550
},
{
"epoch": 2.410010814151089,
"grad_norm": 0.6640625,
"learning_rate": 2.836951167504317e-05,
"loss": 0.0699,
"step": 15600
},
{
"epoch": 2.4177352077861887,
"grad_norm": 1.1875,
"learning_rate": 2.766762893287509e-05,
"loss": 0.0666,
"step": 15650
},
{
"epoch": 2.4254596014212884,
"grad_norm": 1.015625,
"learning_rate": 2.6973141327984232e-05,
"loss": 0.066,
"step": 15700
},
{
"epoch": 2.433183995056388,
"grad_norm": 0.671875,
"learning_rate": 2.6286119863589532e-05,
"loss": 0.0709,
"step": 15750
},
{
"epoch": 2.4409083886914877,
"grad_norm": 0.8515625,
"learning_rate": 2.5606634779584414e-05,
"loss": 0.0598,
"step": 15800
},
{
"epoch": 2.4486327823265874,
"grad_norm": 1.078125,
"learning_rate": 2.4934755545355926e-05,
"loss": 0.0824,
"step": 15850
},
{
"epoch": 2.456357175961687,
"grad_norm": 0.49609375,
"learning_rate": 2.4270550852681916e-05,
"loss": 0.067,
"step": 15900
},
{
"epoch": 2.464081569596787,
"grad_norm": 0.59375,
"learning_rate": 2.361408860870843e-05,
"loss": 0.0591,
"step": 15950
},
{
"epoch": 2.471805963231886,
"grad_norm": 1.09375,
"learning_rate": 2.296543592900683e-05,
"loss": 0.0662,
"step": 16000
},
{
"epoch": 2.4795303568669858,
"grad_norm": 2.265625,
"learning_rate": 2.2324659130712e-05,
"loss": 0.0623,
"step": 16050
},
{
"epoch": 2.4872547505020854,
"grad_norm": 0.84375,
"learning_rate": 2.169182372574231e-05,
"loss": 0.0692,
"step": 16100
},
{
"epoch": 2.494979144137185,
"grad_norm": 0.3828125,
"learning_rate": 2.1066994414101703e-05,
"loss": 0.0577,
"step": 16150
},
{
"epoch": 2.502703537772285,
"grad_norm": 2.140625,
"learning_rate": 2.0450235077264955e-05,
"loss": 0.0685,
"step": 16200
},
{
"epoch": 2.5104279314073845,
"grad_norm": 0.85546875,
"learning_rate": 1.984160877164637e-05,
"loss": 0.0647,
"step": 16250
},
{
"epoch": 2.518152325042484,
"grad_norm": 0.48046875,
"learning_rate": 1.9241177722153235e-05,
"loss": 0.0687,
"step": 16300
},
{
"epoch": 2.525876718677584,
"grad_norm": 1.640625,
"learning_rate": 1.8649003315823887e-05,
"loss": 0.068,
"step": 16350
},
{
"epoch": 2.5336011123126836,
"grad_norm": 1.09375,
"learning_rate": 1.80651460955517e-05,
"loss": 0.0532,
"step": 16400
},
{
"epoch": 2.541325505947783,
"grad_norm": 0.380859375,
"learning_rate": 1.7489665753895146e-05,
"loss": 0.0681,
"step": 16450
},
{
"epoch": 2.549049899582883,
"grad_norm": 0.9140625,
"learning_rate": 1.6922621126975193e-05,
"loss": 0.063,
"step": 16500
},
{
"epoch": 2.556774293217982,
"grad_norm": 0.5546875,
"learning_rate": 1.6364070188459646e-05,
"loss": 0.07,
"step": 16550
},
{
"epoch": 2.564498686853082,
"grad_norm": 0.384765625,
"learning_rate": 1.5814070043636342e-05,
"loss": 0.0799,
"step": 16600
},
{
"epoch": 2.5722230804881816,
"grad_norm": 0.62890625,
"learning_rate": 1.5272676923574637e-05,
"loss": 0.0697,
"step": 16650
},
{
"epoch": 2.5799474741232813,
"grad_norm": 0.3671875,
"learning_rate": 1.473994617937643e-05,
"loss": 0.0637,
"step": 16700
},
{
"epoch": 2.587671867758381,
"grad_norm": 0.5546875,
"learning_rate": 1.4215932276517253e-05,
"loss": 0.0626,
"step": 16750
},
{
"epoch": 2.5953962613934807,
"grad_norm": 0.7421875,
"learning_rate": 1.3700688789277738e-05,
"loss": 0.0735,
"step": 16800
},
{
"epoch": 2.6031206550285804,
"grad_norm": 0.97265625,
"learning_rate": 1.319426839526633e-05,
"loss": 0.062,
"step": 16850
},
{
"epoch": 2.61084504866368,
"grad_norm": 0.578125,
"learning_rate": 1.2696722870033529e-05,
"loss": 0.0632,
"step": 16900
},
{
"epoch": 2.6185694422987797,
"grad_norm": 1.09375,
"learning_rate": 1.2208103081778555e-05,
"loss": 0.0677,
"step": 16950
},
{
"epoch": 2.626293835933879,
"grad_norm": 1.3046875,
"learning_rate": 1.1728458986148627e-05,
"loss": 0.0605,
"step": 17000
},
{
"epoch": 2.6340182295689787,
"grad_norm": 1.0234375,
"learning_rate": 1.1257839621131584e-05,
"loss": 0.0606,
"step": 17050
},
{
"epoch": 2.6417426232040784,
"grad_norm": 0.8046875,
"learning_rate": 1.0796293102042243e-05,
"loss": 0.0641,
"step": 17100
},
{
"epoch": 2.649467016839178,
"grad_norm": 0.64453125,
"learning_rate": 1.034386661660337e-05,
"loss": 0.0632,
"step": 17150
},
{
"epoch": 2.6571914104742778,
"grad_norm": 1.21875,
"learning_rate": 9.900606420121062e-06,
"loss": 0.0671,
"step": 17200
},
{
"epoch": 2.6649158041093775,
"grad_norm": 0.81640625,
"learning_rate": 9.466557830755862e-06,
"loss": 0.0646,
"step": 17250
},
{
"epoch": 2.672640197744477,
"grad_norm": 0.91015625,
"learning_rate": 9.041765224889399e-06,
"loss": 0.0674,
"step": 17300
},
{
"epoch": 2.680364591379577,
"grad_norm": 1.1015625,
"learning_rate": 8.62627203258748e-06,
"loss": 0.058,
"step": 17350
},
{
"epoch": 2.6880889850146765,
"grad_norm": 1.09375,
"learning_rate": 8.220120733159831e-06,
"loss": 0.0563,
"step": 17400
},
{
"epoch": 2.6958133786497758,
"grad_norm": 0.91796875,
"learning_rate": 7.823352850817124e-06,
"loss": 0.0673,
"step": 17450
},
{
"epoch": 2.703537772284876,
"grad_norm": 3.546875,
"learning_rate": 7.4360089504256215e-06,
"loss": 0.0664,
"step": 17500
},
{
"epoch": 2.711262165919975,
"grad_norm": 0.78125,
"learning_rate": 7.058128633359806e-06,
"loss": 0.0691,
"step": 17550
},
{
"epoch": 2.718986559555075,
"grad_norm": 1.6484375,
"learning_rate": 6.689750533453765e-06,
"loss": 0.0606,
"step": 17600
},
{
"epoch": 2.7267109531901745,
"grad_norm": 0.609375,
"learning_rate": 6.33091231305114e-06,
"loss": 0.0665,
"step": 17650
},
{
"epoch": 2.7344353468252742,
"grad_norm": 0.67578125,
"learning_rate": 5.9816506591547475e-06,
"loss": 0.0623,
"step": 17700
},
{
"epoch": 2.742159740460374,
"grad_norm": 0.72265625,
"learning_rate": 5.6420012796757015e-06,
"loss": 0.0665,
"step": 17750
},
{
"epoch": 2.7498841340954736,
"grad_norm": 0.625,
"learning_rate": 5.311998899782711e-06,
"loss": 0.057,
"step": 17800
},
{
"epoch": 2.7576085277305733,
"grad_norm": 0.392578125,
"learning_rate": 4.99167725835179e-06,
"loss": 0.0732,
"step": 17850
},
{
"epoch": 2.7653329213656725,
"grad_norm": 0.8671875,
"learning_rate": 4.681069104516922e-06,
"loss": 0.0653,
"step": 17900
},
{
"epoch": 2.7730573150007727,
"grad_norm": 1.3046875,
"learning_rate": 4.380206194321812e-06,
"loss": 0.0636,
"step": 17950
},
{
"epoch": 2.780781708635872,
"grad_norm": 1.6875,
"learning_rate": 4.089119287473242e-06,
"loss": 0.0631,
"step": 18000
},
{
"epoch": 2.7885061022709716,
"grad_norm": 0.59375,
"learning_rate": 3.8078381441961407e-06,
"loss": 0.063,
"step": 18050
},
{
"epoch": 2.7962304959060713,
"grad_norm": 0.796875,
"learning_rate": 3.536391522191096e-06,
"loss": 0.0797,
"step": 18100
},
{
"epoch": 2.803954889541171,
"grad_norm": 1.234375,
"learning_rate": 3.2748071736941164e-06,
"loss": 0.0595,
"step": 18150
},
{
"epoch": 2.8116792831762707,
"grad_norm": 0.6015625,
"learning_rate": 3.0231118426393035e-06,
"loss": 0.0596,
"step": 18200
},
{
"epoch": 2.8194036768113704,
"grad_norm": 1.828125,
"learning_rate": 2.78133126192468e-06,
"loss": 0.0642,
"step": 18250
},
{
"epoch": 2.82712807044647,
"grad_norm": 0.96484375,
"learning_rate": 2.5494901507811418e-06,
"loss": 0.0549,
"step": 18300
},
{
"epoch": 2.8348524640815693,
"grad_norm": 1.8359375,
"learning_rate": 2.327612212245389e-06,
"loss": 0.0636,
"step": 18350
},
{
"epoch": 2.8425768577166695,
"grad_norm": 0.89453125,
"learning_rate": 2.1157201307364315e-06,
"loss": 0.0667,
"step": 18400
},
{
"epoch": 2.8503012513517687,
"grad_norm": 0.49609375,
"learning_rate": 1.913835569736444e-06,
"loss": 0.0613,
"step": 18450
},
{
"epoch": 2.8580256449868684,
"grad_norm": 1.0703125,
"learning_rate": 1.7219791695758824e-06,
"loss": 0.071,
"step": 18500
},
{
"epoch": 2.865750038621968,
"grad_norm": 0.51171875,
"learning_rate": 1.5401705453233051e-06,
"loss": 0.076,
"step": 18550
},
{
"epoch": 2.873474432257068,
"grad_norm": 0.39453125,
"learning_rate": 1.3684282847799213e-06,
"loss": 0.0667,
"step": 18600
},
{
"epoch": 2.8811988258921675,
"grad_norm": 0.703125,
"learning_rate": 1.2067699465792336e-06,
"loss": 0.0651,
"step": 18650
},
{
"epoch": 2.888923219527267,
"grad_norm": 0.455078125,
"learning_rate": 1.0552120583918412e-06,
"loss": 0.0575,
"step": 18700
},
{
"epoch": 2.896647613162367,
"grad_norm": 1.2734375,
"learning_rate": 9.137701152356792e-07,
"loss": 0.0564,
"step": 18750
},
{
"epoch": 2.9043720067974665,
"grad_norm": 0.52734375,
"learning_rate": 7.824585778919091e-07,
"loss": 0.0699,
"step": 18800
},
{
"epoch": 2.9120964004325662,
"grad_norm": 0.67578125,
"learning_rate": 6.612908714263455e-07,
"loss": 0.0628,
"step": 18850
},
{
"epoch": 2.9198207940676655,
"grad_norm": 0.443359375,
"learning_rate": 5.502793838170428e-07,
"loss": 0.0642,
"step": 18900
},
{
"epoch": 2.927545187702765,
"grad_norm": 0.435546875,
"learning_rate": 4.49435464687642e-07,
"loss": 0.0642,
"step": 18950
},
{
"epoch": 2.935269581337865,
"grad_norm": 0.5625,
"learning_rate": 3.587694241470874e-07,
"loss": 0.0614,
"step": 19000
},
{
"epoch": 2.9429939749729646,
"grad_norm": 0.412109375,
"learning_rate": 2.7829053173550333e-07,
"loss": 0.0658,
"step": 19050
},
{
"epoch": 2.9507183686080642,
"grad_norm": 0.419921875,
"learning_rate": 2.0800701547651902e-07,
"loss": 0.0675,
"step": 19100
},
{
"epoch": 2.958442762243164,
"grad_norm": 1.3359375,
"learning_rate": 1.479260610360078e-07,
"loss": 0.0703,
"step": 19150
},
{
"epoch": 2.9661671558782636,
"grad_norm": 0.39453125,
"learning_rate": 9.805381098746402e-08,
"loss": 0.0703,
"step": 19200
},
{
"epoch": 2.9738915495133633,
"grad_norm": 1.171875,
"learning_rate": 5.839536418401625e-08,
"loss": 0.0633,
"step": 19250
},
{
"epoch": 2.981615943148463,
"grad_norm": 2.765625,
"learning_rate": 2.8954775237088894e-08,
"loss": 0.0634,
"step": 19300
},
{
"epoch": 2.9893403367835623,
"grad_norm": 1.6328125,
"learning_rate": 9.735054101878138e-09,
"loss": 0.058,
"step": 19350
},
{
"epoch": 2.9970647304186624,
"grad_norm": 0.9296875,
"learning_rate": 7.381657696425848e-10,
"loss": 0.0586,
"step": 19400
}
],
"logging_steps": 50,
"max_steps": 19419,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 0,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.3806574237646848e+17,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}