BAAI/IndustryCorpus2_DataRater / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.9974025974025974,
"eval_steps": 500,
"global_step": 216,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"grad_norm": 176.64862060546875,
"learning_rate": 9.999998677875842e-06,
"loss": 11.3275,
"step": 1
},
{
"epoch": 0.01,
"grad_norm": 130.30368041992188,
"learning_rate": 9.999994711504062e-06,
"loss": 6.8023,
"step": 2
},
{
"epoch": 0.01,
"grad_norm": 103.31938171386719,
"learning_rate": 9.99998810088676e-06,
"loss": 4.4755,
"step": 3
},
{
"epoch": 0.02,
"grad_norm": 74.88383483886719,
"learning_rate": 9.999978846027432e-06,
"loss": 2.9368,
"step": 4
},
{
"epoch": 0.02,
"grad_norm": 42.227046966552734,
"learning_rate": 9.999966946930972e-06,
"loss": 1.2319,
"step": 5
},
{
"epoch": 0.03,
"grad_norm": 12.707175254821777,
"learning_rate": 9.999952403603674e-06,
"loss": 0.8505,
"step": 6
},
{
"epoch": 0.03,
"grad_norm": 15.292159080505371,
"learning_rate": 9.999935216053227e-06,
"loss": 0.9937,
"step": 7
},
{
"epoch": 0.04,
"grad_norm": 39.702308654785156,
"learning_rate": 9.999915384288723e-06,
"loss": 1.2296,
"step": 8
},
{
"epoch": 0.04,
"grad_norm": 45.62909698486328,
"learning_rate": 9.999892908320647e-06,
"loss": 1.6743,
"step": 9
},
{
"epoch": 0.05,
"grad_norm": 44.297847747802734,
"learning_rate": 9.999867788160888e-06,
"loss": 1.5054,
"step": 10
},
{
"epoch": 0.05,
"grad_norm": 36.63005828857422,
"learning_rate": 9.99984002382273e-06,
"loss": 1.3223,
"step": 11
},
{
"epoch": 0.06,
"grad_norm": 19.54110336303711,
"learning_rate": 9.999809615320857e-06,
"loss": 0.8807,
"step": 12
},
{
"epoch": 0.06,
"grad_norm": 21.871042251586914,
"learning_rate": 9.999776562671349e-06,
"loss": 1.1686,
"step": 13
},
{
"epoch": 0.06,
"grad_norm": 8.16606616973877,
"learning_rate": 9.999740865891686e-06,
"loss": 0.8842,
"step": 14
},
{
"epoch": 0.07,
"grad_norm": 6.233722686767578,
"learning_rate": 9.99970252500075e-06,
"loss": 0.6474,
"step": 15
},
{
"epoch": 0.07,
"grad_norm": 16.253034591674805,
"learning_rate": 9.999661540018812e-06,
"loss": 0.8965,
"step": 16
},
{
"epoch": 0.08,
"grad_norm": 12.703817367553711,
"learning_rate": 9.99961791096755e-06,
"loss": 0.8819,
"step": 17
},
{
"epoch": 0.08,
"grad_norm": 14.576752662658691,
"learning_rate": 9.999571637870035e-06,
"loss": 0.8724,
"step": 18
},
{
"epoch": 0.09,
"grad_norm": 15.657022476196289,
"learning_rate": 9.999522720750743e-06,
"loss": 0.7275,
"step": 19
},
{
"epoch": 0.09,
"grad_norm": 7.936721324920654,
"learning_rate": 9.999471159635538e-06,
"loss": 0.9085,
"step": 20
},
{
"epoch": 0.1,
"grad_norm": 12.384129524230957,
"learning_rate": 9.999416954551693e-06,
"loss": 0.5257,
"step": 21
},
{
"epoch": 0.1,
"grad_norm": 7.639270782470703,
"learning_rate": 9.999360105527871e-06,
"loss": 0.765,
"step": 22
},
{
"epoch": 0.11,
"grad_norm": 8.124340057373047,
"learning_rate": 9.999300612594139e-06,
"loss": 0.6521,
"step": 23
},
{
"epoch": 0.11,
"grad_norm": 15.96132755279541,
"learning_rate": 9.999238475781957e-06,
"loss": 0.8599,
"step": 24
},
{
"epoch": 0.12,
"grad_norm": 6.499070167541504,
"learning_rate": 9.999173695124188e-06,
"loss": 0.5235,
"step": 25
},
{
"epoch": 0.12,
"grad_norm": 13.837294578552246,
"learning_rate": 9.999106270655093e-06,
"loss": 0.7003,
"step": 26
},
{
"epoch": 0.12,
"grad_norm": 8.020848274230957,
"learning_rate": 9.999036202410324e-06,
"loss": 0.7127,
"step": 27
},
{
"epoch": 0.13,
"grad_norm": 7.126096248626709,
"learning_rate": 9.998963490426943e-06,
"loss": 0.6233,
"step": 28
},
{
"epoch": 0.13,
"grad_norm": 9.910087585449219,
"learning_rate": 9.998888134743398e-06,
"loss": 0.6812,
"step": 29
},
{
"epoch": 0.14,
"grad_norm": 17.2391414642334,
"learning_rate": 9.998810135399545e-06,
"loss": 0.6481,
"step": 30
},
{
"epoch": 0.14,
"grad_norm": 18.45667266845703,
"learning_rate": 9.998729492436635e-06,
"loss": 0.6672,
"step": 31
},
{
"epoch": 0.15,
"grad_norm": 22.11907196044922,
"learning_rate": 9.99864620589731e-06,
"loss": 0.4814,
"step": 32
},
{
"epoch": 0.15,
"grad_norm": 11.804239273071289,
"learning_rate": 9.99856027582562e-06,
"loss": 0.6971,
"step": 33
},
{
"epoch": 0.16,
"grad_norm": 6.253346920013428,
"learning_rate": 9.998471702267007e-06,
"loss": 0.4941,
"step": 34
},
{
"epoch": 0.16,
"grad_norm": 9.442488670349121,
"learning_rate": 9.998380485268317e-06,
"loss": 0.5258,
"step": 35
},
{
"epoch": 0.17,
"grad_norm": 10.5572509765625,
"learning_rate": 9.998286624877786e-06,
"loss": 0.5023,
"step": 36
},
{
"epoch": 0.17,
"grad_norm": 11.808622360229492,
"learning_rate": 9.998190121145056e-06,
"loss": 0.5166,
"step": 37
},
{
"epoch": 0.18,
"grad_norm": 5.973758697509766,
"learning_rate": 9.99809097412116e-06,
"loss": 0.4458,
"step": 38
},
{
"epoch": 0.18,
"grad_norm": 7.20625114440918,
"learning_rate": 9.997989183858531e-06,
"loss": 0.4228,
"step": 39
},
{
"epoch": 0.18,
"grad_norm": 13.362131118774414,
"learning_rate": 9.997884750411004e-06,
"loss": 0.681,
"step": 40
},
{
"epoch": 0.19,
"grad_norm": 9.089489936828613,
"learning_rate": 9.997777673833807e-06,
"loss": 0.4588,
"step": 41
},
{
"epoch": 0.19,
"grad_norm": 6.178775787353516,
"learning_rate": 9.997667954183566e-06,
"loss": 0.483,
"step": 42
},
{
"epoch": 0.2,
"grad_norm": 8.83432674407959,
"learning_rate": 9.997555591518307e-06,
"loss": 0.4187,
"step": 43
},
{
"epoch": 0.2,
"grad_norm": 14.531988143920898,
"learning_rate": 9.997440585897455e-06,
"loss": 0.4184,
"step": 44
},
{
"epoch": 0.21,
"grad_norm": 7.901028633117676,
"learning_rate": 9.997322937381829e-06,
"loss": 0.7002,
"step": 45
},
{
"epoch": 0.21,
"grad_norm": 6.49567985534668,
"learning_rate": 9.997202646033649e-06,
"loss": 0.4165,
"step": 46
},
{
"epoch": 0.22,
"grad_norm": 32.977596282958984,
"learning_rate": 9.997079711916527e-06,
"loss": 0.512,
"step": 47
},
{
"epoch": 0.22,
"grad_norm": 7.730500221252441,
"learning_rate": 9.99695413509548e-06,
"loss": 0.5751,
"step": 48
},
{
"epoch": 0.23,
"grad_norm": 7.134888172149658,
"learning_rate": 9.996825915636918e-06,
"loss": 0.5055,
"step": 49
},
{
"epoch": 0.23,
"grad_norm": 11.032304763793945,
"learning_rate": 9.996695053608651e-06,
"loss": 0.5691,
"step": 50
},
{
"epoch": 0.24,
"grad_norm": 28.639476776123047,
"learning_rate": 9.996561549079886e-06,
"loss": 0.4184,
"step": 51
},
{
"epoch": 0.24,
"grad_norm": 8.934990882873535,
"learning_rate": 9.996425402121224e-06,
"loss": 0.4129,
"step": 52
},
{
"epoch": 0.24,
"grad_norm": 8.567286491394043,
"learning_rate": 9.996286612804666e-06,
"loss": 0.5257,
"step": 53
},
{
"epoch": 0.25,
"grad_norm": 12.532737731933594,
"learning_rate": 9.996145181203616e-06,
"loss": 0.4802,
"step": 54
},
{
"epoch": 0.25,
"grad_norm": 7.696206092834473,
"learning_rate": 9.996001107392864e-06,
"loss": 0.5477,
"step": 55
},
{
"epoch": 0.26,
"grad_norm": 7.3067426681518555,
"learning_rate": 9.995854391448607e-06,
"loss": 0.4384,
"step": 56
},
{
"epoch": 0.26,
"grad_norm": 7.36694860458374,
"learning_rate": 9.995705033448435e-06,
"loss": 0.483,
"step": 57
},
{
"epoch": 0.27,
"grad_norm": 16.650466918945312,
"learning_rate": 9.995553033471335e-06,
"loss": 0.5578,
"step": 58
},
{
"epoch": 0.27,
"grad_norm": 11.836296081542969,
"learning_rate": 9.995398391597693e-06,
"loss": 0.4419,
"step": 59
},
{
"epoch": 0.28,
"grad_norm": 6.363118648529053,
"learning_rate": 9.99524110790929e-06,
"loss": 0.424,
"step": 60
},
{
"epoch": 0.28,
"grad_norm": 7.7522101402282715,
"learning_rate": 9.995081182489306e-06,
"loss": 0.4357,
"step": 61
},
{
"epoch": 0.29,
"grad_norm": 7.991522312164307,
"learning_rate": 9.99491861542232e-06,
"loss": 0.437,
"step": 62
},
{
"epoch": 0.29,
"grad_norm": 14.417398452758789,
"learning_rate": 9.994753406794303e-06,
"loss": 0.6347,
"step": 63
},
{
"epoch": 0.3,
"grad_norm": 4.412823677062988,
"learning_rate": 9.994585556692624e-06,
"loss": 0.2691,
"step": 64
},
{
"epoch": 0.3,
"grad_norm": 14.479527473449707,
"learning_rate": 9.994415065206053e-06,
"loss": 0.4691,
"step": 65
},
{
"epoch": 0.3,
"grad_norm": 7.585235595703125,
"learning_rate": 9.994241932424755e-06,
"loss": 0.4724,
"step": 66
},
{
"epoch": 0.31,
"grad_norm": 5.731332302093506,
"learning_rate": 9.99406615844029e-06,
"loss": 0.5277,
"step": 67
},
{
"epoch": 0.31,
"grad_norm": 6.436354637145996,
"learning_rate": 9.993887743345613e-06,
"loss": 0.4632,
"step": 68
},
{
"epoch": 0.32,
"grad_norm": 5.3371992111206055,
"learning_rate": 9.993706687235085e-06,
"loss": 0.3623,
"step": 69
},
{
"epoch": 0.32,
"grad_norm": 10.071105003356934,
"learning_rate": 9.993522990204453e-06,
"loss": 0.4256,
"step": 70
},
{
"epoch": 0.33,
"grad_norm": 12.001503944396973,
"learning_rate": 9.993336652350867e-06,
"loss": 0.5697,
"step": 71
},
{
"epoch": 0.33,
"grad_norm": 10.96662712097168,
"learning_rate": 9.993147673772869e-06,
"loss": 0.3635,
"step": 72
},
{
"epoch": 0.34,
"grad_norm": 8.96445083618164,
"learning_rate": 9.992956054570405e-06,
"loss": 0.3633,
"step": 73
},
{
"epoch": 0.34,
"grad_norm": 10.597489356994629,
"learning_rate": 9.99276179484481e-06,
"loss": 0.5179,
"step": 74
},
{
"epoch": 0.35,
"grad_norm": 37.88457107543945,
"learning_rate": 9.992564894698816e-06,
"loss": 0.3815,
"step": 75
},
{
"epoch": 0.35,
"grad_norm": 10.796125411987305,
"learning_rate": 9.992365354236557e-06,
"loss": 0.3985,
"step": 76
},
{
"epoch": 0.36,
"grad_norm": 6.9689860343933105,
"learning_rate": 9.992163173563559e-06,
"loss": 0.3025,
"step": 77
},
{
"epoch": 0.36,
"grad_norm": 9.331859588623047,
"learning_rate": 9.991958352786744e-06,
"loss": 0.3846,
"step": 78
},
{
"epoch": 0.36,
"grad_norm": 8.02157211303711,
"learning_rate": 9.991750892014433e-06,
"loss": 0.421,
"step": 79
},
{
"epoch": 0.37,
"grad_norm": 18.640714645385742,
"learning_rate": 9.991540791356342e-06,
"loss": 0.6442,
"step": 80
},
{
"epoch": 0.37,
"grad_norm": 7.119135856628418,
"learning_rate": 9.99132805092358e-06,
"loss": 0.4399,
"step": 81
},
{
"epoch": 0.38,
"grad_norm": 9.806747436523438,
"learning_rate": 9.991112670828657e-06,
"loss": 0.3378,
"step": 82
},
{
"epoch": 0.38,
"grad_norm": 7.447767734527588,
"learning_rate": 9.990894651185475e-06,
"loss": 0.4278,
"step": 83
},
{
"epoch": 0.39,
"grad_norm": 28.66916847229004,
"learning_rate": 9.990673992109335e-06,
"loss": 0.4325,
"step": 84
},
{
"epoch": 0.39,
"grad_norm": 13.08967399597168,
"learning_rate": 9.990450693716933e-06,
"loss": 0.3821,
"step": 85
},
{
"epoch": 0.4,
"grad_norm": 11.561846733093262,
"learning_rate": 9.99022475612636e-06,
"loss": 0.4719,
"step": 86
},
{
"epoch": 0.4,
"grad_norm": 11.134137153625488,
"learning_rate": 9.9899961794571e-06,
"loss": 0.4789,
"step": 87
},
{
"epoch": 0.41,
"grad_norm": 5.945651531219482,
"learning_rate": 9.989764963830038e-06,
"loss": 0.5293,
"step": 88
},
{
"epoch": 0.41,
"grad_norm": 6.415275573730469,
"learning_rate": 9.989531109367454e-06,
"loss": 0.489,
"step": 89
},
{
"epoch": 0.42,
"grad_norm": 14.867259979248047,
"learning_rate": 9.989294616193018e-06,
"loss": 0.7109,
"step": 90
},
{
"epoch": 0.42,
"grad_norm": 18.615076065063477,
"learning_rate": 9.989055484431803e-06,
"loss": 0.521,
"step": 91
},
{
"epoch": 0.42,
"grad_norm": 16.754840850830078,
"learning_rate": 9.988813714210272e-06,
"loss": 0.4094,
"step": 92
},
{
"epoch": 0.43,
"grad_norm": 6.629899024963379,
"learning_rate": 9.988569305656286e-06,
"loss": 0.4783,
"step": 93
},
{
"epoch": 0.43,
"grad_norm": 8.339197158813477,
"learning_rate": 9.988322258899099e-06,
"loss": 0.4288,
"step": 94
},
{
"epoch": 0.44,
"grad_norm": 8.170295715332031,
"learning_rate": 9.988072574069363e-06,
"loss": 0.4073,
"step": 95
},
{
"epoch": 0.44,
"grad_norm": 9.301414489746094,
"learning_rate": 9.987820251299121e-06,
"loss": 0.5135,
"step": 96
},
{
"epoch": 0.45,
"grad_norm": 24.828275680541992,
"learning_rate": 9.98756529072182e-06,
"loss": 0.5708,
"step": 97
},
{
"epoch": 0.45,
"grad_norm": 8.960503578186035,
"learning_rate": 9.987307692472288e-06,
"loss": 0.4161,
"step": 98
},
{
"epoch": 0.46,
"grad_norm": 8.696434020996094,
"learning_rate": 9.98704745668676e-06,
"loss": 0.4182,
"step": 99
},
{
"epoch": 0.46,
"grad_norm": 9.047277450561523,
"learning_rate": 9.986784583502863e-06,
"loss": 0.3782,
"step": 100
},
{
"epoch": 0.47,
"grad_norm": 15.280583381652832,
"learning_rate": 9.986519073059613e-06,
"loss": 0.5169,
"step": 101
},
{
"epoch": 0.47,
"grad_norm": 11.528756141662598,
"learning_rate": 9.986250925497429e-06,
"loss": 0.549,
"step": 102
},
{
"epoch": 0.48,
"grad_norm": 7.328488349914551,
"learning_rate": 9.98598014095812e-06,
"loss": 0.4786,
"step": 103
},
{
"epoch": 0.48,
"grad_norm": 7.289353847503662,
"learning_rate": 9.985706719584888e-06,
"loss": 0.3513,
"step": 104
},
{
"epoch": 0.48,
"grad_norm": 10.800015449523926,
"learning_rate": 9.985430661522333e-06,
"loss": 0.4872,
"step": 105
},
{
"epoch": 0.49,
"grad_norm": 13.80408000946045,
"learning_rate": 9.985151966916451e-06,
"loss": 0.389,
"step": 106
},
{
"epoch": 0.49,
"grad_norm": 13.14911937713623,
"learning_rate": 9.984870635914625e-06,
"loss": 0.4488,
"step": 107
},
{
"epoch": 0.5,
"grad_norm": 8.940996170043945,
"learning_rate": 9.984586668665641e-06,
"loss": 0.389,
"step": 108
},
{
"epoch": 0.5,
"grad_norm": 7.7654924392700195,
"learning_rate": 9.984300065319673e-06,
"loss": 0.3816,
"step": 109
},
{
"epoch": 0.51,
"grad_norm": 6.716012954711914,
"learning_rate": 9.984010826028289e-06,
"loss": 0.3724,
"step": 110
},
{
"epoch": 0.51,
"grad_norm": 16.961824417114258,
"learning_rate": 9.983718950944457e-06,
"loss": 0.5116,
"step": 111
},
{
"epoch": 0.52,
"grad_norm": 8.058645248413086,
"learning_rate": 9.98342444022253e-06,
"loss": 0.4868,
"step": 112
},
{
"epoch": 0.52,
"grad_norm": 7.157467365264893,
"learning_rate": 9.983127294018267e-06,
"loss": 0.3658,
"step": 113
},
{
"epoch": 0.53,
"grad_norm": 6.30387544631958,
"learning_rate": 9.982827512488809e-06,
"loss": 0.422,
"step": 114
},
{
"epoch": 0.53,
"grad_norm": 5.9396748542785645,
"learning_rate": 9.982525095792694e-06,
"loss": 0.3137,
"step": 115
},
{
"epoch": 0.54,
"grad_norm": 9.931533813476562,
"learning_rate": 9.98222004408986e-06,
"loss": 0.3283,
"step": 116
},
{
"epoch": 0.54,
"grad_norm": 8.188447952270508,
"learning_rate": 9.981912357541628e-06,
"loss": 0.3709,
"step": 117
},
{
"epoch": 0.54,
"grad_norm": 7.596179485321045,
"learning_rate": 9.981602036310722e-06,
"loss": 0.4277,
"step": 118
},
{
"epoch": 0.55,
"grad_norm": 7.1218767166137695,
"learning_rate": 9.981289080561253e-06,
"loss": 0.338,
"step": 119
},
{
"epoch": 0.55,
"grad_norm": 12.419051170349121,
"learning_rate": 9.980973490458728e-06,
"loss": 0.3293,
"step": 120
},
{
"epoch": 0.56,
"grad_norm": 11.043360710144043,
"learning_rate": 9.980655266170049e-06,
"loss": 0.4129,
"step": 121
},
{
"epoch": 0.56,
"grad_norm": 20.372882843017578,
"learning_rate": 9.980334407863506e-06,
"loss": 0.4247,
"step": 122
},
{
"epoch": 0.57,
"grad_norm": 7.637713432312012,
"learning_rate": 9.980010915708785e-06,
"loss": 0.3427,
"step": 123
},
{
"epoch": 0.57,
"grad_norm": 8.155780792236328,
"learning_rate": 9.979684789876965e-06,
"loss": 0.3592,
"step": 124
},
{
"epoch": 0.58,
"grad_norm": 12.149063110351562,
"learning_rate": 9.97935603054052e-06,
"loss": 0.3915,
"step": 125
},
{
"epoch": 0.58,
"grad_norm": 18.71912956237793,
"learning_rate": 9.979024637873309e-06,
"loss": 0.5225,
"step": 126
},
{
"epoch": 0.59,
"grad_norm": 11.921587944030762,
"learning_rate": 9.978690612050594e-06,
"loss": 0.3912,
"step": 127
},
{
"epoch": 0.59,
"grad_norm": 11.384403228759766,
"learning_rate": 9.978353953249023e-06,
"loss": 0.4185,
"step": 128
},
{
"epoch": 0.6,
"grad_norm": 15.501189231872559,
"learning_rate": 9.978014661646637e-06,
"loss": 0.2584,
"step": 129
},
{
"epoch": 0.6,
"grad_norm": 6.86824893951416,
"learning_rate": 9.97767273742287e-06,
"loss": 0.2435,
"step": 130
},
{
"epoch": 0.6,
"grad_norm": 10.46956729888916,
"learning_rate": 9.97732818075855e-06,
"loss": 0.2893,
"step": 131
},
{
"epoch": 0.61,
"grad_norm": 8.8429594039917,
"learning_rate": 9.976980991835896e-06,
"loss": 0.3392,
"step": 132
},
{
"epoch": 0.61,
"grad_norm": 10.744973182678223,
"learning_rate": 9.976631170838516e-06,
"loss": 0.381,
"step": 133
},
{
"epoch": 0.62,
"grad_norm": 11.55877685546875,
"learning_rate": 9.976278717951414e-06,
"loss": 0.5731,
"step": 134
},
{
"epoch": 0.62,
"grad_norm": 5.640347480773926,
"learning_rate": 9.975923633360985e-06,
"loss": 0.3342,
"step": 135
},
{
"epoch": 0.63,
"grad_norm": 5.5956926345825195,
"learning_rate": 9.975565917255017e-06,
"loss": 0.257,
"step": 136
},
{
"epoch": 0.63,
"grad_norm": 9.520169258117676,
"learning_rate": 9.975205569822683e-06,
"loss": 0.4152,
"step": 137
},
{
"epoch": 0.64,
"grad_norm": 12.1378755569458,
"learning_rate": 9.974842591254559e-06,
"loss": 0.398,
"step": 138
},
{
"epoch": 0.64,
"grad_norm": 16.691791534423828,
"learning_rate": 9.974476981742602e-06,
"loss": 0.4328,
"step": 139
},
{
"epoch": 0.65,
"grad_norm": 6.700257778167725,
"learning_rate": 9.974108741480167e-06,
"loss": 0.301,
"step": 140
},
{
"epoch": 0.65,
"grad_norm": 7.227187156677246,
"learning_rate": 9.973737870661995e-06,
"loss": 0.3648,
"step": 141
},
{
"epoch": 0.66,
"grad_norm": 7.791788578033447,
"learning_rate": 9.973364369484222e-06,
"loss": 0.4057,
"step": 142
},
{
"epoch": 0.66,
"grad_norm": 12.814080238342285,
"learning_rate": 9.972988238144373e-06,
"loss": 0.3357,
"step": 143
},
{
"epoch": 0.66,
"grad_norm": 6.2968316078186035,
"learning_rate": 9.972609476841368e-06,
"loss": 0.3597,
"step": 144
},
{
"epoch": 0.67,
"grad_norm": 8.382381439208984,
"learning_rate": 9.972228085775512e-06,
"loss": 0.276,
"step": 145
},
{
"epoch": 0.67,
"grad_norm": 13.944960594177246,
"learning_rate": 9.971844065148504e-06,
"loss": 0.327,
"step": 146
},
{
"epoch": 0.68,
"grad_norm": 18.251310348510742,
"learning_rate": 9.971457415163435e-06,
"loss": 0.5018,
"step": 147
},
{
"epoch": 0.68,
"grad_norm": 16.452730178833008,
"learning_rate": 9.971068136024781e-06,
"loss": 0.5458,
"step": 148
},
{
"epoch": 0.69,
"grad_norm": 6.843854904174805,
"learning_rate": 9.970676227938416e-06,
"loss": 0.3638,
"step": 149
},
{
"epoch": 0.69,
"grad_norm": 7.829722881317139,
"learning_rate": 9.970281691111598e-06,
"loss": 0.2724,
"step": 150
},
{
"epoch": 0.7,
"grad_norm": 10.796073913574219,
"learning_rate": 9.96988452575298e-06,
"loss": 0.2902,
"step": 151
},
{
"epoch": 0.7,
"grad_norm": 8.34335994720459,
"learning_rate": 9.9694847320726e-06,
"loss": 0.3067,
"step": 152
},
{
"epoch": 0.71,
"grad_norm": 6.335442066192627,
"learning_rate": 9.96908231028189e-06,
"loss": 0.3904,
"step": 153
},
{
"epoch": 0.71,
"grad_norm": 7.972692489624023,
"learning_rate": 9.968677260593673e-06,
"loss": 0.3659,
"step": 154
},
{
"epoch": 0.72,
"grad_norm": 7.458805561065674,
"learning_rate": 9.968269583222155e-06,
"loss": 0.5201,
"step": 155
},
{
"epoch": 0.72,
"grad_norm": 8.518513679504395,
"learning_rate": 9.967859278382939e-06,
"loss": 0.3055,
"step": 156
},
{
"epoch": 0.72,
"grad_norm": 3.929286479949951,
"learning_rate": 9.967446346293013e-06,
"loss": 0.2564,
"step": 157
},
{
"epoch": 0.73,
"grad_norm": 8.628119468688965,
"learning_rate": 9.967030787170757e-06,
"loss": 0.2978,
"step": 158
},
{
"epoch": 0.73,
"grad_norm": 7.673137187957764,
"learning_rate": 9.96661260123594e-06,
"loss": 0.4875,
"step": 159
},
{
"epoch": 0.74,
"grad_norm": 5.047264099121094,
"learning_rate": 9.966191788709716e-06,
"loss": 0.3381,
"step": 160
},
{
"epoch": 0.74,
"grad_norm": 4.241339206695557,
"learning_rate": 9.965768349814635e-06,
"loss": 0.2596,
"step": 161
},
{
"epoch": 0.75,
"grad_norm": 8.526716232299805,
"learning_rate": 9.965342284774633e-06,
"loss": 0.2951,
"step": 162
},
{
"epoch": 0.75,
"grad_norm": 8.26424503326416,
"learning_rate": 9.964913593815032e-06,
"loss": 0.4233,
"step": 163
},
{
"epoch": 0.76,
"grad_norm": 9.364177703857422,
"learning_rate": 9.964482277162547e-06,
"loss": 0.4115,
"step": 164
},
{
"epoch": 0.76,
"grad_norm": 6.392032146453857,
"learning_rate": 9.964048335045276e-06,
"loss": 0.2874,
"step": 165
},
{
"epoch": 0.77,
"grad_norm": 7.744924545288086,
"learning_rate": 9.963611767692714e-06,
"loss": 0.3342,
"step": 166
},
{
"epoch": 0.77,
"grad_norm": 7.376719951629639,
"learning_rate": 9.963172575335735e-06,
"loss": 0.3293,
"step": 167
},
{
"epoch": 0.78,
"grad_norm": 10.256063461303711,
"learning_rate": 9.962730758206612e-06,
"loss": 0.3503,
"step": 168
},
{
"epoch": 0.78,
"grad_norm": 4.126972198486328,
"learning_rate": 9.962286316538992e-06,
"loss": 0.1529,
"step": 169
},
{
"epoch": 0.78,
"grad_norm": 7.471617698669434,
"learning_rate": 9.961839250567925e-06,
"loss": 0.3363,
"step": 170
},
{
"epoch": 0.79,
"grad_norm": 6.28734827041626,
"learning_rate": 9.961389560529835e-06,
"loss": 0.3863,
"step": 171
},
{
"epoch": 0.79,
"grad_norm": 11.83276653289795,
"learning_rate": 9.960937246662546e-06,
"loss": 0.3687,
"step": 172
},
{
"epoch": 0.8,
"grad_norm": 7.140760898590088,
"learning_rate": 9.960482309205262e-06,
"loss": 0.3402,
"step": 173
},
{
"epoch": 0.8,
"grad_norm": 10.465855598449707,
"learning_rate": 9.960024748398576e-06,
"loss": 0.3292,
"step": 174
},
{
"epoch": 0.81,
"grad_norm": 12.055079460144043,
"learning_rate": 9.959564564484469e-06,
"loss": 0.3464,
"step": 175
},
{
"epoch": 0.81,
"grad_norm": 10.145856857299805,
"learning_rate": 9.959101757706308e-06,
"loss": 0.2928,
"step": 176
},
{
"epoch": 0.82,
"grad_norm": 13.143282890319824,
"learning_rate": 9.958636328308852e-06,
"loss": 0.3878,
"step": 177
},
{
"epoch": 0.82,
"grad_norm": 8.187761306762695,
"learning_rate": 9.95816827653824e-06,
"loss": 0.4443,
"step": 178
},
{
"epoch": 0.83,
"grad_norm": 8.577767372131348,
"learning_rate": 9.957697602642002e-06,
"loss": 0.5333,
"step": 179
},
{
"epoch": 0.83,
"grad_norm": 6.963403224945068,
"learning_rate": 9.957224306869053e-06,
"loss": 0.3212,
"step": 180
},
{
"epoch": 0.84,
"grad_norm": 5.820532321929932,
"learning_rate": 9.956748389469697e-06,
"loss": 0.4086,
"step": 181
},
{
"epoch": 0.84,
"grad_norm": 11.523386001586914,
"learning_rate": 9.95626985069562e-06,
"loss": 0.391,
"step": 182
},
{
"epoch": 0.85,
"grad_norm": 13.058838844299316,
"learning_rate": 9.9557886907999e-06,
"loss": 0.438,
"step": 183
},
{
"epoch": 0.85,
"grad_norm": 6.208215236663818,
"learning_rate": 9.955304910036993e-06,
"loss": 0.4455,
"step": 184
},
{
"epoch": 0.85,
"grad_norm": 7.638975620269775,
"learning_rate": 9.954818508662754e-06,
"loss": 0.5551,
"step": 185
},
{
"epoch": 0.86,
"grad_norm": 7.3545966148376465,
"learning_rate": 9.954329486934411e-06,
"loss": 0.3912,
"step": 186
},
{
"epoch": 0.86,
"grad_norm": 6.2744140625,
"learning_rate": 9.953837845110585e-06,
"loss": 0.4615,
"step": 187
},
{
"epoch": 0.87,
"grad_norm": 5.212122440338135,
"learning_rate": 9.95334358345128e-06,
"loss": 0.3151,
"step": 188
},
{
"epoch": 0.87,
"grad_norm": 11.323850631713867,
"learning_rate": 9.952846702217886e-06,
"loss": 0.4894,
"step": 189
},
{
"epoch": 0.88,
"grad_norm": 10.906036376953125,
"learning_rate": 9.952347201673181e-06,
"loss": 0.4583,
"step": 190
},
{
"epoch": 0.88,
"grad_norm": 18.138147354125977,
"learning_rate": 9.95184508208132e-06,
"loss": 0.5014,
"step": 191
},
{
"epoch": 0.89,
"grad_norm": 9.336864471435547,
"learning_rate": 9.951340343707852e-06,
"loss": 0.4421,
"step": 192
},
{
"epoch": 0.89,
"grad_norm": 6.270864963531494,
"learning_rate": 9.95083298681971e-06,
"loss": 0.2916,
"step": 193
},
{
"epoch": 0.9,
"grad_norm": 10.805733680725098,
"learning_rate": 9.950323011685206e-06,
"loss": 0.4854,
"step": 194
},
{
"epoch": 0.9,
"grad_norm": 6.747642517089844,
"learning_rate": 9.94981041857404e-06,
"loss": 0.2705,
"step": 195
},
{
"epoch": 0.91,
"grad_norm": 6.886074542999268,
"learning_rate": 9.9492952077573e-06,
"loss": 0.2599,
"step": 196
},
{
"epoch": 0.91,
"grad_norm": 18.723520278930664,
"learning_rate": 9.948777379507453e-06,
"loss": 0.5256,
"step": 197
},
{
"epoch": 0.91,
"grad_norm": 7.4744873046875,
"learning_rate": 9.948256934098353e-06,
"loss": 0.3145,
"step": 198
},
{
"epoch": 0.92,
"grad_norm": 6.738812446594238,
"learning_rate": 9.947733871805235e-06,
"loss": 0.324,
"step": 199
},
{
"epoch": 0.92,
"grad_norm": 7.052412986755371,
"learning_rate": 9.947208192904722e-06,
"loss": 0.4534,
"step": 200
},
{
"epoch": 0.93,
"grad_norm": 4.8173508644104,
"learning_rate": 9.946679897674823e-06,
"loss": 0.372,
"step": 201
},
{
"epoch": 0.93,
"grad_norm": 7.492809772491455,
"learning_rate": 9.94614898639492e-06,
"loss": 0.5313,
"step": 202
},
{
"epoch": 0.94,
"grad_norm": 7.617343425750732,
"learning_rate": 9.945615459345789e-06,
"loss": 0.4958,
"step": 203
},
{
"epoch": 0.94,
"grad_norm": 9.660953521728516,
"learning_rate": 9.945079316809585e-06,
"loss": 0.3639,
"step": 204
},
{
"epoch": 0.95,
"grad_norm": 8.472088813781738,
"learning_rate": 9.944540559069847e-06,
"loss": 0.3534,
"step": 205
},
{
"epoch": 0.95,
"grad_norm": 10.049768447875977,
"learning_rate": 9.943999186411496e-06,
"loss": 0.4788,
"step": 206
},
{
"epoch": 0.96,
"grad_norm": 6.580361366271973,
"learning_rate": 9.943455199120836e-06,
"loss": 0.3854,
"step": 207
},
{
"epoch": 0.96,
"grad_norm": 5.525043487548828,
"learning_rate": 9.942908597485558e-06,
"loss": 0.3237,
"step": 208
},
{
"epoch": 0.97,
"grad_norm": 8.51529598236084,
"learning_rate": 9.94235938179473e-06,
"loss": 0.2758,
"step": 209
},
{
"epoch": 0.97,
"grad_norm": 11.293745994567871,
"learning_rate": 9.941807552338805e-06,
"loss": 0.319,
"step": 210
},
{
"epoch": 0.97,
"grad_norm": 7.503701686859131,
"learning_rate": 9.941253109409615e-06,
"loss": 0.4216,
"step": 211
},
{
"epoch": 0.98,
"grad_norm": 8.137164115905762,
"learning_rate": 9.94069605330038e-06,
"loss": 0.391,
"step": 212
},
{
"epoch": 0.98,
"grad_norm": 5.768956661224365,
"learning_rate": 9.940136384305699e-06,
"loss": 0.3896,
"step": 213
},
{
"epoch": 0.99,
"grad_norm": 6.318339824676514,
"learning_rate": 9.939574102721552e-06,
"loss": 0.3058,
"step": 214
},
{
"epoch": 0.99,
"grad_norm": 5.358760356903076,
"learning_rate": 9.939009208845301e-06,
"loss": 0.3874,
"step": 215
},
{
"epoch": 1.0,
"grad_norm": 7.182950973510742,
"learning_rate": 9.938441702975689e-06,
"loss": 0.3189,
"step": 216
},
{
"epoch": 1.0,
"eval_loss": 0.3517548441886902,
"eval_mse": 0.35175487080665485,
"eval_runtime": 13.1487,
"eval_samples_per_second": 55.519,
"eval_steps_per_second": 27.759,
"step": 216
}
],
"logging_steps": 1.0,
"max_steps": 4320,
"num_input_tokens_seen": 0,
"num_train_epochs": 20,
"save_steps": 500,
"total_flos": 1.7659591890305024e+16,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}
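
The JSON above is the state file that the Hugging Face Trainer writes alongside checkpoints; here it covers the first epoch (216 of 4320 steps), with one training record per step in "log_history" and a single evaluation record at step 216. Below is a minimal sketch of how one might load and summarize it, assuming the file has been downloaded locally as "trainer_state.json" (the file name and local path are assumptions, not part of the log itself).

import json
import statistics

# Load the trainer state exported by the Hugging Face Trainer.
# Assumes the file is available locally as "trainer_state.json".
with open("trainer_state.json", "r", encoding="utf-8") as f:
    state = json.load(f)

# Per-step training records carry "loss"; evaluation records carry "eval_loss".
train_logs = [entry for entry in state["log_history"] if "loss" in entry]
eval_logs = [entry for entry in state["log_history"] if "eval_loss" in entry]

print(f"progress: step {state['global_step']} of {state['max_steps']}")
print(f"mean train loss over last 10 logged steps: "
      f"{statistics.mean(entry['loss'] for entry in train_logs[-10:]):.4f}")
for entry in eval_logs:
    print(f"epoch {entry['epoch']}: eval_loss={entry['eval_loss']:.4f}, "
          f"eval_mse={entry['eval_mse']:.4f}")

Run against this file, the script reports the step-216 evaluation (eval_loss ≈ 0.3518, eval_mse ≈ 0.3518) and a late-epoch training-loss average, which is a quick way to check that the loss has settled from its initial value of 11.33 at step 1.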