{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.0,
"eval_steps": 500,
"global_step": 920,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.008695652173913044,
"grad_norm": 14.8125,
"learning_rate": 8.000000000000001e-07,
"loss": 1.8918,
"step": 4
},
{
"epoch": 0.017391304347826087,
"grad_norm": 7.8125,
"learning_rate": 1.6000000000000001e-06,
"loss": 1.816,
"step": 8
},
{
"epoch": 0.02608695652173913,
"grad_norm": 3.171875,
"learning_rate": 2.4000000000000003e-06,
"loss": 1.7715,
"step": 12
},
{
"epoch": 0.034782608695652174,
"grad_norm": 4.0,
"learning_rate": 3.2000000000000003e-06,
"loss": 1.7775,
"step": 16
},
{
"epoch": 0.043478260869565216,
"grad_norm": 4.5625,
"learning_rate": 4.000000000000001e-06,
"loss": 1.7134,
"step": 20
},
{
"epoch": 0.05217391304347826,
"grad_norm": 5.84375,
"learning_rate": 4.800000000000001e-06,
"loss": 1.7756,
"step": 24
},
{
"epoch": 0.06086956521739131,
"grad_norm": 6.78125,
"learning_rate": 5.600000000000001e-06,
"loss": 1.7208,
"step": 28
},
{
"epoch": 0.06956521739130435,
"grad_norm": 6.65625,
"learning_rate": 6.4000000000000006e-06,
"loss": 1.6998,
"step": 32
},
{
"epoch": 0.0782608695652174,
"grad_norm": 8.75,
"learning_rate": 7.2000000000000005e-06,
"loss": 1.8301,
"step": 36
},
{
"epoch": 0.08695652173913043,
"grad_norm": 7.0625,
"learning_rate": 8.000000000000001e-06,
"loss": 1.7552,
"step": 40
},
{
"epoch": 0.09565217391304348,
"grad_norm": 4.5625,
"learning_rate": 8.8e-06,
"loss": 1.7062,
"step": 44
},
{
"epoch": 0.10434782608695652,
"grad_norm": 5.375,
"learning_rate": 9.600000000000001e-06,
"loss": 1.7292,
"step": 48
},
{
"epoch": 0.11304347826086956,
"grad_norm": 7.53125,
"learning_rate": 9.999869605476246e-06,
"loss": 1.8108,
"step": 52
},
{
"epoch": 0.12173913043478261,
"grad_norm": 9.125,
"learning_rate": 9.99882649009242e-06,
"loss": 1.8255,
"step": 56
},
{
"epoch": 0.13043478260869565,
"grad_norm": 6.78125,
"learning_rate": 9.996740476948386e-06,
"loss": 1.7476,
"step": 60
},
{
"epoch": 0.1391304347826087,
"grad_norm": 7.09375,
"learning_rate": 9.99361200124597e-06,
"loss": 1.7619,
"step": 64
},
{
"epoch": 0.14782608695652175,
"grad_norm": 4.75,
"learning_rate": 9.989441715674422e-06,
"loss": 1.7593,
"step": 68
},
{
"epoch": 0.1565217391304348,
"grad_norm": 6.21875,
"learning_rate": 9.98423049027424e-06,
"loss": 1.7353,
"step": 72
},
{
"epoch": 0.16521739130434782,
"grad_norm": 6.03125,
"learning_rate": 9.977979412255651e-06,
"loss": 1.7301,
"step": 76
},
{
"epoch": 0.17391304347826086,
"grad_norm": 6.78125,
"learning_rate": 9.970689785771798e-06,
"loss": 1.7558,
"step": 80
},
{
"epoch": 0.1826086956521739,
"grad_norm": 5.6875,
"learning_rate": 9.962363131646649e-06,
"loss": 1.8001,
"step": 84
},
{
"epoch": 0.19130434782608696,
"grad_norm": 8.875,
"learning_rate": 9.953001187057705e-06,
"loss": 1.751,
"step": 88
},
{
"epoch": 0.2,
"grad_norm": 6.3125,
"learning_rate": 9.942605905173593e-06,
"loss": 1.7262,
"step": 92
},
{
"epoch": 0.20869565217391303,
"grad_norm": 6.28125,
"learning_rate": 9.931179454746556e-06,
"loss": 1.8109,
"step": 96
},
{
"epoch": 0.21739130434782608,
"grad_norm": 6.46875,
"learning_rate": 9.918724219660013e-06,
"loss": 1.9357,
"step": 100
},
{
"epoch": 0.22608695652173913,
"grad_norm": 6.25,
"learning_rate": 9.905242798431196e-06,
"loss": 1.6968,
"step": 104
},
{
"epoch": 0.23478260869565218,
"grad_norm": 7.8125,
"learning_rate": 9.890738003669029e-06,
"loss": 1.8129,
"step": 108
},
{
"epoch": 0.24347826086956523,
"grad_norm": 5.6875,
"learning_rate": 9.87521286148734e-06,
"loss": 1.688,
"step": 112
},
{
"epoch": 0.25217391304347825,
"grad_norm": 6.34375,
"learning_rate": 9.858670610873528e-06,
"loss": 1.7948,
"step": 116
},
{
"epoch": 0.2608695652173913,
"grad_norm": 6.03125,
"learning_rate": 9.841114703012817e-06,
"loss": 1.7484,
"step": 120
},
{
"epoch": 0.26956521739130435,
"grad_norm": 5.84375,
"learning_rate": 9.822548800568238e-06,
"loss": 1.7572,
"step": 124
},
{
"epoch": 0.2782608695652174,
"grad_norm": 4.96875,
"learning_rate": 9.802976776916493e-06,
"loss": 1.6758,
"step": 128
},
{
"epoch": 0.28695652173913044,
"grad_norm": 6.0,
"learning_rate": 9.782402715339866e-06,
"loss": 1.7555,
"step": 132
},
{
"epoch": 0.2956521739130435,
"grad_norm": 5.75,
"learning_rate": 9.760830908174323e-06,
"loss": 1.7926,
"step": 136
},
{
"epoch": 0.30434782608695654,
"grad_norm": 5.0625,
"learning_rate": 9.738265855914014e-06,
"loss": 1.7392,
"step": 140
},
{
"epoch": 0.3130434782608696,
"grad_norm": 6.375,
"learning_rate": 9.714712266272339e-06,
"loss": 1.7365,
"step": 144
},
{
"epoch": 0.3217391304347826,
"grad_norm": 6.21875,
"learning_rate": 9.690175053199789e-06,
"loss": 1.8047,
"step": 148
},
{
"epoch": 0.33043478260869563,
"grad_norm": 7.03125,
"learning_rate": 9.664659335858755e-06,
"loss": 1.691,
"step": 152
},
{
"epoch": 0.3391304347826087,
"grad_norm": 5.8125,
"learning_rate": 9.63817043755552e-06,
"loss": 1.635,
"step": 156
},
{
"epoch": 0.34782608695652173,
"grad_norm": 12.1875,
"learning_rate": 9.610713884629667e-06,
"loss": 1.7294,
"step": 160
},
{
"epoch": 0.3565217391304348,
"grad_norm": 7.28125,
"learning_rate": 9.582295405301131e-06,
"loss": 1.8021,
"step": 164
},
{
"epoch": 0.3652173913043478,
"grad_norm": 6.46875,
"learning_rate": 9.552920928475127e-06,
"loss": 1.7255,
"step": 168
},
{
"epoch": 0.3739130434782609,
"grad_norm": 4.75,
"learning_rate": 9.522596582505208e-06,
"loss": 1.7484,
"step": 172
},
{
"epoch": 0.3826086956521739,
"grad_norm": 6.25,
"learning_rate": 9.491328693914723e-06,
"loss": 1.8143,
"step": 176
},
{
"epoch": 0.391304347826087,
"grad_norm": 3.921875,
"learning_rate": 9.459123786076911e-06,
"loss": 1.6716,
"step": 180
},
{
"epoch": 0.4,
"grad_norm": 6.71875,
"learning_rate": 9.425988577853959e-06,
"loss": 1.8105,
"step": 184
},
{
"epoch": 0.40869565217391307,
"grad_norm": 9.375,
"learning_rate": 9.391929982195233e-06,
"loss": 1.7907,
"step": 188
},
{
"epoch": 0.41739130434782606,
"grad_norm": 3.84375,
"learning_rate": 9.356955104695057e-06,
"loss": 1.6777,
"step": 192
},
{
"epoch": 0.4260869565217391,
"grad_norm": 5.71875,
"learning_rate": 9.321071242110275e-06,
"loss": 1.8068,
"step": 196
},
{
"epoch": 0.43478260869565216,
"grad_norm": 5.53125,
"learning_rate": 9.284285880837947e-06,
"loss": 1.718,
"step": 200
},
{
"epoch": 0.4434782608695652,
"grad_norm": 3.4375,
"learning_rate": 9.24660669535346e-06,
"loss": 1.6196,
"step": 204
},
{
"epoch": 0.45217391304347826,
"grad_norm": 2.828125,
"learning_rate": 9.208041546609424e-06,
"loss": 1.7354,
"step": 208
},
{
"epoch": 0.4608695652173913,
"grad_norm": 3.34375,
"learning_rate": 9.168598480395653e-06,
"loss": 1.7442,
"step": 212
},
{
"epoch": 0.46956521739130436,
"grad_norm": 3.6875,
"learning_rate": 9.128285725660577e-06,
"loss": 1.742,
"step": 216
},
{
"epoch": 0.4782608695652174,
"grad_norm": 4.125,
"learning_rate": 9.08711169279446e-06,
"loss": 1.8496,
"step": 220
},
{
"epoch": 0.48695652173913045,
"grad_norm": 3.5625,
"learning_rate": 9.045084971874738e-06,
"loss": 1.6437,
"step": 224
},
{
"epoch": 0.4956521739130435,
"grad_norm": 4.28125,
"learning_rate": 9.002214330873895e-06,
"loss": 1.7723,
"step": 228
},
{
"epoch": 0.5043478260869565,
"grad_norm": 2.921875,
"learning_rate": 8.958508713830212e-06,
"loss": 1.5517,
"step": 232
},
{
"epoch": 0.5130434782608696,
"grad_norm": 6.625,
"learning_rate": 8.91397723898178e-06,
"loss": 1.6924,
"step": 236
},
{
"epoch": 0.5217391304347826,
"grad_norm": 3.78125,
"learning_rate": 8.868629196864182e-06,
"loss": 1.6444,
"step": 240
},
{
"epoch": 0.5304347826086957,
"grad_norm": 3.9375,
"learning_rate": 8.82247404837222e-06,
"loss": 1.751,
"step": 244
},
{
"epoch": 0.5391304347826087,
"grad_norm": 4.28125,
"learning_rate": 8.775521422786104e-06,
"loss": 1.8369,
"step": 248
},
{
"epoch": 0.5478260869565217,
"grad_norm": 3.6875,
"learning_rate": 8.727781115762503e-06,
"loss": 1.7386,
"step": 252
},
{
"epoch": 0.5565217391304348,
"grad_norm": 4.46875,
"learning_rate": 8.679263087290903e-06,
"loss": 1.8209,
"step": 256
},
{
"epoch": 0.5652173913043478,
"grad_norm": 3.953125,
"learning_rate": 8.629977459615655e-06,
"loss": 1.7278,
"step": 260
},
{
"epoch": 0.5739130434782609,
"grad_norm": 4.53125,
"learning_rate": 8.579934515124202e-06,
"loss": 1.7605,
"step": 264
},
{
"epoch": 0.5826086956521739,
"grad_norm": 3.640625,
"learning_rate": 8.529144694201866e-06,
"loss": 1.6836,
"step": 268
},
{
"epoch": 0.591304347826087,
"grad_norm": 3.375,
"learning_rate": 8.477618593053693e-06,
"loss": 1.6158,
"step": 272
},
{
"epoch": 0.6,
"grad_norm": 4.40625,
"learning_rate": 8.425366961493784e-06,
"loss": 1.821,
"step": 276
},
{
"epoch": 0.6086956521739131,
"grad_norm": 3.71875,
"learning_rate": 8.372400700702569e-06,
"loss": 1.667,
"step": 280
},
{
"epoch": 0.6173913043478261,
"grad_norm": 3.734375,
"learning_rate": 8.318730860952523e-06,
"loss": 1.7708,
"step": 284
},
{
"epoch": 0.6260869565217392,
"grad_norm": 3.5625,
"learning_rate": 8.264368639302746e-06,
"loss": 1.6117,
"step": 288
},
{
"epoch": 0.6347826086956522,
"grad_norm": 3.609375,
"learning_rate": 8.209325377262955e-06,
"loss": 1.7218,
"step": 292
},
{
"epoch": 0.6434782608695652,
"grad_norm": 3.578125,
"learning_rate": 8.153612558427311e-06,
"loss": 1.775,
"step": 296
},
{
"epoch": 0.6521739130434783,
"grad_norm": 3.1875,
"learning_rate": 8.097241806078616e-06,
"loss": 1.5923,
"step": 300
},
{
"epoch": 0.6608695652173913,
"grad_norm": 3.78125,
"learning_rate": 8.040224880763368e-06,
"loss": 1.6577,
"step": 304
},
{
"epoch": 0.6695652173913044,
"grad_norm": 4.90625,
"learning_rate": 7.982573677838172e-06,
"loss": 1.7276,
"step": 308
},
{
"epoch": 0.6782608695652174,
"grad_norm": 3.390625,
"learning_rate": 7.92430022498803e-06,
"loss": 1.6829,
"step": 312
},
{
"epoch": 0.6869565217391305,
"grad_norm": 3.671875,
"learning_rate": 7.865416679717025e-06,
"loss": 1.7478,
"step": 316
},
{
"epoch": 0.6956521739130435,
"grad_norm": 3.46875,
"learning_rate": 7.805935326811913e-06,
"loss": 1.7896,
"step": 320
},
{
"epoch": 0.7043478260869566,
"grad_norm": 3.234375,
"learning_rate": 7.745868575779176e-06,
"loss": 1.6258,
"step": 324
},
{
"epoch": 0.7130434782608696,
"grad_norm": 3.296875,
"learning_rate": 7.685228958256036e-06,
"loss": 1.6437,
"step": 328
},
{
"epoch": 0.7217391304347827,
"grad_norm": 3.9375,
"learning_rate": 7.624029125396004e-06,
"loss": 1.7755,
"step": 332
},
{
"epoch": 0.7304347826086957,
"grad_norm": 3.171875,
"learning_rate": 7.562281845229483e-06,
"loss": 1.7433,
"step": 336
},
{
"epoch": 0.7391304347826086,
"grad_norm": 3.1875,
"learning_rate": 7.500000000000001e-06,
"loss": 1.7589,
"step": 340
},
{
"epoch": 0.7478260869565218,
"grad_norm": 3.265625,
"learning_rate": 7.437196583476597e-06,
"loss": 1.6831,
"step": 344
},
{
"epoch": 0.7565217391304347,
"grad_norm": 3.359375,
"learning_rate": 7.373884698242959e-06,
"loss": 1.7514,
"step": 348
},
{
"epoch": 0.7652173913043478,
"grad_norm": 3.359375,
"learning_rate": 7.310077552963849e-06,
"loss": 1.6971,
"step": 352
},
{
"epoch": 0.7739130434782608,
"grad_norm": 3.515625,
"learning_rate": 7.245788459629397e-06,
"loss": 1.7533,
"step": 356
},
{
"epoch": 0.782608695652174,
"grad_norm": 3.25,
"learning_rate": 7.181030830777838e-06,
"loss": 1.6642,
"step": 360
},
{
"epoch": 0.7913043478260869,
"grad_norm": 3.453125,
"learning_rate": 7.115818176697285e-06,
"loss": 1.7967,
"step": 364
},
{
"epoch": 0.8,
"grad_norm": 3.984375,
"learning_rate": 7.050164102607081e-06,
"loss": 1.8408,
"step": 368
},
{
"epoch": 0.808695652173913,
"grad_norm": 3.015625,
"learning_rate": 6.984082305819379e-06,
"loss": 1.5956,
"step": 372
},
{
"epoch": 0.8173913043478261,
"grad_norm": 3.078125,
"learning_rate": 6.9175865728814806e-06,
"loss": 1.7632,
"step": 376
},
{
"epoch": 0.8260869565217391,
"grad_norm": 3.9375,
"learning_rate": 6.850690776699574e-06,
"loss": 1.7298,
"step": 380
},
{
"epoch": 0.8347826086956521,
"grad_norm": 3.125,
"learning_rate": 6.7834088736444435e-06,
"loss": 1.7808,
"step": 384
},
{
"epoch": 0.8434782608695652,
"grad_norm": 2.703125,
"learning_rate": 6.715754900639789e-06,
"loss": 1.7084,
"step": 388
},
{
"epoch": 0.8521739130434782,
"grad_norm": 4.0,
"learning_rate": 6.647742972233703e-06,
"loss": 1.8133,
"step": 392
},
{
"epoch": 0.8608695652173913,
"grad_norm": 3.40625,
"learning_rate": 6.579387277653986e-06,
"loss": 1.7081,
"step": 396
},
{
"epoch": 0.8695652173913043,
"grad_norm": 2.953125,
"learning_rate": 6.510702077847864e-06,
"loss": 1.8246,
"step": 400
},
{
"epoch": 0.8782608695652174,
"grad_norm": 3.015625,
"learning_rate": 6.441701702506755e-06,
"loss": 1.6652,
"step": 404
},
{
"epoch": 0.8869565217391304,
"grad_norm": 2.78125,
"learning_rate": 6.372400547076675e-06,
"loss": 1.7344,
"step": 408
},
{
"epoch": 0.8956521739130435,
"grad_norm": 2.9375,
"learning_rate": 6.302813069754949e-06,
"loss": 1.7272,
"step": 412
},
{
"epoch": 0.9043478260869565,
"grad_norm": 2.9375,
"learning_rate": 6.2329537884738115e-06,
"loss": 1.7836,
"step": 416
},
{
"epoch": 0.9130434782608695,
"grad_norm": 3.09375,
"learning_rate": 6.162837277871553e-06,
"loss": 1.5905,
"step": 420
},
{
"epoch": 0.9217391304347826,
"grad_norm": 3.53125,
"learning_rate": 6.092478166251839e-06,
"loss": 1.7248,
"step": 424
},
{
"epoch": 0.9304347826086956,
"grad_norm": 3.5625,
"learning_rate": 6.021891132531825e-06,
"loss": 1.7304,
"step": 428
},
{
"epoch": 0.9391304347826087,
"grad_norm": 3.28125,
"learning_rate": 5.9510909031797135e-06,
"loss": 1.6762,
"step": 432
},
{
"epoch": 0.9478260869565217,
"grad_norm": 3.84375,
"learning_rate": 5.880092249142401e-06,
"loss": 1.6284,
"step": 436
},
{
"epoch": 0.9565217391304348,
"grad_norm": 2.828125,
"learning_rate": 5.808909982763825e-06,
"loss": 1.7868,
"step": 440
},
{
"epoch": 0.9652173913043478,
"grad_norm": 3.203125,
"learning_rate": 5.737558954694698e-06,
"loss": 1.6698,
"step": 444
},
{
"epoch": 0.9739130434782609,
"grad_norm": 3.015625,
"learning_rate": 5.666054050794234e-06,
"loss": 1.7537,
"step": 448
},
{
"epoch": 0.9826086956521739,
"grad_norm": 2.90625,
"learning_rate": 5.594410189024533e-06,
"loss": 1.6714,
"step": 452
},
{
"epoch": 0.991304347826087,
"grad_norm": 6.25,
"learning_rate": 5.522642316338268e-06,
"loss": 1.7342,
"step": 456
},
{
"epoch": 1.0,
"grad_norm": 2.53125,
"learning_rate": 5.450765405560328e-06,
"loss": 1.7088,
"step": 460
},
{
"epoch": 1.008695652173913,
"grad_norm": 2.875,
"learning_rate": 5.378794452264053e-06,
"loss": 1.6256,
"step": 464
},
{
"epoch": 1.017391304347826,
"grad_norm": 3.46875,
"learning_rate": 5.30674447164274e-06,
"loss": 1.7419,
"step": 468
},
{
"epoch": 1.0260869565217392,
"grad_norm": 3.109375,
"learning_rate": 5.234630495377035e-06,
"loss": 1.6345,
"step": 472
},
{
"epoch": 1.0347826086956522,
"grad_norm": 2.609375,
"learning_rate": 5.1624675684989035e-06,
"loss": 1.668,
"step": 476
},
{
"epoch": 1.0434782608695652,
"grad_norm": 3.03125,
"learning_rate": 5.090270746252803e-06,
"loss": 1.8276,
"step": 480
},
{
"epoch": 1.0521739130434782,
"grad_norm": 2.859375,
"learning_rate": 5.018055090954733e-06,
"loss": 1.6851,
"step": 484
},
{
"epoch": 1.0608695652173914,
"grad_norm": 2.875,
"learning_rate": 4.945835668849801e-06,
"loss": 1.7474,
"step": 488
},
{
"epoch": 1.0695652173913044,
"grad_norm": 2.96875,
"learning_rate": 4.873627546968983e-06,
"loss": 1.7416,
"step": 492
},
{
"epoch": 1.0782608695652174,
"grad_norm": 3.421875,
"learning_rate": 4.801445789985703e-06,
"loss": 1.6386,
"step": 496
},
{
"epoch": 1.0869565217391304,
"grad_norm": 2.84375,
"learning_rate": 4.729305457072913e-06,
"loss": 1.6194,
"step": 500
},
{
"epoch": 1.0956521739130434,
"grad_norm": 2.546875,
"learning_rate": 4.657221598761329e-06,
"loss": 1.6924,
"step": 504
},
{
"epoch": 1.1043478260869566,
"grad_norm": 3.109375,
"learning_rate": 4.585209253799451e-06,
"loss": 1.6767,
"step": 508
},
{
"epoch": 1.1130434782608696,
"grad_norm": 3.046875,
"learning_rate": 4.513283446016052e-06,
"loss": 1.7603,
"step": 512
},
{
"epoch": 1.1217391304347826,
"grad_norm": 2.46875,
"learning_rate": 4.441459181185786e-06,
"loss": 1.758,
"step": 516
},
{
"epoch": 1.1304347826086956,
"grad_norm": 2.859375,
"learning_rate": 4.369751443898554e-06,
"loss": 1.6899,
"step": 520
},
{
"epoch": 1.1391304347826088,
"grad_norm": 2.875,
"learning_rate": 4.298175194433279e-06,
"loss": 1.7596,
"step": 524
},
{
"epoch": 1.1478260869565218,
"grad_norm": 2.984375,
"learning_rate": 4.226745365636783e-06,
"loss": 1.7741,
"step": 528
},
{
"epoch": 1.1565217391304348,
"grad_norm": 2.71875,
"learning_rate": 4.155476859808356e-06,
"loss": 1.7412,
"step": 532
},
{
"epoch": 1.1652173913043478,
"grad_norm": 2.78125,
"learning_rate": 4.0843845455907195e-06,
"loss": 1.7079,
"step": 536
},
{
"epoch": 1.1739130434782608,
"grad_norm": 3.03125,
"learning_rate": 4.013483254868001e-06,
"loss": 1.6852,
"step": 540
},
{
"epoch": 1.182608695652174,
"grad_norm": 3.0,
"learning_rate": 3.942787779671376e-06,
"loss": 1.781,
"step": 544
},
{
"epoch": 1.191304347826087,
"grad_norm": 3.25,
"learning_rate": 3.87231286909303e-06,
"loss": 1.64,
"step": 548
},
{
"epoch": 1.2,
"grad_norm": 2.671875,
"learning_rate": 3.8020732262090876e-06,
"loss": 1.5995,
"step": 552
},
{
"epoch": 1.208695652173913,
"grad_norm": 2.875,
"learning_rate": 3.732083505012118e-06,
"loss": 1.5924,
"step": 556
},
{
"epoch": 1.2173913043478262,
"grad_norm": 2.671875,
"learning_rate": 3.662358307353897e-06,
"loss": 1.5588,
"step": 560
},
{
"epoch": 1.2260869565217392,
"grad_norm": 2.59375,
"learning_rate": 3.5929121798990494e-06,
"loss": 1.6359,
"step": 564
},
{
"epoch": 1.2347826086956522,
"grad_norm": 2.6875,
"learning_rate": 3.5237596110902007e-06,
"loss": 1.6286,
"step": 568
},
{
"epoch": 1.2434782608695651,
"grad_norm": 2.21875,
"learning_rate": 3.4549150281252635e-06,
"loss": 1.7253,
"step": 572
},
{
"epoch": 1.2521739130434781,
"grad_norm": 2.71875,
"learning_rate": 3.3863927939475274e-06,
"loss": 1.8196,
"step": 576
},
{
"epoch": 1.2608695652173914,
"grad_norm": 2.59375,
"learning_rate": 3.3182072042491244e-06,
"loss": 1.8091,
"step": 580
},
{
"epoch": 1.2695652173913043,
"grad_norm": 2.53125,
"learning_rate": 3.250372484488558e-06,
"loss": 1.772,
"step": 584
},
{
"epoch": 1.2782608695652173,
"grad_norm": 2.8125,
"learning_rate": 3.1829027869228506e-06,
"loss": 1.6914,
"step": 588
},
{
"epoch": 1.2869565217391306,
"grad_norm": 2.671875,
"learning_rate": 3.1158121876549853e-06,
"loss": 1.8261,
"step": 592
},
{
"epoch": 1.2956521739130435,
"grad_norm": 2.671875,
"learning_rate": 3.0491146836972273e-06,
"loss": 1.712,
"step": 596
},
{
"epoch": 1.3043478260869565,
"grad_norm": 2.609375,
"learning_rate": 2.982824190050958e-06,
"loss": 1.7076,
"step": 600
},
{
"epoch": 1.3130434782608695,
"grad_norm": 1.9609375,
"learning_rate": 2.9169545368035933e-06,
"loss": 1.6249,
"step": 604
},
{
"epoch": 1.3217391304347825,
"grad_norm": 2.265625,
"learning_rate": 2.8515194662432423e-06,
"loss": 1.6602,
"step": 608
},
{
"epoch": 1.3304347826086955,
"grad_norm": 2.3125,
"learning_rate": 2.7865326299916736e-06,
"loss": 1.72,
"step": 612
},
{
"epoch": 1.3391304347826087,
"grad_norm": 1.8828125,
"learning_rate": 2.7220075861561956e-06,
"loss": 1.6759,
"step": 616
},
{
"epoch": 1.3478260869565217,
"grad_norm": 2.046875,
"learning_rate": 2.65795779650105e-06,
"loss": 1.8399,
"step": 620
},
{
"epoch": 1.3565217391304347,
"grad_norm": 1.90625,
"learning_rate": 2.5943966236389028e-06,
"loss": 1.736,
"step": 624
},
{
"epoch": 1.365217391304348,
"grad_norm": 2.0625,
"learning_rate": 2.5313373282430244e-06,
"loss": 1.7337,
"step": 628
},
{
"epoch": 1.373913043478261,
"grad_norm": 2.15625,
"learning_rate": 2.46879306628073e-06,
"loss": 1.7536,
"step": 632
},
{
"epoch": 1.382608695652174,
"grad_norm": 2.015625,
"learning_rate": 2.406776886268677e-06,
"loss": 1.6754,
"step": 636
},
{
"epoch": 1.391304347826087,
"grad_norm": 2.015625,
"learning_rate": 2.345301726550567e-06,
"loss": 1.8216,
"step": 640
},
{
"epoch": 1.4,
"grad_norm": 1.8515625,
"learning_rate": 2.2843804125978356e-06,
"loss": 1.6752,
"step": 644
},
{
"epoch": 1.4086956521739131,
"grad_norm": 2.125,
"learning_rate": 2.224025654333905e-06,
"loss": 1.7638,
"step": 648
},
{
"epoch": 1.4173913043478261,
"grad_norm": 1.890625,
"learning_rate": 2.1642500434825207e-06,
"loss": 1.6406,
"step": 652
},
{
"epoch": 1.4260869565217391,
"grad_norm": 1.7578125,
"learning_rate": 2.105066050940758e-06,
"loss": 1.695,
"step": 656
},
{
"epoch": 1.434782608695652,
"grad_norm": 1.96875,
"learning_rate": 2.0464860241772454e-06,
"loss": 1.7199,
"step": 660
},
{
"epoch": 1.4434782608695653,
"grad_norm": 1.8984375,
"learning_rate": 1.9885221846561267e-06,
"loss": 1.7604,
"step": 664
},
{
"epoch": 1.4521739130434783,
"grad_norm": 1.8203125,
"learning_rate": 1.931186625287313e-06,
"loss": 1.592,
"step": 668
},
{
"epoch": 1.4608695652173913,
"grad_norm": 1.796875,
"learning_rate": 1.8744913079035598e-06,
"loss": 1.654,
"step": 672
},
{
"epoch": 1.4695652173913043,
"grad_norm": 1.9609375,
"learning_rate": 1.8184480607648863e-06,
"loss": 1.6098,
"step": 676
},
{
"epoch": 1.4782608695652173,
"grad_norm": 1.9140625,
"learning_rate": 1.7630685760908623e-06,
"loss": 1.6708,
"step": 680
},
{
"epoch": 1.4869565217391305,
"grad_norm": 2.59375,
"learning_rate": 1.7083644076212785e-06,
"loss": 1.6816,
"step": 684
},
{
"epoch": 1.4956521739130435,
"grad_norm": 2.46875,
"learning_rate": 1.6543469682057105e-06,
"loss": 1.6811,
"step": 688
},
{
"epoch": 1.5043478260869565,
"grad_norm": 2.015625,
"learning_rate": 1.6010275274224607e-06,
"loss": 1.7303,
"step": 692
},
{
"epoch": 1.5130434782608697,
"grad_norm": 1.8046875,
"learning_rate": 1.548417209227422e-06,
"loss": 1.7151,
"step": 696
},
{
"epoch": 1.5217391304347827,
"grad_norm": 1.9453125,
"learning_rate": 1.4965269896332884e-06,
"loss": 1.6849,
"step": 700
},
{
"epoch": 1.5304347826086957,
"grad_norm": 2.421875,
"learning_rate": 1.4453676944196477e-06,
"loss": 1.7114,
"step": 704
},
{
"epoch": 1.5391304347826087,
"grad_norm": 3.015625,
"learning_rate": 1.3949499968744208e-06,
"loss": 1.7589,
"step": 708
},
{
"epoch": 1.5478260869565217,
"grad_norm": 1.890625,
"learning_rate": 1.3452844155671052e-06,
"loss": 1.6126,
"step": 712
},
{
"epoch": 1.5565217391304347,
"grad_norm": 1.8203125,
"learning_rate": 1.296381312154305e-06,
"loss": 1.7336,
"step": 716
},
{
"epoch": 1.5652173913043477,
"grad_norm": 1.890625,
"learning_rate": 1.2482508892179884e-06,
"loss": 1.7618,
"step": 720
},
{
"epoch": 1.5739130434782609,
"grad_norm": 1.75,
"learning_rate": 1.2009031881369431e-06,
"loss": 1.7413,
"step": 724
},
{
"epoch": 1.5826086956521739,
"grad_norm": 1.8046875,
"learning_rate": 1.1543480869918555e-06,
"loss": 1.6844,
"step": 728
},
{
"epoch": 1.591304347826087,
"grad_norm": 1.921875,
"learning_rate": 1.1085952985044636e-06,
"loss": 1.7543,
"step": 732
},
{
"epoch": 1.6,
"grad_norm": 1.9453125,
"learning_rate": 1.0636543680112043e-06,
"loss": 1.7416,
"step": 736
},
{
"epoch": 1.608695652173913,
"grad_norm": 1.8203125,
"learning_rate": 1.0195346714717813e-06,
"loss": 1.6915,
"step": 740
},
{
"epoch": 1.617391304347826,
"grad_norm": 1.8828125,
"learning_rate": 9.762454135130828e-07,
"loss": 1.6498,
"step": 744
},
{
"epoch": 1.626086956521739,
"grad_norm": 2.421875,
"learning_rate": 9.337956255088238e-07,
"loss": 1.7677,
"step": 748
},
{
"epoch": 1.634782608695652,
"grad_norm": 1.984375,
"learning_rate": 8.921941636953435e-07,
"loss": 1.7559,
"step": 752
},
{
"epoch": 1.643478260869565,
"grad_norm": 1.7890625,
"learning_rate": 8.514497073239491e-07,
"loss": 1.8024,
"step": 756
},
{
"epoch": 1.6521739130434783,
"grad_norm": 1.8125,
"learning_rate": 8.115707568501768e-07,
"loss": 1.7869,
"step": 760
},
{
"epoch": 1.6608695652173913,
"grad_norm": 1.890625,
"learning_rate": 7.725656321603414e-07,
"loss": 1.6925,
"step": 764
},
{
"epoch": 1.6695652173913045,
"grad_norm": 2.859375,
"learning_rate": 7.344424708357867e-07,
"loss": 1.6703,
"step": 768
},
{
"epoch": 1.6782608695652175,
"grad_norm": 2.03125,
"learning_rate": 6.972092264551439e-07,
"loss": 1.745,
"step": 772
},
{
"epoch": 1.6869565217391305,
"grad_norm": 2.3125,
"learning_rate": 6.60873666934993e-07,
"loss": 1.714,
"step": 776
},
{
"epoch": 1.6956521739130435,
"grad_norm": 1.8828125,
"learning_rate": 6.254433729092518e-07,
"loss": 1.724,
"step": 780
},
{
"epoch": 1.7043478260869565,
"grad_norm": 1.9375,
"learning_rate": 5.909257361476406e-07,
"loss": 1.7012,
"step": 784
},
{
"epoch": 1.7130434782608694,
"grad_norm": 3.65625,
"learning_rate": 5.573279580135438e-07,
"loss": 1.5692,
"step": 788
},
{
"epoch": 1.7217391304347827,
"grad_norm": 1.890625,
"learning_rate": 5.246570479616103e-07,
"loss": 1.753,
"step": 792
},
{
"epoch": 1.7304347826086957,
"grad_norm": 1.7421875,
"learning_rate": 4.929198220753723e-07,
"loss": 1.6207,
"step": 796
},
{
"epoch": 1.7391304347826086,
"grad_norm": 2.171875,
"learning_rate": 4.6212290164521554e-07,
"loss": 1.7742,
"step": 800
},
{
"epoch": 1.7478260869565219,
"grad_norm": 1.7734375,
"learning_rate": 4.322727117869951e-07,
"loss": 1.832,
"step": 804
},
{
"epoch": 1.7565217391304349,
"grad_norm": 1.9609375,
"learning_rate": 4.0337548010157324e-07,
"loss": 1.7819,
"step": 808
},
{
"epoch": 1.7652173913043478,
"grad_norm": 2.5,
"learning_rate": 3.754372353755559e-07,
"loss": 1.6899,
"step": 812
},
{
"epoch": 1.7739130434782608,
"grad_norm": 2.0,
"learning_rate": 3.4846380632352463e-07,
"loss": 1.7287,
"step": 816
},
{
"epoch": 1.7826086956521738,
"grad_norm": 1.8359375,
"learning_rate": 3.224608203719953e-07,
"loss": 1.7088,
"step": 820
},
{
"epoch": 1.7913043478260868,
"grad_norm": 1.8984375,
"learning_rate": 2.974337024853802e-07,
"loss": 1.7639,
"step": 824
},
{
"epoch": 1.8,
"grad_norm": 1.90625,
"learning_rate": 2.733876740341829e-07,
"loss": 1.787,
"step": 828
},
{
"epoch": 1.808695652173913,
"grad_norm": 1.8125,
"learning_rate": 2.5032775170567293e-07,
"loss": 1.7659,
"step": 832
},
{
"epoch": 1.8173913043478263,
"grad_norm": 1.8125,
"learning_rate": 2.2825874645725942e-07,
"loss": 1.7699,
"step": 836
},
{
"epoch": 1.8260869565217392,
"grad_norm": 2.625,
"learning_rate": 2.0718526251279346e-07,
"loss": 1.7371,
"step": 840
},
{
"epoch": 1.8347826086956522,
"grad_norm": 2.046875,
"learning_rate": 1.871116964019898e-07,
"loss": 1.7021,
"step": 844
},
{
"epoch": 1.8434782608695652,
"grad_norm": 1.9765625,
"learning_rate": 1.6804223604318825e-07,
"loss": 1.69,
"step": 848
},
{
"epoch": 1.8521739130434782,
"grad_norm": 1.8671875,
"learning_rate": 1.4998085986963283e-07,
"loss": 1.639,
"step": 852
},
{
"epoch": 1.8608695652173912,
"grad_norm": 2.0,
"learning_rate": 1.329313359994633e-07,
"loss": 1.7022,
"step": 856
},
{
"epoch": 1.8695652173913042,
"grad_norm": 1.828125,
"learning_rate": 1.1689722144956672e-07,
"loss": 1.695,
"step": 860
},
{
"epoch": 1.8782608695652174,
"grad_norm": 1.859375,
"learning_rate": 1.0188186139349355e-07,
"loss": 1.7447,
"step": 864
},
{
"epoch": 1.8869565217391304,
"grad_norm": 1.78125,
"learning_rate": 8.788838846355341e-08,
"loss": 1.7429,
"step": 868
},
{
"epoch": 1.8956521739130436,
"grad_norm": 2.6875,
"learning_rate": 7.491972209725807e-08,
"loss": 1.7296,
"step": 872
},
{
"epoch": 1.9043478260869566,
"grad_norm": 1.75,
"learning_rate": 6.297856792824741e-08,
"loss": 1.6088,
"step": 876
},
{
"epoch": 1.9130434782608696,
"grad_norm": 1.78125,
"learning_rate": 5.206741722181385e-08,
"loss": 1.6972,
"step": 880
},
{
"epoch": 1.9217391304347826,
"grad_norm": 1.75,
"learning_rate": 4.2188546355153016e-08,
"loss": 1.7105,
"step": 884
},
{
"epoch": 1.9304347826086956,
"grad_norm": 1.8359375,
"learning_rate": 3.334401634245033e-08,
"loss": 1.6165,
"step": 888
},
{
"epoch": 1.9391304347826086,
"grad_norm": 1.828125,
"learning_rate": 2.5535672404890523e-08,
"loss": 1.7325,
"step": 892
},
{
"epoch": 1.9478260869565216,
"grad_norm": 2.140625,
"learning_rate": 1.8765143585693924e-08,
"loss": 1.6871,
"step": 896
},
{
"epoch": 1.9565217391304348,
"grad_norm": 1.828125,
"learning_rate": 1.3033842410251074e-08,
"loss": 1.7506,
"step": 900
},
{
"epoch": 1.9652173913043478,
"grad_norm": 1.75,
"learning_rate": 8.342964591430137e-09,
"loss": 1.6842,
"step": 904
},
{
"epoch": 1.973913043478261,
"grad_norm": 1.7578125,
"learning_rate": 4.69348878011644e-09,
"loss": 1.6753,
"step": 908
},
{
"epoch": 1.982608695652174,
"grad_norm": 2.140625,
"learning_rate": 2.0861763610385834e-09,
"loss": 1.6069,
"step": 912
},
{
"epoch": 1.991304347826087,
"grad_norm": 1.890625,
"learning_rate": 5.215712939210527e-10,
"loss": 1.7117,
"step": 916
},
{
"epoch": 2.0,
"grad_norm": 2.578125,
"learning_rate": 0.0,
"loss": 1.7504,
"step": 920
}
],
"logging_steps": 4,
"max_steps": 920,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 230,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 8.371395096477696e+18,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}