{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 9480,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0010548523206751054,
"grad_norm": 1.2733083963394165,
"learning_rate": 0.00015822784810126583,
"loss": 7.4947,
"step": 10
},
{
"epoch": 0.002109704641350211,
"grad_norm": 1.1436876058578491,
"learning_rate": 0.00031645569620253165,
"loss": 6.863,
"step": 20
},
{
"epoch": 0.0031645569620253164,
"grad_norm": 0.8426070213317871,
"learning_rate": 0.00047468354430379745,
"loss": 6.2139,
"step": 30
},
{
"epoch": 0.004219409282700422,
"grad_norm": 0.9538296461105347,
"learning_rate": 0.0006329113924050633,
"loss": 5.6992,
"step": 40
},
{
"epoch": 0.005274261603375527,
"grad_norm": 0.6879795789718628,
"learning_rate": 0.0007911392405063291,
"loss": 5.2014,
"step": 50
},
{
"epoch": 0.006329113924050633,
"grad_norm": 1.1578049659729004,
"learning_rate": 0.0009493670886075949,
"loss": 4.7168,
"step": 60
},
{
"epoch": 0.007383966244725738,
"grad_norm": 1.0756778717041016,
"learning_rate": 0.0011075949367088608,
"loss": 4.3464,
"step": 70
},
{
"epoch": 0.008438818565400843,
"grad_norm": 0.7755897641181946,
"learning_rate": 0.0012658227848101266,
"loss": 4.106,
"step": 80
},
{
"epoch": 0.00949367088607595,
"grad_norm": 1.7596473693847656,
"learning_rate": 0.0014240506329113926,
"loss": 3.9251,
"step": 90
},
{
"epoch": 0.010548523206751054,
"grad_norm": 1.3099244832992554,
"learning_rate": 0.0015,
"loss": 3.7896,
"step": 100
},
{
"epoch": 0.011603375527426161,
"grad_norm": 0.8102260231971741,
"learning_rate": 0.0015,
"loss": 3.6297,
"step": 110
},
{
"epoch": 0.012658227848101266,
"grad_norm": 1.1916205883026123,
"learning_rate": 0.0015,
"loss": 3.518,
"step": 120
},
{
"epoch": 0.013713080168776372,
"grad_norm": 1.1987881660461426,
"learning_rate": 0.0015,
"loss": 3.4118,
"step": 130
},
{
"epoch": 0.014767932489451477,
"grad_norm": 0.641861081123352,
"learning_rate": 0.0015,
"loss": 3.3128,
"step": 140
},
{
"epoch": 0.015822784810126583,
"grad_norm": 1.032217025756836,
"learning_rate": 0.0015,
"loss": 3.2296,
"step": 150
},
{
"epoch": 0.016877637130801686,
"grad_norm": 0.8954044580459595,
"learning_rate": 0.0015,
"loss": 3.1678,
"step": 160
},
{
"epoch": 0.017932489451476793,
"grad_norm": 1.1310805082321167,
"learning_rate": 0.0015,
"loss": 3.0913,
"step": 170
},
{
"epoch": 0.0189873417721519,
"grad_norm": 1.0529614686965942,
"learning_rate": 0.0015,
"loss": 3.0544,
"step": 180
},
{
"epoch": 0.020042194092827006,
"grad_norm": 0.895476758480072,
"learning_rate": 0.0015,
"loss": 2.9922,
"step": 190
},
{
"epoch": 0.02109704641350211,
"grad_norm": 0.8909080624580383,
"learning_rate": 0.0015,
"loss": 2.947,
"step": 200
},
{
"epoch": 0.022151898734177215,
"grad_norm": 0.8560699820518494,
"learning_rate": 0.0015,
"loss": 2.905,
"step": 210
},
{
"epoch": 0.023206751054852322,
"grad_norm": 0.8426381945610046,
"learning_rate": 0.0015,
"loss": 2.8668,
"step": 220
},
{
"epoch": 0.024261603375527425,
"grad_norm": 1.4895472526550293,
"learning_rate": 0.0015,
"loss": 2.8211,
"step": 230
},
{
"epoch": 0.02531645569620253,
"grad_norm": 0.8816336989402771,
"learning_rate": 0.0015,
"loss": 2.7824,
"step": 240
},
{
"epoch": 0.026371308016877638,
"grad_norm": 1.0492737293243408,
"learning_rate": 0.0015,
"loss": 2.7469,
"step": 250
},
{
"epoch": 0.027426160337552744,
"grad_norm": 0.8734013438224792,
"learning_rate": 0.0015,
"loss": 2.7114,
"step": 260
},
{
"epoch": 0.028481012658227847,
"grad_norm": 0.8980610370635986,
"learning_rate": 0.0015,
"loss": 2.6943,
"step": 270
},
{
"epoch": 0.029535864978902954,
"grad_norm": 0.8903745412826538,
"learning_rate": 0.0015,
"loss": 2.6786,
"step": 280
},
{
"epoch": 0.03059071729957806,
"grad_norm": 1.0453561544418335,
"learning_rate": 0.0015,
"loss": 2.6372,
"step": 290
},
{
"epoch": 0.03164556962025317,
"grad_norm": 0.825192928314209,
"learning_rate": 0.0015,
"loss": 2.6076,
"step": 300
},
{
"epoch": 0.03270042194092827,
"grad_norm": 0.9211034774780273,
"learning_rate": 0.0015,
"loss": 2.5851,
"step": 310
},
{
"epoch": 0.03375527426160337,
"grad_norm": 0.7711102366447449,
"learning_rate": 0.0015,
"loss": 2.5565,
"step": 320
},
{
"epoch": 0.03481012658227848,
"grad_norm": 0.9871155023574829,
"learning_rate": 0.0015,
"loss": 2.5546,
"step": 330
},
{
"epoch": 0.035864978902953586,
"grad_norm": 0.7353435158729553,
"learning_rate": 0.0015,
"loss": 2.5257,
"step": 340
},
{
"epoch": 0.03691983122362869,
"grad_norm": 0.7664971947669983,
"learning_rate": 0.0015,
"loss": 2.4865,
"step": 350
},
{
"epoch": 0.0379746835443038,
"grad_norm": 0.9120454788208008,
"learning_rate": 0.0015,
"loss": 2.4683,
"step": 360
},
{
"epoch": 0.039029535864978905,
"grad_norm": 0.8023034334182739,
"learning_rate": 0.0015,
"loss": 2.458,
"step": 370
},
{
"epoch": 0.04008438818565401,
"grad_norm": 0.8567001819610596,
"learning_rate": 0.0015,
"loss": 2.4433,
"step": 380
},
{
"epoch": 0.04113924050632911,
"grad_norm": 0.8988497257232666,
"learning_rate": 0.0015,
"loss": 2.4254,
"step": 390
},
{
"epoch": 0.04219409282700422,
"grad_norm": 0.7328128218650818,
"learning_rate": 0.0015,
"loss": 2.4024,
"step": 400
},
{
"epoch": 0.043248945147679324,
"grad_norm": 0.9598380327224731,
"learning_rate": 0.0015,
"loss": 2.3949,
"step": 410
},
{
"epoch": 0.04430379746835443,
"grad_norm": 1.0258209705352783,
"learning_rate": 0.0015,
"loss": 2.3726,
"step": 420
},
{
"epoch": 0.04535864978902954,
"grad_norm": 0.9216330647468567,
"learning_rate": 0.0015,
"loss": 2.3475,
"step": 430
},
{
"epoch": 0.046413502109704644,
"grad_norm": 0.8838155269622803,
"learning_rate": 0.0015,
"loss": 2.3373,
"step": 440
},
{
"epoch": 0.04746835443037975,
"grad_norm": 0.8373647332191467,
"learning_rate": 0.0015,
"loss": 2.3377,
"step": 450
},
{
"epoch": 0.04852320675105485,
"grad_norm": 0.8128156065940857,
"learning_rate": 0.0015,
"loss": 2.3166,
"step": 460
},
{
"epoch": 0.049578059071729956,
"grad_norm": 0.7165782451629639,
"learning_rate": 0.0015,
"loss": 2.2947,
"step": 470
},
{
"epoch": 0.05063291139240506,
"grad_norm": 1.38456392288208,
"learning_rate": 0.0015,
"loss": 2.2939,
"step": 480
},
{
"epoch": 0.05168776371308017,
"grad_norm": 0.8262339234352112,
"learning_rate": 0.0015,
"loss": 2.266,
"step": 490
},
{
"epoch": 0.052742616033755275,
"grad_norm": 1.2589967250823975,
"learning_rate": 0.0015,
"loss": 2.261,
"step": 500
},
{
"epoch": 0.05379746835443038,
"grad_norm": 0.8126940727233887,
"learning_rate": 0.0015,
"loss": 2.2492,
"step": 510
},
{
"epoch": 0.05485232067510549,
"grad_norm": 0.7197276949882507,
"learning_rate": 0.0015,
"loss": 2.2389,
"step": 520
},
{
"epoch": 0.05590717299578059,
"grad_norm": 0.8715643882751465,
"learning_rate": 0.0015,
"loss": 2.2251,
"step": 530
},
{
"epoch": 0.056962025316455694,
"grad_norm": 0.9228165745735168,
"learning_rate": 0.0015,
"loss": 2.2087,
"step": 540
},
{
"epoch": 0.0580168776371308,
"grad_norm": 0.7372336983680725,
"learning_rate": 0.0015,
"loss": 2.2102,
"step": 550
},
{
"epoch": 0.05907172995780591,
"grad_norm": 0.9682636260986328,
"learning_rate": 0.0015,
"loss": 2.1682,
"step": 560
},
{
"epoch": 0.060126582278481014,
"grad_norm": 0.9414910674095154,
"learning_rate": 0.0015,
"loss": 2.179,
"step": 570
},
{
"epoch": 0.06118143459915612,
"grad_norm": 1.2493133544921875,
"learning_rate": 0.0015,
"loss": 2.1801,
"step": 580
},
{
"epoch": 0.06223628691983123,
"grad_norm": 0.9892802238464355,
"learning_rate": 0.0015,
"loss": 2.1554,
"step": 590
},
{
"epoch": 0.06329113924050633,
"grad_norm": 0.9111932516098022,
"learning_rate": 0.0015,
"loss": 2.1365,
"step": 600
},
{
"epoch": 0.06434599156118144,
"grad_norm": 0.9228165149688721,
"learning_rate": 0.0015,
"loss": 2.138,
"step": 610
},
{
"epoch": 0.06540084388185655,
"grad_norm": 0.8384230136871338,
"learning_rate": 0.0015,
"loss": 2.1237,
"step": 620
},
{
"epoch": 0.06645569620253164,
"grad_norm": 0.8601877093315125,
"learning_rate": 0.0015,
"loss": 2.1126,
"step": 630
},
{
"epoch": 0.06751054852320675,
"grad_norm": 1.1097699403762817,
"learning_rate": 0.0015,
"loss": 2.1286,
"step": 640
},
{
"epoch": 0.06856540084388185,
"grad_norm": 0.8750981688499451,
"learning_rate": 0.0015,
"loss": 2.1135,
"step": 650
},
{
"epoch": 0.06962025316455696,
"grad_norm": 0.9543802738189697,
"learning_rate": 0.0015,
"loss": 2.1048,
"step": 660
},
{
"epoch": 0.07067510548523206,
"grad_norm": 1.188357949256897,
"learning_rate": 0.0015,
"loss": 2.0879,
"step": 670
},
{
"epoch": 0.07172995780590717,
"grad_norm": 0.6918960213661194,
"learning_rate": 0.0015,
"loss": 2.0793,
"step": 680
},
{
"epoch": 0.07278481012658228,
"grad_norm": 0.757602870464325,
"learning_rate": 0.0015,
"loss": 2.0851,
"step": 690
},
{
"epoch": 0.07383966244725738,
"grad_norm": 0.7206082344055176,
"learning_rate": 0.0015,
"loss": 2.0671,
"step": 700
},
{
"epoch": 0.07489451476793249,
"grad_norm": 0.805954098701477,
"learning_rate": 0.0015,
"loss": 2.0618,
"step": 710
},
{
"epoch": 0.0759493670886076,
"grad_norm": 0.947986900806427,
"learning_rate": 0.0015,
"loss": 2.0459,
"step": 720
},
{
"epoch": 0.0770042194092827,
"grad_norm": 1.032812237739563,
"learning_rate": 0.0015,
"loss": 2.0374,
"step": 730
},
{
"epoch": 0.07805907172995781,
"grad_norm": 0.8349412083625793,
"learning_rate": 0.0015,
"loss": 2.0463,
"step": 740
},
{
"epoch": 0.07911392405063292,
"grad_norm": 0.8331874012947083,
"learning_rate": 0.0015,
"loss": 2.0248,
"step": 750
},
{
"epoch": 0.08016877637130802,
"grad_norm": 1.1009873151779175,
"learning_rate": 0.0015,
"loss": 2.0233,
"step": 760
},
{
"epoch": 0.08122362869198312,
"grad_norm": 0.8953216671943665,
"learning_rate": 0.0015,
"loss": 2.0181,
"step": 770
},
{
"epoch": 0.08227848101265822,
"grad_norm": 0.8586803674697876,
"learning_rate": 0.0015,
"loss": 2.015,
"step": 780
},
{
"epoch": 0.08333333333333333,
"grad_norm": 1.0228350162506104,
"learning_rate": 0.0015,
"loss": 2.0196,
"step": 790
},
{
"epoch": 0.08438818565400844,
"grad_norm": 0.7717146277427673,
"learning_rate": 0.0015,
"loss": 1.9936,
"step": 800
},
{
"epoch": 0.08544303797468354,
"grad_norm": 0.7378641963005066,
"learning_rate": 0.0015,
"loss": 1.9766,
"step": 810
},
{
"epoch": 0.08649789029535865,
"grad_norm": 0.7535266876220703,
"learning_rate": 0.0015,
"loss": 1.9871,
"step": 820
},
{
"epoch": 0.08755274261603375,
"grad_norm": 1.1189807653427124,
"learning_rate": 0.0015,
"loss": 1.9945,
"step": 830
},
{
"epoch": 0.08860759493670886,
"grad_norm": 0.7239416837692261,
"learning_rate": 0.0015,
"loss": 1.9697,
"step": 840
},
{
"epoch": 0.08966244725738397,
"grad_norm": 0.6967167854309082,
"learning_rate": 0.0015,
"loss": 1.9682,
"step": 850
},
{
"epoch": 0.09071729957805907,
"grad_norm": 0.7111939787864685,
"learning_rate": 0.0015,
"loss": 1.9678,
"step": 860
},
{
"epoch": 0.09177215189873418,
"grad_norm": 0.78915935754776,
"learning_rate": 0.0015,
"loss": 1.9665,
"step": 870
},
{
"epoch": 0.09282700421940929,
"grad_norm": 0.7788413166999817,
"learning_rate": 0.0015,
"loss": 1.9597,
"step": 880
},
{
"epoch": 0.0938818565400844,
"grad_norm": 0.8729071617126465,
"learning_rate": 0.0015,
"loss": 1.9562,
"step": 890
},
{
"epoch": 0.0949367088607595,
"grad_norm": 0.8148085474967957,
"learning_rate": 0.0015,
"loss": 1.9466,
"step": 900
},
{
"epoch": 0.09599156118143459,
"grad_norm": 0.8399235010147095,
"learning_rate": 0.0015,
"loss": 1.9511,
"step": 910
},
{
"epoch": 0.0970464135021097,
"grad_norm": 0.7751302123069763,
"learning_rate": 0.0015,
"loss": 1.9419,
"step": 920
},
{
"epoch": 0.0981012658227848,
"grad_norm": 0.7782713770866394,
"learning_rate": 0.0015,
"loss": 1.9295,
"step": 930
},
{
"epoch": 0.09915611814345991,
"grad_norm": 0.7294489145278931,
"learning_rate": 0.0015,
"loss": 1.9344,
"step": 940
},
{
"epoch": 0.10021097046413502,
"grad_norm": 1.0216084718704224,
"learning_rate": 0.0015,
"loss": 1.914,
"step": 950
},
{
"epoch": 0.10126582278481013,
"grad_norm": 0.820957601070404,
"learning_rate": 0.0015,
"loss": 1.9235,
"step": 960
},
{
"epoch": 0.10232067510548523,
"grad_norm": 1.3012456893920898,
"learning_rate": 0.0015,
"loss": 1.9266,
"step": 970
},
{
"epoch": 0.10337552742616034,
"grad_norm": 0.9849552512168884,
"learning_rate": 0.0015,
"loss": 1.9044,
"step": 980
},
{
"epoch": 0.10443037974683544,
"grad_norm": 0.7841943502426147,
"learning_rate": 0.0015,
"loss": 1.9006,
"step": 990
},
{
"epoch": 0.10548523206751055,
"grad_norm": 0.7804417014122009,
"learning_rate": 0.0015,
"loss": 1.9115,
"step": 1000
},
{
"epoch": 0.10654008438818566,
"grad_norm": 0.7667819857597351,
"learning_rate": 0.0015,
"loss": 1.9019,
"step": 1010
},
{
"epoch": 0.10759493670886076,
"grad_norm": 0.9037128686904907,
"learning_rate": 0.0015,
"loss": 1.8948,
"step": 1020
},
{
"epoch": 0.10864978902953587,
"grad_norm": 0.9198371171951294,
"learning_rate": 0.0015,
"loss": 1.894,
"step": 1030
},
{
"epoch": 0.10970464135021098,
"grad_norm": 0.8214184641838074,
"learning_rate": 0.0015,
"loss": 1.885,
"step": 1040
},
{
"epoch": 0.11075949367088607,
"grad_norm": 1.170914888381958,
"learning_rate": 0.0015,
"loss": 1.8879,
"step": 1050
},
{
"epoch": 0.11181434599156118,
"grad_norm": 0.8426679372787476,
"learning_rate": 0.0015,
"loss": 1.8787,
"step": 1060
},
{
"epoch": 0.11286919831223628,
"grad_norm": 0.7770638465881348,
"learning_rate": 0.0015,
"loss": 1.8662,
"step": 1070
},
{
"epoch": 0.11392405063291139,
"grad_norm": 0.9343608617782593,
"learning_rate": 0.0015,
"loss": 1.8684,
"step": 1080
},
{
"epoch": 0.1149789029535865,
"grad_norm": 0.9789599180221558,
"learning_rate": 0.0015,
"loss": 1.8773,
"step": 1090
},
{
"epoch": 0.1160337552742616,
"grad_norm": 0.7243770360946655,
"learning_rate": 0.0015,
"loss": 1.8641,
"step": 1100
},
{
"epoch": 0.11708860759493671,
"grad_norm": 0.7385159730911255,
"learning_rate": 0.0015,
"loss": 1.8669,
"step": 1110
},
{
"epoch": 0.11814345991561181,
"grad_norm": 0.7844740748405457,
"learning_rate": 0.0015,
"loss": 1.8613,
"step": 1120
},
{
"epoch": 0.11919831223628692,
"grad_norm": 0.9194635152816772,
"learning_rate": 0.0015,
"loss": 1.8452,
"step": 1130
},
{
"epoch": 0.12025316455696203,
"grad_norm": 1.1453827619552612,
"learning_rate": 0.0015,
"loss": 1.8627,
"step": 1140
},
{
"epoch": 0.12130801687763713,
"grad_norm": 1.2072694301605225,
"learning_rate": 0.0015,
"loss": 1.8435,
"step": 1150
},
{
"epoch": 0.12236286919831224,
"grad_norm": 0.8786587119102478,
"learning_rate": 0.0015,
"loss": 1.8549,
"step": 1160
},
{
"epoch": 0.12341772151898735,
"grad_norm": 1.1161935329437256,
"learning_rate": 0.0015,
"loss": 1.8387,
"step": 1170
},
{
"epoch": 0.12447257383966245,
"grad_norm": 0.7054557800292969,
"learning_rate": 0.0015,
"loss": 1.8249,
"step": 1180
},
{
"epoch": 0.12552742616033755,
"grad_norm": 0.7684183716773987,
"learning_rate": 0.0015,
"loss": 1.8271,
"step": 1190
},
{
"epoch": 0.12658227848101267,
"grad_norm": 1.4159756898880005,
"learning_rate": 0.0015,
"loss": 1.8482,
"step": 1200
},
{
"epoch": 0.12763713080168776,
"grad_norm": 0.8124328255653381,
"learning_rate": 0.0015,
"loss": 1.8465,
"step": 1210
},
{
"epoch": 0.12869198312236288,
"grad_norm": 0.7402304410934448,
"learning_rate": 0.0015,
"loss": 1.8163,
"step": 1220
},
{
"epoch": 0.12974683544303797,
"grad_norm": 0.7972971200942993,
"learning_rate": 0.0015,
"loss": 1.8079,
"step": 1230
},
{
"epoch": 0.1308016877637131,
"grad_norm": 0.7267691493034363,
"learning_rate": 0.0015,
"loss": 1.8246,
"step": 1240
},
{
"epoch": 0.13185654008438819,
"grad_norm": 0.7364758253097534,
"learning_rate": 0.0015,
"loss": 1.8262,
"step": 1250
},
{
"epoch": 0.13291139240506328,
"grad_norm": 1.1476126909255981,
"learning_rate": 0.0015,
"loss": 1.8125,
"step": 1260
},
{
"epoch": 0.1339662447257384,
"grad_norm": 0.7530903220176697,
"learning_rate": 0.0015,
"loss": 1.807,
"step": 1270
},
{
"epoch": 0.1350210970464135,
"grad_norm": 0.6699790954589844,
"learning_rate": 0.0015,
"loss": 1.8252,
"step": 1280
},
{
"epoch": 0.1360759493670886,
"grad_norm": 0.678423285484314,
"learning_rate": 0.0015,
"loss": 1.8093,
"step": 1290
},
{
"epoch": 0.1371308016877637,
"grad_norm": 0.6942156553268433,
"learning_rate": 0.0015,
"loss": 1.7993,
"step": 1300
},
{
"epoch": 0.13818565400843882,
"grad_norm": 0.8321163058280945,
"learning_rate": 0.0015,
"loss": 1.8011,
"step": 1310
},
{
"epoch": 0.13924050632911392,
"grad_norm": 0.8527196645736694,
"learning_rate": 0.0015,
"loss": 1.8064,
"step": 1320
},
{
"epoch": 0.14029535864978904,
"grad_norm": 0.9708861708641052,
"learning_rate": 0.0015,
"loss": 1.798,
"step": 1330
},
{
"epoch": 0.14135021097046413,
"grad_norm": 0.8005797266960144,
"learning_rate": 0.0015,
"loss": 1.7978,
"step": 1340
},
{
"epoch": 0.14240506329113925,
"grad_norm": 0.712599515914917,
"learning_rate": 0.0015,
"loss": 1.801,
"step": 1350
},
{
"epoch": 0.14345991561181434,
"grad_norm": 0.6891728043556213,
"learning_rate": 0.0015,
"loss": 1.7894,
"step": 1360
},
{
"epoch": 0.14451476793248946,
"grad_norm": 0.7310248017311096,
"learning_rate": 0.0015,
"loss": 1.7882,
"step": 1370
},
{
"epoch": 0.14556962025316456,
"grad_norm": 0.8893153667449951,
"learning_rate": 0.0015,
"loss": 1.7768,
"step": 1380
},
{
"epoch": 0.14662447257383968,
"grad_norm": 1.3351496458053589,
"learning_rate": 0.0015,
"loss": 1.7898,
"step": 1390
},
{
"epoch": 0.14767932489451477,
"grad_norm": 0.8466930985450745,
"learning_rate": 0.0015,
"loss": 1.7748,
"step": 1400
},
{
"epoch": 0.14873417721518986,
"grad_norm": 0.7071364521980286,
"learning_rate": 0.0015,
"loss": 1.7688,
"step": 1410
},
{
"epoch": 0.14978902953586498,
"grad_norm": 0.731139063835144,
"learning_rate": 0.0015,
"loss": 1.779,
"step": 1420
},
{
"epoch": 0.15084388185654007,
"grad_norm": 0.8124088048934937,
"learning_rate": 0.0015,
"loss": 1.7746,
"step": 1430
},
{
"epoch": 0.1518987341772152,
"grad_norm": 0.7471673488616943,
"learning_rate": 0.0015,
"loss": 1.7678,
"step": 1440
},
{
"epoch": 0.1529535864978903,
"grad_norm": 0.6360813975334167,
"learning_rate": 0.0015,
"loss": 1.768,
"step": 1450
},
{
"epoch": 0.1540084388185654,
"grad_norm": 0.808580219745636,
"learning_rate": 0.0015,
"loss": 1.7657,
"step": 1460
},
{
"epoch": 0.1550632911392405,
"grad_norm": 0.7080631256103516,
"learning_rate": 0.0015,
"loss": 1.7617,
"step": 1470
},
{
"epoch": 0.15611814345991562,
"grad_norm": 0.7757136225700378,
"learning_rate": 0.0015,
"loss": 1.7632,
"step": 1480
},
{
"epoch": 0.1571729957805907,
"grad_norm": 0.6861632466316223,
"learning_rate": 0.0015,
"loss": 1.7612,
"step": 1490
},
{
"epoch": 0.15822784810126583,
"grad_norm": 0.7646285891532898,
"learning_rate": 0.0015,
"loss": 1.7608,
"step": 1500
},
{
"epoch": 0.15928270042194093,
"grad_norm": 0.6651663184165955,
"learning_rate": 0.0015,
"loss": 1.7609,
"step": 1510
},
{
"epoch": 0.16033755274261605,
"grad_norm": 0.631038248538971,
"learning_rate": 0.0015,
"loss": 1.7577,
"step": 1520
},
{
"epoch": 0.16139240506329114,
"grad_norm": 0.6468666791915894,
"learning_rate": 0.0015,
"loss": 1.7421,
"step": 1530
},
{
"epoch": 0.16244725738396623,
"grad_norm": 0.7991131544113159,
"learning_rate": 0.0015,
"loss": 1.736,
"step": 1540
},
{
"epoch": 0.16350210970464135,
"grad_norm": 0.7404541969299316,
"learning_rate": 0.0015,
"loss": 1.7381,
"step": 1550
},
{
"epoch": 0.16455696202531644,
"grad_norm": 0.6834830045700073,
"learning_rate": 0.0015,
"loss": 1.7392,
"step": 1560
},
{
"epoch": 0.16561181434599156,
"grad_norm": 0.7333276271820068,
"learning_rate": 0.0015,
"loss": 1.7336,
"step": 1570
},
{
"epoch": 0.16666666666666666,
"grad_norm": 0.653140664100647,
"learning_rate": 0.0015,
"loss": 1.7429,
"step": 1580
},
{
"epoch": 0.16772151898734178,
"grad_norm": 0.8318632245063782,
"learning_rate": 0.0015,
"loss": 1.7341,
"step": 1590
},
{
"epoch": 0.16877637130801687,
"grad_norm": 0.6731577515602112,
"learning_rate": 0.0015,
"loss": 1.742,
"step": 1600
},
{
"epoch": 0.169831223628692,
"grad_norm": 0.6578022837638855,
"learning_rate": 0.0015,
"loss": 1.7408,
"step": 1610
},
{
"epoch": 0.17088607594936708,
"grad_norm": 0.6312196850776672,
"learning_rate": 0.0015,
"loss": 1.7064,
"step": 1620
},
{
"epoch": 0.1719409282700422,
"grad_norm": 0.7015892863273621,
"learning_rate": 0.0015,
"loss": 1.7202,
"step": 1630
},
{
"epoch": 0.1729957805907173,
"grad_norm": 0.7106109857559204,
"learning_rate": 0.0015,
"loss": 1.7256,
"step": 1640
},
{
"epoch": 0.17405063291139242,
"grad_norm": 1.1778593063354492,
"learning_rate": 0.0015,
"loss": 1.7149,
"step": 1650
},
{
"epoch": 0.1751054852320675,
"grad_norm": 0.6946284174919128,
"learning_rate": 0.0015,
"loss": 1.7173,
"step": 1660
},
{
"epoch": 0.17616033755274263,
"grad_norm": 0.815626859664917,
"learning_rate": 0.0015,
"loss": 1.7258,
"step": 1670
},
{
"epoch": 0.17721518987341772,
"grad_norm": 1.0013611316680908,
"learning_rate": 0.0015,
"loss": 1.7098,
"step": 1680
},
{
"epoch": 0.17827004219409281,
"grad_norm": 0.7599984407424927,
"learning_rate": 0.0015,
"loss": 1.707,
"step": 1690
},
{
"epoch": 0.17932489451476794,
"grad_norm": 0.7056793570518494,
"learning_rate": 0.0015,
"loss": 1.7213,
"step": 1700
},
{
"epoch": 0.18037974683544303,
"grad_norm": 0.7815384268760681,
"learning_rate": 0.0015,
"loss": 1.7043,
"step": 1710
},
{
"epoch": 0.18143459915611815,
"grad_norm": 0.644148051738739,
"learning_rate": 0.0015,
"loss": 1.7032,
"step": 1720
},
{
"epoch": 0.18248945147679324,
"grad_norm": 0.6395612359046936,
"learning_rate": 0.0015,
"loss": 1.7054,
"step": 1730
},
{
"epoch": 0.18354430379746836,
"grad_norm": 0.7701711654663086,
"learning_rate": 0.0015,
"loss": 1.7077,
"step": 1740
},
{
"epoch": 0.18459915611814345,
"grad_norm": 0.633503794670105,
"learning_rate": 0.0015,
"loss": 1.7257,
"step": 1750
},
{
"epoch": 0.18565400843881857,
"grad_norm": 0.7417237162590027,
"learning_rate": 0.0015,
"loss": 1.7082,
"step": 1760
},
{
"epoch": 0.18670886075949367,
"grad_norm": 0.6460136771202087,
"learning_rate": 0.0015,
"loss": 1.7074,
"step": 1770
},
{
"epoch": 0.1877637130801688,
"grad_norm": 0.724323034286499,
"learning_rate": 0.0015,
"loss": 1.7083,
"step": 1780
},
{
"epoch": 0.18881856540084388,
"grad_norm": 0.9442452192306519,
"learning_rate": 0.0015,
"loss": 1.7095,
"step": 1790
},
{
"epoch": 0.189873417721519,
"grad_norm": 0.6979749202728271,
"learning_rate": 0.0015,
"loss": 1.7037,
"step": 1800
},
{
"epoch": 0.1909282700421941,
"grad_norm": 0.7219043970108032,
"learning_rate": 0.0015,
"loss": 1.6912,
"step": 1810
},
{
"epoch": 0.19198312236286919,
"grad_norm": 0.7260817885398865,
"learning_rate": 0.0015,
"loss": 1.6921,
"step": 1820
},
{
"epoch": 0.1930379746835443,
"grad_norm": 0.7995448708534241,
"learning_rate": 0.0015,
"loss": 1.689,
"step": 1830
},
{
"epoch": 0.1940928270042194,
"grad_norm": 0.6635895371437073,
"learning_rate": 0.0015,
"loss": 1.6862,
"step": 1840
},
{
"epoch": 0.19514767932489452,
"grad_norm": 0.9861568212509155,
"learning_rate": 0.0015,
"loss": 1.6892,
"step": 1850
},
{
"epoch": 0.1962025316455696,
"grad_norm": 0.6414808630943298,
"learning_rate": 0.0015,
"loss": 1.6958,
"step": 1860
},
{
"epoch": 0.19725738396624473,
"grad_norm": 0.6641356945037842,
"learning_rate": 0.0015,
"loss": 1.6909,
"step": 1870
},
{
"epoch": 0.19831223628691982,
"grad_norm": 0.6749581098556519,
"learning_rate": 0.0015,
"loss": 1.6807,
"step": 1880
},
{
"epoch": 0.19936708860759494,
"grad_norm": 1.255850076675415,
"learning_rate": 0.0015,
"loss": 1.6886,
"step": 1890
},
{
"epoch": 0.20042194092827004,
"grad_norm": 0.97452312707901,
"learning_rate": 0.0015,
"loss": 1.6868,
"step": 1900
},
{
"epoch": 0.20147679324894516,
"grad_norm": 0.6632014513015747,
"learning_rate": 0.0015,
"loss": 1.6869,
"step": 1910
},
{
"epoch": 0.20253164556962025,
"grad_norm": 0.700194776058197,
"learning_rate": 0.0015,
"loss": 1.6798,
"step": 1920
},
{
"epoch": 0.20358649789029537,
"grad_norm": 0.7002797722816467,
"learning_rate": 0.0015,
"loss": 1.6809,
"step": 1930
},
{
"epoch": 0.20464135021097046,
"grad_norm": 0.6715006232261658,
"learning_rate": 0.0015,
"loss": 1.6699,
"step": 1940
},
{
"epoch": 0.20569620253164558,
"grad_norm": 0.6772109866142273,
"learning_rate": 0.0015,
"loss": 1.671,
"step": 1950
},
{
"epoch": 0.20675105485232068,
"grad_norm": 0.6926902532577515,
"learning_rate": 0.0015,
"loss": 1.6728,
"step": 1960
},
{
"epoch": 0.20780590717299577,
"grad_norm": 0.76792311668396,
"learning_rate": 0.0015,
"loss": 1.6834,
"step": 1970
},
{
"epoch": 0.2088607594936709,
"grad_norm": 0.7201133966445923,
"learning_rate": 0.0015,
"loss": 1.6662,
"step": 1980
},
{
"epoch": 0.20991561181434598,
"grad_norm": 0.6740146279335022,
"learning_rate": 0.0015,
"loss": 1.663,
"step": 1990
},
{
"epoch": 0.2109704641350211,
"grad_norm": 0.8047731518745422,
"learning_rate": 0.0015,
"loss": 1.6628,
"step": 2000
},
{
"epoch": 0.2120253164556962,
"grad_norm": 0.6108027100563049,
"learning_rate": 0.0015,
"loss": 1.6727,
"step": 2010
},
{
"epoch": 0.21308016877637131,
"grad_norm": 0.7143127918243408,
"learning_rate": 0.0015,
"loss": 1.6795,
"step": 2020
},
{
"epoch": 0.2141350210970464,
"grad_norm": 0.659460186958313,
"learning_rate": 0.0015,
"loss": 1.6695,
"step": 2030
},
{
"epoch": 0.21518987341772153,
"grad_norm": 0.6175896525382996,
"learning_rate": 0.0015,
"loss": 1.6655,
"step": 2040
},
{
"epoch": 0.21624472573839662,
"grad_norm": 0.679816484451294,
"learning_rate": 0.0015,
"loss": 1.6627,
"step": 2050
},
{
"epoch": 0.21729957805907174,
"grad_norm": 0.8619924783706665,
"learning_rate": 0.0015,
"loss": 1.6542,
"step": 2060
},
{
"epoch": 0.21835443037974683,
"grad_norm": 0.9540325999259949,
"learning_rate": 0.0015,
"loss": 1.658,
"step": 2070
},
{
"epoch": 0.21940928270042195,
"grad_norm": 0.8972859382629395,
"learning_rate": 0.0015,
"loss": 1.6663,
"step": 2080
},
{
"epoch": 0.22046413502109705,
"grad_norm": 0.8370799422264099,
"learning_rate": 0.0015,
"loss": 1.6517,
"step": 2090
},
{
"epoch": 0.22151898734177214,
"grad_norm": 1.10194993019104,
"learning_rate": 0.0015,
"loss": 1.6609,
"step": 2100
},
{
"epoch": 0.22257383966244726,
"grad_norm": 0.6842101216316223,
"learning_rate": 0.0015,
"loss": 1.6482,
"step": 2110
},
{
"epoch": 0.22362869198312235,
"grad_norm": 0.6499814987182617,
"learning_rate": 0.0015,
"loss": 1.6483,
"step": 2120
},
{
"epoch": 0.22468354430379747,
"grad_norm": 0.6887125372886658,
"learning_rate": 0.0015,
"loss": 1.6539,
"step": 2130
},
{
"epoch": 0.22573839662447256,
"grad_norm": 0.6073333621025085,
"learning_rate": 0.0015,
"loss": 1.6446,
"step": 2140
},
{
"epoch": 0.22679324894514769,
"grad_norm": 0.8369203209877014,
"learning_rate": 0.0015,
"loss": 1.6538,
"step": 2150
},
{
"epoch": 0.22784810126582278,
"grad_norm": 0.8322305679321289,
"learning_rate": 0.0015,
"loss": 1.6669,
"step": 2160
},
{
"epoch": 0.2289029535864979,
"grad_norm": 0.630450427532196,
"learning_rate": 0.0015,
"loss": 1.6479,
"step": 2170
},
{
"epoch": 0.229957805907173,
"grad_norm": 0.5693684816360474,
"learning_rate": 0.0015,
"loss": 1.6326,
"step": 2180
},
{
"epoch": 0.2310126582278481,
"grad_norm": 0.6728575825691223,
"learning_rate": 0.0015,
"loss": 1.6506,
"step": 2190
},
{
"epoch": 0.2320675105485232,
"grad_norm": 0.6408063173294067,
"learning_rate": 0.0015,
"loss": 1.6517,
"step": 2200
},
{
"epoch": 0.23312236286919832,
"grad_norm": 0.6169241666793823,
"learning_rate": 0.0015,
"loss": 1.6369,
"step": 2210
},
{
"epoch": 0.23417721518987342,
"grad_norm": 0.7348849177360535,
"learning_rate": 0.0015,
"loss": 1.6472,
"step": 2220
},
{
"epoch": 0.23523206751054854,
"grad_norm": 0.7338847517967224,
"learning_rate": 0.0015,
"loss": 1.6387,
"step": 2230
},
{
"epoch": 0.23628691983122363,
"grad_norm": 0.6961486339569092,
"learning_rate": 0.0015,
"loss": 1.6371,
"step": 2240
},
{
"epoch": 0.23734177215189872,
"grad_norm": 0.6590170860290527,
"learning_rate": 0.0015,
"loss": 1.6378,
"step": 2250
},
{
"epoch": 0.23839662447257384,
"grad_norm": 0.9444085359573364,
"learning_rate": 0.0015,
"loss": 1.6307,
"step": 2260
},
{
"epoch": 0.23945147679324894,
"grad_norm": 0.7793636322021484,
"learning_rate": 0.0015,
"loss": 1.6595,
"step": 2270
},
{
"epoch": 0.24050632911392406,
"grad_norm": 0.8052467107772827,
"learning_rate": 0.0015,
"loss": 1.6425,
"step": 2280
},
{
"epoch": 0.24156118143459915,
"grad_norm": 1.3532497882843018,
"learning_rate": 0.0015,
"loss": 1.6338,
"step": 2290
},
{
"epoch": 0.24261603375527427,
"grad_norm": 0.72308748960495,
"learning_rate": 0.0015,
"loss": 1.6297,
"step": 2300
},
{
"epoch": 0.24367088607594936,
"grad_norm": 0.7474813461303711,
"learning_rate": 0.0015,
"loss": 1.6324,
"step": 2310
},
{
"epoch": 0.24472573839662448,
"grad_norm": 0.6758801937103271,
"learning_rate": 0.0015,
"loss": 1.6367,
"step": 2320
},
{
"epoch": 0.24578059071729957,
"grad_norm": 0.7427731156349182,
"learning_rate": 0.0015,
"loss": 1.621,
"step": 2330
},
{
"epoch": 0.2468354430379747,
"grad_norm": 0.83324134349823,
"learning_rate": 0.0015,
"loss": 1.6173,
"step": 2340
},
{
"epoch": 0.2478902953586498,
"grad_norm": 0.5680593252182007,
"learning_rate": 0.0015,
"loss": 1.6205,
"step": 2350
},
{
"epoch": 0.2489451476793249,
"grad_norm": 1.4345910549163818,
"learning_rate": 0.0015,
"loss": 1.6344,
"step": 2360
},
{
"epoch": 0.25,
"grad_norm": 0.8737362027168274,
"learning_rate": 0.0015,
"loss": 1.6286,
"step": 2370
},
{
"epoch": 0.2510548523206751,
"grad_norm": 0.6703172326087952,
"learning_rate": 0.0015,
"loss": 1.6177,
"step": 2380
},
{
"epoch": 0.2521097046413502,
"grad_norm": 0.6273900866508484,
"learning_rate": 0.0015,
"loss": 1.6223,
"step": 2390
},
{
"epoch": 0.25316455696202533,
"grad_norm": 0.5641729831695557,
"learning_rate": 0.0015,
"loss": 1.6217,
"step": 2400
},
{
"epoch": 0.2542194092827004,
"grad_norm": 0.6888928413391113,
"learning_rate": 0.0015,
"loss": 1.6212,
"step": 2410
},
{
"epoch": 0.2552742616033755,
"grad_norm": 0.8733038902282715,
"learning_rate": 0.0015,
"loss": 1.6177,
"step": 2420
},
{
"epoch": 0.2563291139240506,
"grad_norm": 0.706615686416626,
"learning_rate": 0.0015,
"loss": 1.6163,
"step": 2430
},
{
"epoch": 0.25738396624472576,
"grad_norm": 0.9507734179496765,
"learning_rate": 0.0015,
"loss": 1.6272,
"step": 2440
},
{
"epoch": 0.25843881856540085,
"grad_norm": 0.6897843480110168,
"learning_rate": 0.0015,
"loss": 1.6123,
"step": 2450
},
{
"epoch": 0.25949367088607594,
"grad_norm": 0.6075423955917358,
"learning_rate": 0.0015,
"loss": 1.6286,
"step": 2460
},
{
"epoch": 0.26054852320675104,
"grad_norm": 0.785085141658783,
"learning_rate": 0.0015,
"loss": 1.6158,
"step": 2470
},
{
"epoch": 0.2616033755274262,
"grad_norm": 0.641060471534729,
"learning_rate": 0.0015,
"loss": 1.6221,
"step": 2480
},
{
"epoch": 0.2626582278481013,
"grad_norm": 0.5829103589057922,
"learning_rate": 0.0015,
"loss": 1.6084,
"step": 2490
},
{
"epoch": 0.26371308016877637,
"grad_norm": 0.674933910369873,
"learning_rate": 0.0015,
"loss": 1.6119,
"step": 2500
},
{
"epoch": 0.26476793248945146,
"grad_norm": 0.6346490383148193,
"learning_rate": 0.0015,
"loss": 1.6124,
"step": 2510
},
{
"epoch": 0.26582278481012656,
"grad_norm": 0.7300370335578918,
"learning_rate": 0.0015,
"loss": 1.6146,
"step": 2520
},
{
"epoch": 0.2668776371308017,
"grad_norm": 0.658406674861908,
"learning_rate": 0.0015,
"loss": 1.6085,
"step": 2530
},
{
"epoch": 0.2679324894514768,
"grad_norm": 0.7472488284111023,
"learning_rate": 0.0015,
"loss": 1.6128,
"step": 2540
},
{
"epoch": 0.2689873417721519,
"grad_norm": 1.044743537902832,
"learning_rate": 0.0015,
"loss": 1.6066,
"step": 2550
},
{
"epoch": 0.270042194092827,
"grad_norm": 0.66208416223526,
"learning_rate": 0.0015,
"loss": 1.612,
"step": 2560
},
{
"epoch": 0.27109704641350213,
"grad_norm": 0.6711663603782654,
"learning_rate": 0.0015,
"loss": 1.6071,
"step": 2570
},
{
"epoch": 0.2721518987341772,
"grad_norm": 0.6813085675239563,
"learning_rate": 0.0015,
"loss": 1.6056,
"step": 2580
},
{
"epoch": 0.2732067510548523,
"grad_norm": 0.6887850761413574,
"learning_rate": 0.0015,
"loss": 1.6123,
"step": 2590
},
{
"epoch": 0.2742616033755274,
"grad_norm": 0.6657094359397888,
"learning_rate": 0.0015,
"loss": 1.6138,
"step": 2600
},
{
"epoch": 0.27531645569620256,
"grad_norm": 0.7057452201843262,
"learning_rate": 0.0015,
"loss": 1.6078,
"step": 2610
},
{
"epoch": 0.27637130801687765,
"grad_norm": 0.6831801533699036,
"learning_rate": 0.0015,
"loss": 1.6115,
"step": 2620
},
{
"epoch": 0.27742616033755274,
"grad_norm": 0.6510992646217346,
"learning_rate": 0.0015,
"loss": 1.5975,
"step": 2630
},
{
"epoch": 0.27848101265822783,
"grad_norm": 0.6334524750709534,
"learning_rate": 0.0015,
"loss": 1.6097,
"step": 2640
},
{
"epoch": 0.2795358649789029,
"grad_norm": 0.7811564803123474,
"learning_rate": 0.0015,
"loss": 1.6003,
"step": 2650
},
{
"epoch": 0.2805907172995781,
"grad_norm": 0.7001844644546509,
"learning_rate": 0.0015,
"loss": 1.5961,
"step": 2660
},
{
"epoch": 0.28164556962025317,
"grad_norm": 0.7368627786636353,
"learning_rate": 0.0015,
"loss": 1.5994,
"step": 2670
},
{
"epoch": 0.28270042194092826,
"grad_norm": 0.5861145853996277,
"learning_rate": 0.0015,
"loss": 1.5974,
"step": 2680
},
{
"epoch": 0.28375527426160335,
"grad_norm": 0.7447869181632996,
"learning_rate": 0.0015,
"loss": 1.5949,
"step": 2690
},
{
"epoch": 0.2848101265822785,
"grad_norm": 0.649631917476654,
"learning_rate": 0.0015,
"loss": 1.5926,
"step": 2700
},
{
"epoch": 0.2858649789029536,
"grad_norm": 0.6203135251998901,
"learning_rate": 0.0015,
"loss": 1.6008,
"step": 2710
},
{
"epoch": 0.2869198312236287,
"grad_norm": 0.5769614577293396,
"learning_rate": 0.0015,
"loss": 1.5992,
"step": 2720
},
{
"epoch": 0.2879746835443038,
"grad_norm": 0.6501977443695068,
"learning_rate": 0.0015,
"loss": 1.5786,
"step": 2730
},
{
"epoch": 0.2890295358649789,
"grad_norm": 0.7535536885261536,
"learning_rate": 0.0015,
"loss": 1.5914,
"step": 2740
},
{
"epoch": 0.290084388185654,
"grad_norm": 0.648307204246521,
"learning_rate": 0.0015,
"loss": 1.597,
"step": 2750
},
{
"epoch": 0.2911392405063291,
"grad_norm": 0.8430402278900146,
"learning_rate": 0.0015,
"loss": 1.6032,
"step": 2760
},
{
"epoch": 0.2921940928270042,
"grad_norm": 0.5858532190322876,
"learning_rate": 0.0015,
"loss": 1.5979,
"step": 2770
},
{
"epoch": 0.29324894514767935,
"grad_norm": 0.6397041082382202,
"learning_rate": 0.0015,
"loss": 1.5937,
"step": 2780
},
{
"epoch": 0.29430379746835444,
"grad_norm": 0.5706927180290222,
"learning_rate": 0.0015,
"loss": 1.5927,
"step": 2790
},
{
"epoch": 0.29535864978902954,
"grad_norm": 0.5816206336021423,
"learning_rate": 0.0015,
"loss": 1.5908,
"step": 2800
},
{
"epoch": 0.29641350210970463,
"grad_norm": 0.5801464915275574,
"learning_rate": 0.0015,
"loss": 1.5913,
"step": 2810
},
{
"epoch": 0.2974683544303797,
"grad_norm": 0.6373363733291626,
"learning_rate": 0.0015,
"loss": 1.5975,
"step": 2820
},
{
"epoch": 0.29852320675105487,
"grad_norm": 1.0975168943405151,
"learning_rate": 0.0015,
"loss": 1.5746,
"step": 2830
},
{
"epoch": 0.29957805907172996,
"grad_norm": 0.7918742299079895,
"learning_rate": 0.0015,
"loss": 1.58,
"step": 2840
},
{
"epoch": 0.30063291139240506,
"grad_norm": 0.6820138096809387,
"learning_rate": 0.0015,
"loss": 1.5792,
"step": 2850
},
{
"epoch": 0.30168776371308015,
"grad_norm": 0.5886484980583191,
"learning_rate": 0.0015,
"loss": 1.5813,
"step": 2860
},
{
"epoch": 0.3027426160337553,
"grad_norm": 0.7658937573432922,
"learning_rate": 0.0015,
"loss": 1.5878,
"step": 2870
},
{
"epoch": 0.3037974683544304,
"grad_norm": 0.6914458870887756,
"learning_rate": 0.0015,
"loss": 1.5735,
"step": 2880
},
{
"epoch": 0.3048523206751055,
"grad_norm": 0.695576548576355,
"learning_rate": 0.0015,
"loss": 1.5791,
"step": 2890
},
{
"epoch": 0.3059071729957806,
"grad_norm": 0.7019651532173157,
"learning_rate": 0.0015,
"loss": 1.5838,
"step": 2900
},
{
"epoch": 0.3069620253164557,
"grad_norm": 0.6431899070739746,
"learning_rate": 0.0015,
"loss": 1.585,
"step": 2910
},
{
"epoch": 0.3080168776371308,
"grad_norm": 0.6427409052848816,
"learning_rate": 0.0015,
"loss": 1.5778,
"step": 2920
},
{
"epoch": 0.3090717299578059,
"grad_norm": 0.5970958471298218,
"learning_rate": 0.0015,
"loss": 1.5728,
"step": 2930
},
{
"epoch": 0.310126582278481,
"grad_norm": 1.1294201612472534,
"learning_rate": 0.0015,
"loss": 1.5833,
"step": 2940
},
{
"epoch": 0.3111814345991561,
"grad_norm": 0.622044026851654,
"learning_rate": 0.0015,
"loss": 1.591,
"step": 2950
},
{
"epoch": 0.31223628691983124,
"grad_norm": 0.6812940835952759,
"learning_rate": 0.0015,
"loss": 1.5679,
"step": 2960
},
{
"epoch": 0.31329113924050633,
"grad_norm": 0.8392590880393982,
"learning_rate": 0.0015,
"loss": 1.5605,
"step": 2970
},
{
"epoch": 0.3143459915611814,
"grad_norm": 0.6541980504989624,
"learning_rate": 0.0015,
"loss": 1.5694,
"step": 2980
},
{
"epoch": 0.3154008438818565,
"grad_norm": 0.5993001461029053,
"learning_rate": 0.0015,
"loss": 1.5846,
"step": 2990
},
{
"epoch": 0.31645569620253167,
"grad_norm": 0.7039710283279419,
"learning_rate": 0.0015,
"loss": 1.5645,
"step": 3000
},
{
"epoch": 0.31751054852320676,
"grad_norm": 0.5771589279174805,
"learning_rate": 0.0015,
"loss": 1.573,
"step": 3010
},
{
"epoch": 0.31856540084388185,
"grad_norm": 0.6969435811042786,
"learning_rate": 0.0015,
"loss": 1.573,
"step": 3020
},
{
"epoch": 0.31962025316455694,
"grad_norm": 0.6383923292160034,
"learning_rate": 0.0015,
"loss": 1.5624,
"step": 3030
},
{
"epoch": 0.3206751054852321,
"grad_norm": 0.7560727596282959,
"learning_rate": 0.0015,
"loss": 1.5697,
"step": 3040
},
{
"epoch": 0.3217299578059072,
"grad_norm": 0.6635891199111938,
"learning_rate": 0.0015,
"loss": 1.5632,
"step": 3050
},
{
"epoch": 0.3227848101265823,
"grad_norm": 0.5856527090072632,
"learning_rate": 0.0015,
"loss": 1.5759,
"step": 3060
},
{
"epoch": 0.32383966244725737,
"grad_norm": 0.612700343132019,
"learning_rate": 0.0015,
"loss": 1.566,
"step": 3070
},
{
"epoch": 0.32489451476793246,
"grad_norm": 0.7660608291625977,
"learning_rate": 0.0015,
"loss": 1.5574,
"step": 3080
},
{
"epoch": 0.3259493670886076,
"grad_norm": 0.5987316966056824,
"learning_rate": 0.0015,
"loss": 1.5765,
"step": 3090
},
{
"epoch": 0.3270042194092827,
"grad_norm": 0.593343198299408,
"learning_rate": 0.0015,
"loss": 1.5601,
"step": 3100
},
{
"epoch": 0.3280590717299578,
"grad_norm": 0.6489712595939636,
"learning_rate": 0.0015,
"loss": 1.5662,
"step": 3110
},
{
"epoch": 0.3291139240506329,
"grad_norm": 0.6017663478851318,
"learning_rate": 0.0015,
"loss": 1.5659,
"step": 3120
},
{
"epoch": 0.33016877637130804,
"grad_norm": 0.6709532141685486,
"learning_rate": 0.0015,
"loss": 1.5793,
"step": 3130
},
{
"epoch": 0.33122362869198313,
"grad_norm": 0.7417532801628113,
"learning_rate": 0.0015,
"loss": 1.5606,
"step": 3140
},
{
"epoch": 0.3322784810126582,
"grad_norm": 0.6624449491500854,
"learning_rate": 0.0015,
"loss": 1.5636,
"step": 3150
},
{
"epoch": 0.3333333333333333,
"grad_norm": 0.6301703453063965,
"learning_rate": 0.0015,
"loss": 1.5653,
"step": 3160
},
{
"epoch": 0.33438818565400846,
"grad_norm": 0.5977314710617065,
"learning_rate": 0.0015,
"loss": 1.5681,
"step": 3170
},
{
"epoch": 0.33544303797468356,
"grad_norm": 0.6682421565055847,
"learning_rate": 0.0015,
"loss": 1.56,
"step": 3180
},
{
"epoch": 0.33649789029535865,
"grad_norm": 0.7982174754142761,
"learning_rate": 0.0015,
"loss": 1.5629,
"step": 3190
},
{
"epoch": 0.33755274261603374,
"grad_norm": 0.7748986482620239,
"learning_rate": 0.0015,
"loss": 1.5542,
"step": 3200
},
{
"epoch": 0.33860759493670883,
"grad_norm": 1.0765491724014282,
"learning_rate": 0.0015,
"loss": 1.5723,
"step": 3210
},
{
"epoch": 0.339662447257384,
"grad_norm": 0.7252717614173889,
"learning_rate": 0.0015,
"loss": 1.5583,
"step": 3220
},
{
"epoch": 0.3407172995780591,
"grad_norm": 0.5946524143218994,
"learning_rate": 0.0015,
"loss": 1.5547,
"step": 3230
},
{
"epoch": 0.34177215189873417,
"grad_norm": 0.6727193593978882,
"learning_rate": 0.0015,
"loss": 1.536,
"step": 3240
},
{
"epoch": 0.34282700421940926,
"grad_norm": 0.8016456365585327,
"learning_rate": 0.0015,
"loss": 1.5619,
"step": 3250
},
{
"epoch": 0.3438818565400844,
"grad_norm": 0.5796253681182861,
"learning_rate": 0.0015,
"loss": 1.5484,
"step": 3260
},
{
"epoch": 0.3449367088607595,
"grad_norm": 0.623274028301239,
"learning_rate": 0.0015,
"loss": 1.5662,
"step": 3270
},
{
"epoch": 0.3459915611814346,
"grad_norm": 0.7034549117088318,
"learning_rate": 0.0015,
"loss": 1.5664,
"step": 3280
},
{
"epoch": 0.3470464135021097,
"grad_norm": 0.8472448587417603,
"learning_rate": 0.0015,
"loss": 1.5481,
"step": 3290
},
{
"epoch": 0.34810126582278483,
"grad_norm": 0.6147744059562683,
"learning_rate": 0.0015,
"loss": 1.5589,
"step": 3300
},
{
"epoch": 0.3491561181434599,
"grad_norm": 0.6061049699783325,
"learning_rate": 0.0015,
"loss": 1.5564,
"step": 3310
},
{
"epoch": 0.350210970464135,
"grad_norm": 0.7331443428993225,
"learning_rate": 0.0015,
"loss": 1.5651,
"step": 3320
},
{
"epoch": 0.3512658227848101,
"grad_norm": 0.5762057900428772,
"learning_rate": 0.0015,
"loss": 1.5531,
"step": 3330
},
{
"epoch": 0.35232067510548526,
"grad_norm": 0.6111762523651123,
"learning_rate": 0.0015,
"loss": 1.5522,
"step": 3340
},
{
"epoch": 0.35337552742616035,
"grad_norm": 0.5986932516098022,
"learning_rate": 0.0015,
"loss": 1.5464,
"step": 3350
},
{
"epoch": 0.35443037974683544,
"grad_norm": 0.6562957763671875,
"learning_rate": 0.0015,
"loss": 1.5473,
"step": 3360
},
{
"epoch": 0.35548523206751054,
"grad_norm": 0.8789434432983398,
"learning_rate": 0.0015,
"loss": 1.5544,
"step": 3370
},
{
"epoch": 0.35654008438818563,
"grad_norm": 0.7937793731689453,
"learning_rate": 0.0015,
"loss": 1.552,
"step": 3380
},
{
"epoch": 0.3575949367088608,
"grad_norm": 0.5861942768096924,
"learning_rate": 0.0015,
"loss": 1.5473,
"step": 3390
},
{
"epoch": 0.35864978902953587,
"grad_norm": 0.6977989673614502,
"learning_rate": 0.0015,
"loss": 1.5521,
"step": 3400
},
{
"epoch": 0.35970464135021096,
"grad_norm": 0.5927336812019348,
"learning_rate": 0.0015,
"loss": 1.5499,
"step": 3410
},
{
"epoch": 0.36075949367088606,
"grad_norm": 0.8154973387718201,
"learning_rate": 0.0015,
"loss": 1.5311,
"step": 3420
},
{
"epoch": 0.3618143459915612,
"grad_norm": 0.5706732273101807,
"learning_rate": 0.0015,
"loss": 1.5487,
"step": 3430
},
{
"epoch": 0.3628691983122363,
"grad_norm": 0.6529673933982849,
"learning_rate": 0.0015,
"loss": 1.5441,
"step": 3440
},
{
"epoch": 0.3639240506329114,
"grad_norm": 0.5828253626823425,
"learning_rate": 0.0015,
"loss": 1.5426,
"step": 3450
},
{
"epoch": 0.3649789029535865,
"grad_norm": 0.6410821080207825,
"learning_rate": 0.0015,
"loss": 1.5341,
"step": 3460
},
{
"epoch": 0.36603375527426163,
"grad_norm": 0.7108343839645386,
"learning_rate": 0.0015,
"loss": 1.5497,
"step": 3470
},
{
"epoch": 0.3670886075949367,
"grad_norm": 0.5786426663398743,
"learning_rate": 0.0015,
"loss": 1.5572,
"step": 3480
},
{
"epoch": 0.3681434599156118,
"grad_norm": 0.71504807472229,
"learning_rate": 0.0015,
"loss": 1.5472,
"step": 3490
},
{
"epoch": 0.3691983122362869,
"grad_norm": 0.6172586679458618,
"learning_rate": 0.0015,
"loss": 1.5299,
"step": 3500
},
{
"epoch": 0.370253164556962,
"grad_norm": 0.7068558931350708,
"learning_rate": 0.0015,
"loss": 1.5505,
"step": 3510
},
{
"epoch": 0.37130801687763715,
"grad_norm": 0.6257584691047668,
"learning_rate": 0.0015,
"loss": 1.5441,
"step": 3520
},
{
"epoch": 0.37236286919831224,
"grad_norm": 0.6539843082427979,
"learning_rate": 0.0015,
"loss": 1.5561,
"step": 3530
},
{
"epoch": 0.37341772151898733,
"grad_norm": 0.9052338004112244,
"learning_rate": 0.0015,
"loss": 1.5518,
"step": 3540
},
{
"epoch": 0.3744725738396624,
"grad_norm": 0.676902174949646,
"learning_rate": 0.0015,
"loss": 1.5392,
"step": 3550
},
{
"epoch": 0.3755274261603376,
"grad_norm": 0.602618932723999,
"learning_rate": 0.0015,
"loss": 1.5389,
"step": 3560
},
{
"epoch": 0.37658227848101267,
"grad_norm": 1.043258786201477,
"learning_rate": 0.0015,
"loss": 1.5371,
"step": 3570
},
{
"epoch": 0.37763713080168776,
"grad_norm": 0.5973966717720032,
"learning_rate": 0.0015,
"loss": 1.5371,
"step": 3580
},
{
"epoch": 0.37869198312236285,
"grad_norm": 0.6773671507835388,
"learning_rate": 0.0015,
"loss": 1.549,
"step": 3590
},
{
"epoch": 0.379746835443038,
"grad_norm": 0.7791342735290527,
"learning_rate": 0.0015,
"loss": 1.5296,
"step": 3600
},
{
"epoch": 0.3808016877637131,
"grad_norm": 0.6235648989677429,
"learning_rate": 0.0015,
"loss": 1.5324,
"step": 3610
},
{
"epoch": 0.3818565400843882,
"grad_norm": 0.5803208351135254,
"learning_rate": 0.0015,
"loss": 1.5364,
"step": 3620
},
{
"epoch": 0.3829113924050633,
"grad_norm": 0.7806430459022522,
"learning_rate": 0.0015,
"loss": 1.5362,
"step": 3630
},
{
"epoch": 0.38396624472573837,
"grad_norm": 0.8068416118621826,
"learning_rate": 0.0015,
"loss": 1.5343,
"step": 3640
},
{
"epoch": 0.3850210970464135,
"grad_norm": 0.7203842997550964,
"learning_rate": 0.0015,
"loss": 1.5464,
"step": 3650
},
{
"epoch": 0.3860759493670886,
"grad_norm": 0.7165307402610779,
"learning_rate": 0.0015,
"loss": 1.536,
"step": 3660
},
{
"epoch": 0.3871308016877637,
"grad_norm": 0.6051152944564819,
"learning_rate": 0.0015,
"loss": 1.5307,
"step": 3670
},
{
"epoch": 0.3881856540084388,
"grad_norm": 1.0198311805725098,
"learning_rate": 0.0015,
"loss": 1.5341,
"step": 3680
},
{
"epoch": 0.38924050632911394,
"grad_norm": 0.6691511273384094,
"learning_rate": 0.0015,
"loss": 1.5458,
"step": 3690
},
{
"epoch": 0.39029535864978904,
"grad_norm": 0.6004876494407654,
"learning_rate": 0.0015,
"loss": 1.5479,
"step": 3700
},
{
"epoch": 0.39135021097046413,
"grad_norm": 0.6264767646789551,
"learning_rate": 0.0015,
"loss": 1.5332,
"step": 3710
},
{
"epoch": 0.3924050632911392,
"grad_norm": 0.6161850094795227,
"learning_rate": 0.0015,
"loss": 1.5134,
"step": 3720
},
{
"epoch": 0.39345991561181437,
"grad_norm": 0.5642967224121094,
"learning_rate": 0.0015,
"loss": 1.533,
"step": 3730
},
{
"epoch": 0.39451476793248946,
"grad_norm": 0.5615982413291931,
"learning_rate": 0.0015,
"loss": 1.5304,
"step": 3740
},
{
"epoch": 0.39556962025316456,
"grad_norm": 0.6446601152420044,
"learning_rate": 0.0015,
"loss": 1.5371,
"step": 3750
},
{
"epoch": 0.39662447257383965,
"grad_norm": 0.7530333995819092,
"learning_rate": 0.0015,
"loss": 1.5265,
"step": 3760
},
{
"epoch": 0.39767932489451474,
"grad_norm": 0.7596944570541382,
"learning_rate": 0.0015,
"loss": 1.5412,
"step": 3770
},
{
"epoch": 0.3987341772151899,
"grad_norm": 0.5665528774261475,
"learning_rate": 0.0015,
"loss": 1.5263,
"step": 3780
},
{
"epoch": 0.399789029535865,
"grad_norm": 0.578454315662384,
"learning_rate": 0.0015,
"loss": 1.531,
"step": 3790
},
{
"epoch": 0.4008438818565401,
"grad_norm": 0.5789613127708435,
"learning_rate": 0.0015,
"loss": 1.5343,
"step": 3800
},
{
"epoch": 0.40189873417721517,
"grad_norm": 0.7712839841842651,
"learning_rate": 0.0015,
"loss": 1.526,
"step": 3810
},
{
"epoch": 0.4029535864978903,
"grad_norm": 0.6859884262084961,
"learning_rate": 0.0015,
"loss": 1.5172,
"step": 3820
},
{
"epoch": 0.4040084388185654,
"grad_norm": 0.6037826538085938,
"learning_rate": 0.0015,
"loss": 1.5184,
"step": 3830
},
{
"epoch": 0.4050632911392405,
"grad_norm": 0.5984542965888977,
"learning_rate": 0.0015,
"loss": 1.5389,
"step": 3840
},
{
"epoch": 0.4061181434599156,
"grad_norm": 0.7246179580688477,
"learning_rate": 0.0015,
"loss": 1.5199,
"step": 3850
},
{
"epoch": 0.40717299578059074,
"grad_norm": 0.7035946846008301,
"learning_rate": 0.0015,
"loss": 1.5353,
"step": 3860
},
{
"epoch": 0.40822784810126583,
"grad_norm": 1.0902485847473145,
"learning_rate": 0.0015,
"loss": 1.5224,
"step": 3870
},
{
"epoch": 0.4092827004219409,
"grad_norm": 0.6348387002944946,
"learning_rate": 0.0015,
"loss": 1.5169,
"step": 3880
},
{
"epoch": 0.410337552742616,
"grad_norm": 0.5966281890869141,
"learning_rate": 0.0015,
"loss": 1.5145,
"step": 3890
},
{
"epoch": 0.41139240506329117,
"grad_norm": 1.0937188863754272,
"learning_rate": 0.0015,
"loss": 1.5162,
"step": 3900
},
{
"epoch": 0.41244725738396626,
"grad_norm": 0.5832434296607971,
"learning_rate": 0.0015,
"loss": 1.5172,
"step": 3910
},
{
"epoch": 0.41350210970464135,
"grad_norm": 0.6325321197509766,
"learning_rate": 0.0015,
"loss": 1.5157,
"step": 3920
},
{
"epoch": 0.41455696202531644,
"grad_norm": 0.5954620838165283,
"learning_rate": 0.0015,
"loss": 1.5191,
"step": 3930
},
{
"epoch": 0.41561181434599154,
"grad_norm": 0.6768201589584351,
"learning_rate": 0.0015,
"loss": 1.5151,
"step": 3940
},
{
"epoch": 0.4166666666666667,
"grad_norm": 1.0494052171707153,
"learning_rate": 0.0015,
"loss": 1.5227,
"step": 3950
},
{
"epoch": 0.4177215189873418,
"grad_norm": 0.646942675113678,
"learning_rate": 0.0015,
"loss": 1.5235,
"step": 3960
},
{
"epoch": 0.41877637130801687,
"grad_norm": 0.7022451758384705,
"learning_rate": 0.0015,
"loss": 1.5124,
"step": 3970
},
{
"epoch": 0.41983122362869196,
"grad_norm": 0.5785510540008545,
"learning_rate": 0.0015,
"loss": 1.5244,
"step": 3980
},
{
"epoch": 0.4208860759493671,
"grad_norm": 0.7240645885467529,
"learning_rate": 0.0015,
"loss": 1.5154,
"step": 3990
},
{
"epoch": 0.4219409282700422,
"grad_norm": 0.6127949357032776,
"learning_rate": 0.0015,
"loss": 1.5266,
"step": 4000
},
{
"epoch": 0.4229957805907173,
"grad_norm": 0.5847046375274658,
"learning_rate": 0.0015,
"loss": 1.5123,
"step": 4010
},
{
"epoch": 0.4240506329113924,
"grad_norm": 0.5906983017921448,
"learning_rate": 0.0015,
"loss": 1.5176,
"step": 4020
},
{
"epoch": 0.42510548523206754,
"grad_norm": 0.5839008688926697,
"learning_rate": 0.0015,
"loss": 1.5107,
"step": 4030
},
{
"epoch": 0.42616033755274263,
"grad_norm": 0.6024583578109741,
"learning_rate": 0.0015,
"loss": 1.5172,
"step": 4040
},
{
"epoch": 0.4272151898734177,
"grad_norm": 0.5539279580116272,
"learning_rate": 0.0015,
"loss": 1.5194,
"step": 4050
},
{
"epoch": 0.4282700421940928,
"grad_norm": 0.7165290117263794,
"learning_rate": 0.0015,
"loss": 1.5243,
"step": 4060
},
{
"epoch": 0.4293248945147679,
"grad_norm": 0.7260866165161133,
"learning_rate": 0.0015,
"loss": 1.5197,
"step": 4070
},
{
"epoch": 0.43037974683544306,
"grad_norm": 1.50114905834198,
"learning_rate": 0.0015,
"loss": 1.5285,
"step": 4080
},
{
"epoch": 0.43143459915611815,
"grad_norm": 0.6995235681533813,
"learning_rate": 0.0015,
"loss": 1.5194,
"step": 4090
},
{
"epoch": 0.43248945147679324,
"grad_norm": 0.5949418544769287,
"learning_rate": 0.0015,
"loss": 1.5106,
"step": 4100
},
{
"epoch": 0.43354430379746833,
"grad_norm": 0.6881145238876343,
"learning_rate": 0.0015,
"loss": 1.5053,
"step": 4110
},
{
"epoch": 0.4345991561181435,
"grad_norm": 0.8568872809410095,
"learning_rate": 0.0015,
"loss": 1.5137,
"step": 4120
},
{
"epoch": 0.4356540084388186,
"grad_norm": 0.7430587410926819,
"learning_rate": 0.0015,
"loss": 1.5044,
"step": 4130
},
{
"epoch": 0.43670886075949367,
"grad_norm": 0.637575089931488,
"learning_rate": 0.0015,
"loss": 1.5071,
"step": 4140
},
{
"epoch": 0.43776371308016876,
"grad_norm": 0.6594664454460144,
"learning_rate": 0.0015,
"loss": 1.5144,
"step": 4150
},
{
"epoch": 0.4388185654008439,
"grad_norm": 0.6385189890861511,
"learning_rate": 0.0015,
"loss": 1.5062,
"step": 4160
},
{
"epoch": 0.439873417721519,
"grad_norm": 0.5632670521736145,
"learning_rate": 0.0015,
"loss": 1.5087,
"step": 4170
},
{
"epoch": 0.4409282700421941,
"grad_norm": 0.5554453730583191,
"learning_rate": 0.0015,
"loss": 1.5045,
"step": 4180
},
{
"epoch": 0.4419831223628692,
"grad_norm": 0.6365271210670471,
"learning_rate": 0.0015,
"loss": 1.5093,
"step": 4190
},
{
"epoch": 0.4430379746835443,
"grad_norm": 0.5896157622337341,
"learning_rate": 0.0015,
"loss": 1.5106,
"step": 4200
},
{
"epoch": 0.4440928270042194,
"grad_norm": 0.7115333676338196,
"learning_rate": 0.0015,
"loss": 1.5041,
"step": 4210
},
{
"epoch": 0.4451476793248945,
"grad_norm": 0.7665364146232605,
"learning_rate": 0.0015,
"loss": 1.5102,
"step": 4220
},
{
"epoch": 0.4462025316455696,
"grad_norm": 0.8889451622962952,
"learning_rate": 0.0015,
"loss": 1.5094,
"step": 4230
},
{
"epoch": 0.4472573839662447,
"grad_norm": 0.564806342124939,
"learning_rate": 0.0015,
"loss": 1.5081,
"step": 4240
},
{
"epoch": 0.44831223628691985,
"grad_norm": 0.6670458316802979,
"learning_rate": 0.0015,
"loss": 1.5102,
"step": 4250
},
{
"epoch": 0.44936708860759494,
"grad_norm": 0.9738371968269348,
"learning_rate": 0.0015,
"loss": 1.5169,
"step": 4260
},
{
"epoch": 0.45042194092827004,
"grad_norm": 0.5916040539741516,
"learning_rate": 0.0015,
"loss": 1.5058,
"step": 4270
},
{
"epoch": 0.45147679324894513,
"grad_norm": 0.7631626725196838,
"learning_rate": 0.0015,
"loss": 1.4982,
"step": 4280
},
{
"epoch": 0.4525316455696203,
"grad_norm": 0.913382887840271,
"learning_rate": 0.0015,
"loss": 1.5118,
"step": 4290
},
{
"epoch": 0.45358649789029537,
"grad_norm": 0.6988171339035034,
"learning_rate": 0.0015,
"loss": 1.5083,
"step": 4300
},
{
"epoch": 0.45464135021097046,
"grad_norm": 0.583550214767456,
"learning_rate": 0.0015,
"loss": 1.5036,
"step": 4310
},
{
"epoch": 0.45569620253164556,
"grad_norm": 0.6362908482551575,
"learning_rate": 0.0015,
"loss": 1.4979,
"step": 4320
},
{
"epoch": 0.45675105485232065,
"grad_norm": 0.6140376329421997,
"learning_rate": 0.0015,
"loss": 1.5097,
"step": 4330
},
{
"epoch": 0.4578059071729958,
"grad_norm": 0.7203188538551331,
"learning_rate": 0.0015,
"loss": 1.4996,
"step": 4340
},
{
"epoch": 0.4588607594936709,
"grad_norm": 0.730200469493866,
"learning_rate": 0.0015,
"loss": 1.502,
"step": 4350
},
{
"epoch": 0.459915611814346,
"grad_norm": 0.661221444606781,
"learning_rate": 0.0015,
"loss": 1.518,
"step": 4360
},
{
"epoch": 0.4609704641350211,
"grad_norm": 0.6184653043746948,
"learning_rate": 0.0015,
"loss": 1.4961,
"step": 4370
},
{
"epoch": 0.4620253164556962,
"grad_norm": 0.5568481087684631,
"learning_rate": 0.0015,
"loss": 1.5016,
"step": 4380
},
{
"epoch": 0.4630801687763713,
"grad_norm": 0.6127128005027771,
"learning_rate": 0.0015,
"loss": 1.4975,
"step": 4390
},
{
"epoch": 0.4641350210970464,
"grad_norm": 0.6330604553222656,
"learning_rate": 0.0015,
"loss": 1.5029,
"step": 4400
},
{
"epoch": 0.4651898734177215,
"grad_norm": 0.671295702457428,
"learning_rate": 0.0015,
"loss": 1.5177,
"step": 4410
},
{
"epoch": 0.46624472573839665,
"grad_norm": 0.6765188574790955,
"learning_rate": 0.0015,
"loss": 1.5008,
"step": 4420
},
{
"epoch": 0.46729957805907174,
"grad_norm": 0.638823926448822,
"learning_rate": 0.0015,
"loss": 1.5062,
"step": 4430
},
{
"epoch": 0.46835443037974683,
"grad_norm": 0.5836365818977356,
"learning_rate": 0.0015,
"loss": 1.4979,
"step": 4440
},
{
"epoch": 0.4694092827004219,
"grad_norm": 0.6062256693840027,
"learning_rate": 0.0015,
"loss": 1.5066,
"step": 4450
},
{
"epoch": 0.4704641350210971,
"grad_norm": 0.6792214512825012,
"learning_rate": 0.0015,
"loss": 1.4933,
"step": 4460
},
{
"epoch": 0.47151898734177217,
"grad_norm": 0.5764203071594238,
"learning_rate": 0.0015,
"loss": 1.504,
"step": 4470
},
{
"epoch": 0.47257383966244726,
"grad_norm": 0.6028873920440674,
"learning_rate": 0.0015,
"loss": 1.5068,
"step": 4480
},
{
"epoch": 0.47362869198312235,
"grad_norm": 0.7062868475914001,
"learning_rate": 0.0015,
"loss": 1.4981,
"step": 4490
},
{
"epoch": 0.47468354430379744,
"grad_norm": 0.5581313371658325,
"learning_rate": 0.0015,
"loss": 1.4964,
"step": 4500
},
{
"epoch": 0.4757383966244726,
"grad_norm": 0.6057865023612976,
"learning_rate": 0.0015,
"loss": 1.5029,
"step": 4510
},
{
"epoch": 0.4767932489451477,
"grad_norm": 0.8442615866661072,
"learning_rate": 0.0015,
"loss": 1.4829,
"step": 4520
},
{
"epoch": 0.4778481012658228,
"grad_norm": 0.6654320359230042,
"learning_rate": 0.0015,
"loss": 1.4991,
"step": 4530
},
{
"epoch": 0.47890295358649787,
"grad_norm": 0.6202135682106018,
"learning_rate": 0.0015,
"loss": 1.5046,
"step": 4540
},
{
"epoch": 0.479957805907173,
"grad_norm": 0.7283245921134949,
"learning_rate": 0.0015,
"loss": 1.5016,
"step": 4550
},
{
"epoch": 0.4810126582278481,
"grad_norm": 0.6854476928710938,
"learning_rate": 0.0015,
"loss": 1.4903,
"step": 4560
},
{
"epoch": 0.4820675105485232,
"grad_norm": 0.6297418475151062,
"learning_rate": 0.0015,
"loss": 1.4754,
"step": 4570
},
{
"epoch": 0.4831223628691983,
"grad_norm": 0.7521628141403198,
"learning_rate": 0.0015,
"loss": 1.5003,
"step": 4580
},
{
"epoch": 0.48417721518987344,
"grad_norm": 0.6098962426185608,
"learning_rate": 0.0015,
"loss": 1.4848,
"step": 4590
},
{
"epoch": 0.48523206751054854,
"grad_norm": 0.612733006477356,
"learning_rate": 0.0015,
"loss": 1.5043,
"step": 4600
},
{
"epoch": 0.48628691983122363,
"grad_norm": 1.1899629831314087,
"learning_rate": 0.0015,
"loss": 1.5079,
"step": 4610
},
{
"epoch": 0.4873417721518987,
"grad_norm": 1.1455023288726807,
"learning_rate": 0.0015,
"loss": 1.4842,
"step": 4620
},
{
"epoch": 0.4883966244725738,
"grad_norm": 0.5533867478370667,
"learning_rate": 0.0015,
"loss": 1.4969,
"step": 4630
},
{
"epoch": 0.48945147679324896,
"grad_norm": 0.5487686991691589,
"learning_rate": 0.0015,
"loss": 1.4972,
"step": 4640
},
{
"epoch": 0.49050632911392406,
"grad_norm": 0.5525293946266174,
"learning_rate": 0.0015,
"loss": 1.5002,
"step": 4650
},
{
"epoch": 0.49156118143459915,
"grad_norm": 0.6435783505439758,
"learning_rate": 0.0015,
"loss": 1.4906,
"step": 4660
},
{
"epoch": 0.49261603375527424,
"grad_norm": 0.8155152201652527,
"learning_rate": 0.0015,
"loss": 1.4919,
"step": 4670
},
{
"epoch": 0.4936708860759494,
"grad_norm": 0.5618850588798523,
"learning_rate": 0.0015,
"loss": 1.485,
"step": 4680
},
{
"epoch": 0.4947257383966245,
"grad_norm": 0.8420971632003784,
"learning_rate": 0.0015,
"loss": 1.4809,
"step": 4690
},
{
"epoch": 0.4957805907172996,
"grad_norm": 0.6340466737747192,
"learning_rate": 0.0015,
"loss": 1.4943,
"step": 4700
},
{
"epoch": 0.49683544303797467,
"grad_norm": 0.5540546178817749,
"learning_rate": 0.0015,
"loss": 1.4926,
"step": 4710
},
{
"epoch": 0.4978902953586498,
"grad_norm": 0.8049904704093933,
"learning_rate": 0.0015,
"loss": 1.4892,
"step": 4720
},
{
"epoch": 0.4989451476793249,
"grad_norm": 0.6917950510978699,
"learning_rate": 0.0015,
"loss": 1.4877,
"step": 4730
},
{
"epoch": 0.5,
"grad_norm": 0.7213568091392517,
"learning_rate": 0.0015,
"loss": 1.4872,
"step": 4740
},
{
"epoch": 0.5010548523206751,
"grad_norm": 0.7253615856170654,
"learning_rate": 0.0015,
"loss": 1.5032,
"step": 4750
},
{
"epoch": 0.5021097046413502,
"grad_norm": 0.5511662364006042,
"learning_rate": 0.0015,
"loss": 1.4899,
"step": 4760
},
{
"epoch": 0.5031645569620253,
"grad_norm": 0.5705893039703369,
"learning_rate": 0.0015,
"loss": 1.4864,
"step": 4770
},
{
"epoch": 0.5042194092827004,
"grad_norm": 0.5279271006584167,
"learning_rate": 0.0015,
"loss": 1.4967,
"step": 4780
},
{
"epoch": 0.5052742616033755,
"grad_norm": 0.7271177768707275,
"learning_rate": 0.0015,
"loss": 1.4801,
"step": 4790
},
{
"epoch": 0.5063291139240507,
"grad_norm": 0.6367512345314026,
"learning_rate": 0.0015,
"loss": 1.4884,
"step": 4800
},
{
"epoch": 0.5073839662447257,
"grad_norm": 0.5229652523994446,
"learning_rate": 0.0015,
"loss": 1.4873,
"step": 4810
},
{
"epoch": 0.5084388185654009,
"grad_norm": 0.6838683485984802,
"learning_rate": 0.0015,
"loss": 1.5039,
"step": 4820
},
{
"epoch": 0.509493670886076,
"grad_norm": 0.9289997816085815,
"learning_rate": 0.0015,
"loss": 1.4907,
"step": 4830
},
{
"epoch": 0.510548523206751,
"grad_norm": 0.5755728483200073,
"learning_rate": 0.0015,
"loss": 1.4808,
"step": 4840
},
{
"epoch": 0.5116033755274262,
"grad_norm": 0.5589598417282104,
"learning_rate": 0.0015,
"loss": 1.4926,
"step": 4850
},
{
"epoch": 0.5126582278481012,
"grad_norm": 0.6322404742240906,
"learning_rate": 0.0015,
"loss": 1.4822,
"step": 4860
},
{
"epoch": 0.5137130801687764,
"grad_norm": 0.689440906047821,
"learning_rate": 0.0015,
"loss": 1.4757,
"step": 4870
},
{
"epoch": 0.5147679324894515,
"grad_norm": 0.8826259970664978,
"learning_rate": 0.0015,
"loss": 1.4954,
"step": 4880
},
{
"epoch": 0.5158227848101266,
"grad_norm": 0.7584018707275391,
"learning_rate": 0.0015,
"loss": 1.4861,
"step": 4890
},
{
"epoch": 0.5168776371308017,
"grad_norm": 0.6056618690490723,
"learning_rate": 0.0015,
"loss": 1.4939,
"step": 4900
},
{
"epoch": 0.5179324894514767,
"grad_norm": 0.6169233918190002,
"learning_rate": 0.0015,
"loss": 1.4865,
"step": 4910
},
{
"epoch": 0.5189873417721519,
"grad_norm": 0.5963829755783081,
"learning_rate": 0.0015,
"loss": 1.4783,
"step": 4920
},
{
"epoch": 0.520042194092827,
"grad_norm": 0.5480197668075562,
"learning_rate": 0.0015,
"loss": 1.4871,
"step": 4930
},
{
"epoch": 0.5210970464135021,
"grad_norm": 0.6444430351257324,
"learning_rate": 0.0015,
"loss": 1.4852,
"step": 4940
},
{
"epoch": 0.5221518987341772,
"grad_norm": 0.8089283108711243,
"learning_rate": 0.0015,
"loss": 1.4918,
"step": 4950
},
{
"epoch": 0.5232067510548524,
"grad_norm": 0.7816188931465149,
"learning_rate": 0.0015,
"loss": 1.4756,
"step": 4960
},
{
"epoch": 0.5242616033755274,
"grad_norm": 0.650807797908783,
"learning_rate": 0.0015,
"loss": 1.4835,
"step": 4970
},
{
"epoch": 0.5253164556962026,
"grad_norm": 0.7011664509773254,
"learning_rate": 0.0015,
"loss": 1.4754,
"step": 4980
},
{
"epoch": 0.5263713080168776,
"grad_norm": 0.6017739176750183,
"learning_rate": 0.0015,
"loss": 1.4776,
"step": 4990
},
{
"epoch": 0.5274261603375527,
"grad_norm": 0.728861391544342,
"learning_rate": 0.0015,
"loss": 1.4917,
"step": 5000
},
{
"epoch": 0.5284810126582279,
"grad_norm": 0.6405869722366333,
"learning_rate": 0.0015,
"loss": 1.4853,
"step": 5010
},
{
"epoch": 0.5295358649789029,
"grad_norm": 0.5563636422157288,
"learning_rate": 0.0015,
"loss": 1.4746,
"step": 5020
},
{
"epoch": 0.5305907172995781,
"grad_norm": 0.5111358761787415,
"learning_rate": 0.0015,
"loss": 1.463,
"step": 5030
},
{
"epoch": 0.5316455696202531,
"grad_norm": 0.5340365767478943,
"learning_rate": 0.0015,
"loss": 1.4904,
"step": 5040
},
{
"epoch": 0.5327004219409283,
"grad_norm": 0.6350395679473877,
"learning_rate": 0.0015,
"loss": 1.4756,
"step": 5050
},
{
"epoch": 0.5337552742616034,
"grad_norm": 0.7631687521934509,
"learning_rate": 0.0015,
"loss": 1.4917,
"step": 5060
},
{
"epoch": 0.5348101265822784,
"grad_norm": 0.9575885534286499,
"learning_rate": 0.0015,
"loss": 1.4792,
"step": 5070
},
{
"epoch": 0.5358649789029536,
"grad_norm": 0.5712935924530029,
"learning_rate": 0.0015,
"loss": 1.4894,
"step": 5080
},
{
"epoch": 0.5369198312236287,
"grad_norm": 0.5393574237823486,
"learning_rate": 0.0015,
"loss": 1.4836,
"step": 5090
},
{
"epoch": 0.5379746835443038,
"grad_norm": 0.6820569634437561,
"learning_rate": 0.0015,
"loss": 1.4647,
"step": 5100
},
{
"epoch": 0.5390295358649789,
"grad_norm": 0.8766012787818909,
"learning_rate": 0.0015,
"loss": 1.4812,
"step": 5110
},
{
"epoch": 0.540084388185654,
"grad_norm": 0.5794424414634705,
"learning_rate": 0.0015,
"loss": 1.4779,
"step": 5120
},
{
"epoch": 0.5411392405063291,
"grad_norm": 0.6964527368545532,
"learning_rate": 0.0015,
"loss": 1.474,
"step": 5130
},
{
"epoch": 0.5421940928270043,
"grad_norm": 0.84315425157547,
"learning_rate": 0.0015,
"loss": 1.4912,
"step": 5140
},
{
"epoch": 0.5432489451476793,
"grad_norm": 0.6195699572563171,
"learning_rate": 0.0015,
"loss": 1.4876,
"step": 5150
},
{
"epoch": 0.5443037974683544,
"grad_norm": 0.5797995328903198,
"learning_rate": 0.0015,
"loss": 1.4779,
"step": 5160
},
{
"epoch": 0.5453586497890295,
"grad_norm": 0.5988919734954834,
"learning_rate": 0.0015,
"loss": 1.4789,
"step": 5170
},
{
"epoch": 0.5464135021097046,
"grad_norm": 0.8923475742340088,
"learning_rate": 0.0015,
"loss": 1.4911,
"step": 5180
},
{
"epoch": 0.5474683544303798,
"grad_norm": 0.7122047543525696,
"learning_rate": 0.0015,
"loss": 1.4708,
"step": 5190
},
{
"epoch": 0.5485232067510548,
"grad_norm": 0.9506706595420837,
"learning_rate": 0.0015,
"loss": 1.4838,
"step": 5200
},
{
"epoch": 0.54957805907173,
"grad_norm": 0.6243468523025513,
"learning_rate": 0.0015,
"loss": 1.4701,
"step": 5210
},
{
"epoch": 0.5506329113924051,
"grad_norm": 0.5693705081939697,
"learning_rate": 0.0015,
"loss": 1.4804,
"step": 5220
},
{
"epoch": 0.5516877637130801,
"grad_norm": 0.5985523462295532,
"learning_rate": 0.0015,
"loss": 1.4735,
"step": 5230
},
{
"epoch": 0.5527426160337553,
"grad_norm": 0.5486128926277161,
"learning_rate": 0.0015,
"loss": 1.4799,
"step": 5240
},
{
"epoch": 0.5537974683544303,
"grad_norm": 0.788480818271637,
"learning_rate": 0.0015,
"loss": 1.4712,
"step": 5250
},
{
"epoch": 0.5548523206751055,
"grad_norm": 0.5885704159736633,
"learning_rate": 0.0015,
"loss": 1.4764,
"step": 5260
},
{
"epoch": 0.5559071729957806,
"grad_norm": 0.5659143924713135,
"learning_rate": 0.0015,
"loss": 1.4817,
"step": 5270
},
{
"epoch": 0.5569620253164557,
"grad_norm": 0.5663936734199524,
"learning_rate": 0.0015,
"loss": 1.4748,
"step": 5280
},
{
"epoch": 0.5580168776371308,
"grad_norm": 0.8281134963035583,
"learning_rate": 0.0015,
"loss": 1.4771,
"step": 5290
},
{
"epoch": 0.5590717299578059,
"grad_norm": 0.5809610486030579,
"learning_rate": 0.0015,
"loss": 1.481,
"step": 5300
},
{
"epoch": 0.560126582278481,
"grad_norm": 0.5774869322776794,
"learning_rate": 0.0015,
"loss": 1.4687,
"step": 5310
},
{
"epoch": 0.5611814345991561,
"grad_norm": 0.5578456521034241,
"learning_rate": 0.0015,
"loss": 1.468,
"step": 5320
},
{
"epoch": 0.5622362869198312,
"grad_norm": 0.5626557469367981,
"learning_rate": 0.0015,
"loss": 1.4802,
"step": 5330
},
{
"epoch": 0.5632911392405063,
"grad_norm": 0.5560243725776672,
"learning_rate": 0.0015,
"loss": 1.4688,
"step": 5340
},
{
"epoch": 0.5643459915611815,
"grad_norm": 0.7333641052246094,
"learning_rate": 0.0015,
"loss": 1.4641,
"step": 5350
},
{
"epoch": 0.5654008438818565,
"grad_norm": 1.0198782682418823,
"learning_rate": 0.0015,
"loss": 1.4749,
"step": 5360
},
{
"epoch": 0.5664556962025317,
"grad_norm": 0.6762611865997314,
"learning_rate": 0.0015,
"loss": 1.4729,
"step": 5370
},
{
"epoch": 0.5675105485232067,
"grad_norm": 0.5590974688529968,
"learning_rate": 0.0015,
"loss": 1.4684,
"step": 5380
},
{
"epoch": 0.5685654008438819,
"grad_norm": 0.599421501159668,
"learning_rate": 0.0015,
"loss": 1.4669,
"step": 5390
},
{
"epoch": 0.569620253164557,
"grad_norm": 0.5525367856025696,
"learning_rate": 0.0015,
"loss": 1.4678,
"step": 5400
},
{
"epoch": 0.570675105485232,
"grad_norm": 0.5372371673583984,
"learning_rate": 0.0015,
"loss": 1.4699,
"step": 5410
},
{
"epoch": 0.5717299578059072,
"grad_norm": 0.5424435138702393,
"learning_rate": 0.0015,
"loss": 1.4641,
"step": 5420
},
{
"epoch": 0.5727848101265823,
"grad_norm": 0.6444362998008728,
"learning_rate": 0.0015,
"loss": 1.4597,
"step": 5430
},
{
"epoch": 0.5738396624472574,
"grad_norm": 0.7216619849205017,
"learning_rate": 0.0015,
"loss": 1.4592,
"step": 5440
},
{
"epoch": 0.5748945147679325,
"grad_norm": 0.7893200516700745,
"learning_rate": 0.0015,
"loss": 1.4706,
"step": 5450
},
{
"epoch": 0.5759493670886076,
"grad_norm": 0.7450786828994751,
"learning_rate": 0.0015,
"loss": 1.4722,
"step": 5460
},
{
"epoch": 0.5770042194092827,
"grad_norm": 0.9274654388427734,
"learning_rate": 0.0015,
"loss": 1.4693,
"step": 5470
},
{
"epoch": 0.5780590717299579,
"grad_norm": 0.5277224183082581,
"learning_rate": 0.0015,
"loss": 1.4714,
"step": 5480
},
{
"epoch": 0.5791139240506329,
"grad_norm": 0.5416193604469299,
"learning_rate": 0.0015,
"loss": 1.4746,
"step": 5490
},
{
"epoch": 0.580168776371308,
"grad_norm": 0.6536852121353149,
"learning_rate": 0.0015,
"loss": 1.4657,
"step": 5500
},
{
"epoch": 0.5812236286919831,
"grad_norm": 1.0416918992996216,
"learning_rate": 0.0015,
"loss": 1.4728,
"step": 5510
},
{
"epoch": 0.5822784810126582,
"grad_norm": 0.6090261340141296,
"learning_rate": 0.0015,
"loss": 1.4641,
"step": 5520
},
{
"epoch": 0.5833333333333334,
"grad_norm": 0.790364682674408,
"learning_rate": 0.0015,
"loss": 1.4582,
"step": 5530
},
{
"epoch": 0.5843881856540084,
"grad_norm": 0.5679715275764465,
"learning_rate": 0.0015,
"loss": 1.4559,
"step": 5540
},
{
"epoch": 0.5854430379746836,
"grad_norm": 0.6483580470085144,
"learning_rate": 0.0015,
"loss": 1.4715,
"step": 5550
},
{
"epoch": 0.5864978902953587,
"grad_norm": 0.6752976775169373,
"learning_rate": 0.0015,
"loss": 1.4796,
"step": 5560
},
{
"epoch": 0.5875527426160337,
"grad_norm": 0.5237153768539429,
"learning_rate": 0.0015,
"loss": 1.4658,
"step": 5570
},
{
"epoch": 0.5886075949367089,
"grad_norm": 0.6024871468544006,
"learning_rate": 0.0015,
"loss": 1.4645,
"step": 5580
},
{
"epoch": 0.5896624472573839,
"grad_norm": 0.569186270236969,
"learning_rate": 0.0015,
"loss": 1.4663,
"step": 5590
},
{
"epoch": 0.5907172995780591,
"grad_norm": 0.6288050413131714,
"learning_rate": 0.0015,
"loss": 1.4614,
"step": 5600
},
{
"epoch": 0.5917721518987342,
"grad_norm": 0.5551683306694031,
"learning_rate": 0.0015,
"loss": 1.4664,
"step": 5610
},
{
"epoch": 0.5928270042194093,
"grad_norm": 0.575771152973175,
"learning_rate": 0.0015,
"loss": 1.474,
"step": 5620
},
{
"epoch": 0.5938818565400844,
"grad_norm": 0.7515531182289124,
"learning_rate": 0.0015,
"loss": 1.4676,
"step": 5630
},
{
"epoch": 0.5949367088607594,
"grad_norm": 0.5615184307098389,
"learning_rate": 0.0015,
"loss": 1.4779,
"step": 5640
},
{
"epoch": 0.5959915611814346,
"grad_norm": 0.7747194170951843,
"learning_rate": 0.0015,
"loss": 1.4756,
"step": 5650
},
{
"epoch": 0.5970464135021097,
"grad_norm": 0.8152536749839783,
"learning_rate": 0.0015,
"loss": 1.4673,
"step": 5660
},
{
"epoch": 0.5981012658227848,
"grad_norm": 0.6172899007797241,
"learning_rate": 0.0015,
"loss": 1.4735,
"step": 5670
},
{
"epoch": 0.5991561181434599,
"grad_norm": 0.6515701413154602,
"learning_rate": 0.0015,
"loss": 1.474,
"step": 5680
},
{
"epoch": 0.6002109704641351,
"grad_norm": 1.0846974849700928,
"learning_rate": 0.0015,
"loss": 1.4552,
"step": 5690
},
{
"epoch": 0.6012658227848101,
"grad_norm": 0.6162060499191284,
"learning_rate": 0.0015,
"loss": 1.4589,
"step": 5700
},
{
"epoch": 0.6023206751054853,
"grad_norm": 0.5707959532737732,
"learning_rate": 0.0015,
"loss": 1.462,
"step": 5710
},
{
"epoch": 0.6033755274261603,
"grad_norm": 0.5216309428215027,
"learning_rate": 0.0015,
"loss": 1.4642,
"step": 5720
},
{
"epoch": 0.6044303797468354,
"grad_norm": 0.5523501038551331,
"learning_rate": 0.0015,
"loss": 1.4665,
"step": 5730
},
{
"epoch": 0.6054852320675106,
"grad_norm": 0.5915478467941284,
"learning_rate": 0.0015,
"loss": 1.4617,
"step": 5740
},
{
"epoch": 0.6065400843881856,
"grad_norm": 0.6726484894752502,
"learning_rate": 0.0015,
"loss": 1.4377,
"step": 5750
},
{
"epoch": 0.6075949367088608,
"grad_norm": 0.6674075722694397,
"learning_rate": 0.0015,
"loss": 1.448,
"step": 5760
},
{
"epoch": 0.6086497890295358,
"grad_norm": 0.6071065068244934,
"learning_rate": 0.0015,
"loss": 1.4511,
"step": 5770
},
{
"epoch": 0.609704641350211,
"grad_norm": 0.6001817584037781,
"learning_rate": 0.0015,
"loss": 1.4581,
"step": 5780
},
{
"epoch": 0.6107594936708861,
"grad_norm": 0.5846808552742004,
"learning_rate": 0.0015,
"loss": 1.4546,
"step": 5790
},
{
"epoch": 0.6118143459915611,
"grad_norm": 0.5775858759880066,
"learning_rate": 0.0015,
"loss": 1.458,
"step": 5800
},
{
"epoch": 0.6128691983122363,
"grad_norm": 0.572533130645752,
"learning_rate": 0.0015,
"loss": 1.4633,
"step": 5810
},
{
"epoch": 0.6139240506329114,
"grad_norm": 0.8598059415817261,
"learning_rate": 0.0015,
"loss": 1.4779,
"step": 5820
},
{
"epoch": 0.6149789029535865,
"grad_norm": 0.635729968547821,
"learning_rate": 0.0015,
"loss": 1.4605,
"step": 5830
},
{
"epoch": 0.6160337552742616,
"grad_norm": 0.630516767501831,
"learning_rate": 0.0015,
"loss": 1.4715,
"step": 5840
},
{
"epoch": 0.6170886075949367,
"grad_norm": 0.7826014757156372,
"learning_rate": 0.0015,
"loss": 1.4753,
"step": 5850
},
{
"epoch": 0.6181434599156118,
"grad_norm": 0.613071858882904,
"learning_rate": 0.0015,
"loss": 1.4611,
"step": 5860
},
{
"epoch": 0.619198312236287,
"grad_norm": 0.6302315592765808,
"learning_rate": 0.0015,
"loss": 1.4562,
"step": 5870
},
{
"epoch": 0.620253164556962,
"grad_norm": 0.7535067200660706,
"learning_rate": 0.0015,
"loss": 1.4655,
"step": 5880
},
{
"epoch": 0.6213080168776371,
"grad_norm": 0.5600311160087585,
"learning_rate": 0.0015,
"loss": 1.456,
"step": 5890
},
{
"epoch": 0.6223628691983122,
"grad_norm": 0.6248704195022583,
"learning_rate": 0.0015,
"loss": 1.4593,
"step": 5900
},
{
"epoch": 0.6234177215189873,
"grad_norm": 0.6140940189361572,
"learning_rate": 0.0015,
"loss": 1.4526,
"step": 5910
},
{
"epoch": 0.6244725738396625,
"grad_norm": 0.5500303506851196,
"learning_rate": 0.0015,
"loss": 1.4454,
"step": 5920
},
{
"epoch": 0.6255274261603375,
"grad_norm": 0.9800376296043396,
"learning_rate": 0.0015,
"loss": 1.4555,
"step": 5930
},
{
"epoch": 0.6265822784810127,
"grad_norm": 0.7076262831687927,
"learning_rate": 0.0015,
"loss": 1.4621,
"step": 5940
},
{
"epoch": 0.6276371308016878,
"grad_norm": 0.5389084815979004,
"learning_rate": 0.0015,
"loss": 1.4505,
"step": 5950
},
{
"epoch": 0.6286919831223629,
"grad_norm": 0.5484883785247803,
"learning_rate": 0.0015,
"loss": 1.4574,
"step": 5960
},
{
"epoch": 0.629746835443038,
"grad_norm": 0.5646184682846069,
"learning_rate": 0.0015,
"loss": 1.4567,
"step": 5970
},
{
"epoch": 0.630801687763713,
"grad_norm": 0.5847921967506409,
"learning_rate": 0.0015,
"loss": 1.4406,
"step": 5980
},
{
"epoch": 0.6318565400843882,
"grad_norm": 0.696679949760437,
"learning_rate": 0.0015,
"loss": 1.4616,
"step": 5990
},
{
"epoch": 0.6329113924050633,
"grad_norm": 0.5997524261474609,
"learning_rate": 0.0015,
"loss": 1.4693,
"step": 6000
},
{
"epoch": 0.6339662447257384,
"grad_norm": 0.6613350510597229,
"learning_rate": 0.0015,
"loss": 1.4575,
"step": 6010
},
{
"epoch": 0.6350210970464135,
"grad_norm": 0.9558404088020325,
"learning_rate": 0.0015,
"loss": 1.4601,
"step": 6020
},
{
"epoch": 0.6360759493670886,
"grad_norm": 0.5578508377075195,
"learning_rate": 0.0015,
"loss": 1.4424,
"step": 6030
},
{
"epoch": 0.6371308016877637,
"grad_norm": 0.5517908930778503,
"learning_rate": 0.0015,
"loss": 1.4604,
"step": 6040
},
{
"epoch": 0.6381856540084389,
"grad_norm": 0.555235743522644,
"learning_rate": 0.0015,
"loss": 1.4416,
"step": 6050
},
{
"epoch": 0.6392405063291139,
"grad_norm": 0.6116271615028381,
"learning_rate": 0.0015,
"loss": 1.4607,
"step": 6060
},
{
"epoch": 0.640295358649789,
"grad_norm": 0.5915717482566833,
"learning_rate": 0.0015,
"loss": 1.453,
"step": 6070
},
{
"epoch": 0.6413502109704642,
"grad_norm": 0.636180579662323,
"learning_rate": 0.0015,
"loss": 1.451,
"step": 6080
},
{
"epoch": 0.6424050632911392,
"grad_norm": 0.5188226103782654,
"learning_rate": 0.0015,
"loss": 1.451,
"step": 6090
},
{
"epoch": 0.6434599156118144,
"grad_norm": 0.643001914024353,
"learning_rate": 0.0015,
"loss": 1.446,
"step": 6100
},
{
"epoch": 0.6445147679324894,
"grad_norm": 0.6120133996009827,
"learning_rate": 0.0015,
"loss": 1.4573,
"step": 6110
},
{
"epoch": 0.6455696202531646,
"grad_norm": 0.6124022006988525,
"learning_rate": 0.0015,
"loss": 1.4533,
"step": 6120
},
{
"epoch": 0.6466244725738397,
"grad_norm": 0.7007266283035278,
"learning_rate": 0.0015,
"loss": 1.4516,
"step": 6130
},
{
"epoch": 0.6476793248945147,
"grad_norm": 1.0392042398452759,
"learning_rate": 0.0015,
"loss": 1.4554,
"step": 6140
},
{
"epoch": 0.6487341772151899,
"grad_norm": 0.8664846420288086,
"learning_rate": 0.0015,
"loss": 1.4672,
"step": 6150
},
{
"epoch": 0.6497890295358649,
"grad_norm": 0.6850740909576416,
"learning_rate": 0.0015,
"loss": 1.4554,
"step": 6160
},
{
"epoch": 0.6508438818565401,
"grad_norm": 0.5882806777954102,
"learning_rate": 0.001487560447745699,
"loss": 1.4321,
"step": 6170
},
{
"epoch": 0.6518987341772152,
"grad_norm": 0.5940200090408325,
"learning_rate": 0.0014670566859713624,
"loss": 1.4553,
"step": 6180
},
{
"epoch": 0.6529535864978903,
"grad_norm": 0.5412749648094177,
"learning_rate": 0.0014468355374162303,
"loss": 1.457,
"step": 6190
},
{
"epoch": 0.6540084388185654,
"grad_norm": 0.6131719946861267,
"learning_rate": 0.0014268931066862504,
"loss": 1.4487,
"step": 6200
},
{
"epoch": 0.6550632911392406,
"grad_norm": 0.5218397974967957,
"learning_rate": 0.0014072255520794614,
"loss": 1.4429,
"step": 6210
},
{
"epoch": 0.6561181434599156,
"grad_norm": 0.5561788082122803,
"learning_rate": 0.0013878290848459301,
"loss": 1.4485,
"step": 6220
},
{
"epoch": 0.6571729957805907,
"grad_norm": 0.5817163586616516,
"learning_rate": 0.0013686999684578874,
"loss": 1.4364,
"step": 6230
},
{
"epoch": 0.6582278481012658,
"grad_norm": 0.5967532396316528,
"learning_rate": 0.001349834517889925,
"loss": 1.4398,
"step": 6240
},
{
"epoch": 0.6592827004219409,
"grad_norm": 0.5231022238731384,
"learning_rate": 0.001331229098909114,
"loss": 1.4459,
"step": 6250
},
{
"epoch": 0.6603375527426161,
"grad_norm": 0.5444517135620117,
"learning_rate": 0.0013128801273749075,
"loss": 1.4358,
"step": 6260
},
{
"epoch": 0.6613924050632911,
"grad_norm": 0.6708015203475952,
"learning_rate": 0.0012947840685486932,
"loss": 1.4421,
"step": 6270
},
{
"epoch": 0.6624472573839663,
"grad_norm": 0.5365439057350159,
"learning_rate": 0.0012769374364128628,
"loss": 1.4387,
"step": 6280
},
{
"epoch": 0.6635021097046413,
"grad_norm": 0.5873969197273254,
"learning_rate": 0.0012593367929992667,
"loss": 1.434,
"step": 6290
},
{
"epoch": 0.6645569620253164,
"grad_norm": 0.6704041361808777,
"learning_rate": 0.0012419787477269257,
"loss": 1.4307,
"step": 6300
},
{
"epoch": 0.6656118143459916,
"grad_norm": 0.5682642459869385,
"learning_rate": 0.0012248599567488698,
"loss": 1.4276,
"step": 6310
},
{
"epoch": 0.6666666666666666,
"grad_norm": 0.5359078645706177,
"learning_rate": 0.0012079771223079822,
"loss": 1.4218,
"step": 6320
},
{
"epoch": 0.6677215189873418,
"grad_norm": 0.5815070867538452,
"learning_rate": 0.0011913269921017202,
"loss": 1.43,
"step": 6330
},
{
"epoch": 0.6687763713080169,
"grad_norm": 0.7028301358222961,
"learning_rate": 0.0011749063586555919,
"loss": 1.4236,
"step": 6340
},
{
"epoch": 0.669831223628692,
"grad_norm": 0.54791659116745,
"learning_rate": 0.001158712058705271,
"loss": 1.4266,
"step": 6350
},
{
"epoch": 0.6708860759493671,
"grad_norm": 0.6901172399520874,
"learning_rate": 0.0011427409725872262,
"loss": 1.425,
"step": 6360
},
{
"epoch": 0.6719409282700421,
"grad_norm": 0.5446763634681702,
"learning_rate": 0.00112699002363775,
"loss": 1.4039,
"step": 6370
},
{
"epoch": 0.6729957805907173,
"grad_norm": 0.7008781433105469,
"learning_rate": 0.0011114561776002726,
"loss": 1.4082,
"step": 6380
},
{
"epoch": 0.6740506329113924,
"grad_norm": 0.8061379790306091,
"learning_rate": 0.001096136442040843,
"loss": 1.4207,
"step": 6390
},
{
"epoch": 0.6751054852320675,
"grad_norm": 0.5769936442375183,
"learning_rate": 0.001081027865771668,
"loss": 1.4183,
"step": 6400
},
{
"epoch": 0.6761603375527426,
"grad_norm": 0.567191481590271,
"learning_rate": 0.0010661275382825958,
"loss": 1.417,
"step": 6410
},
{
"epoch": 0.6772151898734177,
"grad_norm": 0.5587610006332397,
"learning_rate": 0.0010514325891804379,
"loss": 1.4027,
"step": 6420
},
{
"epoch": 0.6782700421940928,
"grad_norm": 0.5902115702629089,
"learning_rate": 0.0010369401876360166,
"loss": 1.4093,
"step": 6430
},
{
"epoch": 0.679324894514768,
"grad_norm": 0.6487098336219788,
"learning_rate": 0.001022647541838836,
"loss": 1.4005,
"step": 6440
},
{
"epoch": 0.680379746835443,
"grad_norm": 0.5926433205604553,
"learning_rate": 0.0010085518984592678,
"loss": 1.4005,
"step": 6450
},
{
"epoch": 0.6814345991561181,
"grad_norm": 0.5487666130065918,
"learning_rate": 0.0009946505421181513,
"loss": 1.4101,
"step": 6460
},
{
"epoch": 0.6824894514767933,
"grad_norm": 0.644711434841156,
"learning_rate": 0.0009809407948637044,
"loss": 1.4056,
"step": 6470
},
{
"epoch": 0.6835443037974683,
"grad_norm": 0.739071786403656,
"learning_rate": 0.0009674200156556436,
"loss": 1.4095,
"step": 6480
},
{
"epoch": 0.6845991561181435,
"grad_norm": 0.5890470743179321,
"learning_rate": 0.0009540855998564147,
"loss": 1.4049,
"step": 6490
},
{
"epoch": 0.6856540084388185,
"grad_norm": 0.5881624817848206,
"learning_rate": 0.000940934978729437,
"loss": 1.4179,
"step": 6500
},
{
"epoch": 0.6867088607594937,
"grad_norm": 0.5026538372039795,
"learning_rate": 0.0009279656189442628,
"loss": 1.417,
"step": 6510
},
{
"epoch": 0.6877637130801688,
"grad_norm": 0.5358561873435974,
"learning_rate": 0.0009151750220885573,
"loss": 1.4013,
"step": 6520
},
{
"epoch": 0.6888185654008439,
"grad_norm": 0.5941901206970215,
"learning_rate": 0.0009025607241868057,
"loss": 1.392,
"step": 6530
},
{
"epoch": 0.689873417721519,
"grad_norm": 0.5857279896736145,
"learning_rate": 0.0008901202952256545,
"loss": 1.4004,
"step": 6540
},
{
"epoch": 0.6909282700421941,
"grad_norm": 0.545674741268158,
"learning_rate": 0.0008778513386857928,
"loss": 1.3949,
"step": 6550
},
{
"epoch": 0.6919831223628692,
"grad_norm": 0.6813368201255798,
"learning_rate": 0.0008657514910802905,
"loss": 1.3967,
"step": 6560
},
{
"epoch": 0.6930379746835443,
"grad_norm": 0.5698047280311584,
"learning_rate": 0.0008538184214992943,
"loss": 1.3903,
"step": 6570
},
{
"epoch": 0.6940928270042194,
"grad_norm": 0.536124050617218,
"learning_rate": 0.0008420498311610049,
"loss": 1.3858,
"step": 6580
},
{
"epoch": 0.6951476793248945,
"grad_norm": 0.5726207494735718,
"learning_rate": 0.0008304434529688382,
"loss": 1.3909,
"step": 6590
},
{
"epoch": 0.6962025316455697,
"grad_norm": 0.5136227607727051,
"learning_rate": 0.0008189970510746938,
"loss": 1.3956,
"step": 6600
},
{
"epoch": 0.6972573839662447,
"grad_norm": 0.5583709478378296,
"learning_rate": 0.0008077084204482425,
"loss": 1.379,
"step": 6610
},
{
"epoch": 0.6983122362869199,
"grad_norm": 0.5310684442520142,
"learning_rate": 0.0007965753864521494,
"loss": 1.3919,
"step": 6620
},
{
"epoch": 0.6993670886075949,
"grad_norm": 0.5554360747337341,
"learning_rate": 0.0007855958044231527,
"loss": 1.3832,
"step": 6630
},
{
"epoch": 0.70042194092827,
"grad_norm": 0.6471289396286011,
"learning_rate": 0.000774767559258917,
"loss": 1.3869,
"step": 6640
},
{
"epoch": 0.7014767932489452,
"grad_norm": 0.578295886516571,
"learning_rate": 0.0007640885650105806,
"loss": 1.3816,
"step": 6650
},
{
"epoch": 0.7025316455696202,
"grad_norm": 0.5233500003814697,
"learning_rate": 0.0007535567644809191,
"loss": 1.3798,
"step": 6660
},
{
"epoch": 0.7035864978902954,
"grad_norm": 0.5646123290061951,
"learning_rate": 0.0007431701288280478,
"loss": 1.3825,
"step": 6670
},
{
"epoch": 0.7046413502109705,
"grad_norm": 0.523230791091919,
"learning_rate": 0.0007329266571745864,
"loss": 1.3826,
"step": 6680
},
{
"epoch": 0.7056962025316456,
"grad_norm": 0.478598952293396,
"learning_rate": 0.0007228243762222109,
"loss": 1.3744,
"step": 6690
},
{
"epoch": 0.7067510548523207,
"grad_norm": 0.4953811764717102,
"learning_rate": 0.0007128613398715179,
"loss": 1.3763,
"step": 6700
},
{
"epoch": 0.7078059071729957,
"grad_norm": 0.5036348104476929,
"learning_rate": 0.0007030356288471288,
"loss": 1.3775,
"step": 6710
},
{
"epoch": 0.7088607594936709,
"grad_norm": 0.5722461342811584,
"learning_rate": 0.0006933453503279619,
"loss": 1.3665,
"step": 6720
},
{
"epoch": 0.709915611814346,
"grad_norm": 0.5143836140632629,
"learning_rate": 0.0006837886375825994,
"loss": 1.3756,
"step": 6730
},
{
"epoch": 0.7109704641350211,
"grad_norm": 0.5171010494232178,
"learning_rate": 0.0006743636496096813,
"loss": 1.3769,
"step": 6740
},
{
"epoch": 0.7120253164556962,
"grad_norm": 0.5512645244598389,
"learning_rate": 0.0006650685707832559,
"loss": 1.3809,
"step": 6750
},
{
"epoch": 0.7130801687763713,
"grad_norm": 0.6063659191131592,
"learning_rate": 0.0006559016105030176,
"loss": 1.3747,
"step": 6760
},
{
"epoch": 0.7141350210970464,
"grad_norm": 0.5525716543197632,
"learning_rate": 0.000646861002849367,
"loss": 1.375,
"step": 6770
},
{
"epoch": 0.7151898734177216,
"grad_norm": 0.5591706037521362,
"learning_rate": 0.0006379450062432248,
"loss": 1.3733,
"step": 6780
},
{
"epoch": 0.7162447257383966,
"grad_norm": 0.500237226486206,
"learning_rate": 0.0006291519031105347,
"loss": 1.377,
"step": 6790
},
{
"epoch": 0.7172995780590717,
"grad_norm": 0.5185356736183167,
"learning_rate": 0.00062047999955139,
"loss": 1.3648,
"step": 6800
},
{
"epoch": 0.7183544303797469,
"grad_norm": 0.6288546919822693,
"learning_rate": 0.000611927625013722,
"loss": 1.3672,
"step": 6810
},
{
"epoch": 0.7194092827004219,
"grad_norm": 0.5819681286811829,
"learning_rate": 0.0006034931319714858,
"loss": 1.3668,
"step": 6820
},
{
"epoch": 0.7204641350210971,
"grad_norm": 0.7123441100120544,
"learning_rate": 0.0005951748956072806,
"loss": 1.3657,
"step": 6830
},
{
"epoch": 0.7215189873417721,
"grad_norm": 0.6854354739189148,
"learning_rate": 0.0005869713134993463,
"loss": 1.3554,
"step": 6840
},
{
"epoch": 0.7225738396624473,
"grad_norm": 0.4780556261539459,
"learning_rate": 0.0005788808053128734,
"loss": 1.3649,
"step": 6850
},
{
"epoch": 0.7236286919831224,
"grad_norm": 0.6098446846008301,
"learning_rate": 0.0005709018124955674,
"loss": 1.3602,
"step": 6860
},
{
"epoch": 0.7246835443037974,
"grad_norm": 0.4990808963775635,
"learning_rate": 0.0005630327979774111,
"loss": 1.3556,
"step": 6870
},
{
"epoch": 0.7257383966244726,
"grad_norm": 0.5330736637115479,
"learning_rate": 0.0005552722458745627,
"loss": 1.3639,
"step": 6880
},
{
"epoch": 0.7267932489451476,
"grad_norm": 0.5924967527389526,
"learning_rate": 0.0005476186611973374,
"loss": 1.3533,
"step": 6890
},
{
"epoch": 0.7278481012658228,
"grad_norm": 0.5976269245147705,
"learning_rate": 0.000540070569562213,
"loss": 1.3543,
"step": 6900
},
{
"epoch": 0.7289029535864979,
"grad_norm": 0.5137798190116882,
"learning_rate": 0.0005326265169078048,
"loss": 1.3754,
"step": 6910
},
{
"epoch": 0.729957805907173,
"grad_norm": 0.5295397043228149,
"learning_rate": 0.0005252850692147567,
"loss": 1.3561,
"step": 6920
},
{
"epoch": 0.7310126582278481,
"grad_norm": 0.5149438381195068,
"learning_rate": 0.0005180448122294913,
"loss": 1.3615,
"step": 6930
},
{
"epoch": 0.7320675105485233,
"grad_norm": 0.5110730528831482,
"learning_rate": 0.0005109043511917693,
"loss": 1.3653,
"step": 6940
},
{
"epoch": 0.7331223628691983,
"grad_norm": 0.5516926646232605,
"learning_rate": 0.0005038623105660032,
"loss": 1.3602,
"step": 6950
},
{
"epoch": 0.7341772151898734,
"grad_norm": 0.48696425557136536,
"learning_rate": 0.0004969173337762747,
"loss": 1.3512,
"step": 6960
},
{
"epoch": 0.7352320675105485,
"grad_norm": 0.5534210205078125,
"learning_rate": 0.0004900680829450042,
"loss": 1.3456,
"step": 6970
},
{
"epoch": 0.7362869198312236,
"grad_norm": 0.510179340839386,
"learning_rate": 0.0004833132386352233,
"loss": 1.3499,
"step": 6980
},
{
"epoch": 0.7373417721518988,
"grad_norm": 0.5200173258781433,
"learning_rate": 0.00047665149959639813,
"loss": 1.3498,
"step": 6990
},
{
"epoch": 0.7383966244725738,
"grad_norm": 0.5795905590057373,
"learning_rate": 0.0004700815825137577,
"loss": 1.3611,
"step": 7000
},
{
"epoch": 0.739451476793249,
"grad_norm": 0.4876072406768799,
"learning_rate": 0.00046360222176107584,
"loss": 1.3297,
"step": 7010
},
{
"epoch": 0.740506329113924,
"grad_norm": 0.7506900429725647,
"learning_rate": 0.0004572121691568625,
"loss": 1.3463,
"step": 7020
},
{
"epoch": 0.7415611814345991,
"grad_norm": 0.5398470759391785,
"learning_rate": 0.00045091019372391354,
"loss": 1.3502,
"step": 7030
},
{
"epoch": 0.7426160337552743,
"grad_norm": 0.6205921769142151,
"learning_rate": 0.0004446950814521764,
"loss": 1.3567,
"step": 7040
},
{
"epoch": 0.7436708860759493,
"grad_norm": 0.5857154726982117,
"learning_rate": 0.0004385656350648835,
"loss": 1.3419,
"step": 7050
},
{
"epoch": 0.7447257383966245,
"grad_norm": 0.5818949341773987,
"learning_rate": 0.00043252067378790946,
"loss": 1.347,
"step": 7060
},
{
"epoch": 0.7457805907172996,
"grad_norm": 0.5439556241035461,
"learning_rate": 0.00042655903312230673,
"loss": 1.3549,
"step": 7070
},
{
"epoch": 0.7468354430379747,
"grad_norm": 0.48465341329574585,
"learning_rate": 0.0004206795646199778,
"loss": 1.348,
"step": 7080
},
{
"epoch": 0.7478902953586498,
"grad_norm": 0.5213728547096252,
"learning_rate": 0.0004148811356624379,
"loss": 1.3462,
"step": 7090
},
{
"epoch": 0.7489451476793249,
"grad_norm": 0.523260772228241,
"learning_rate": 0.0004091626292426282,
"loss": 1.3415,
"step": 7100
},
{
"epoch": 0.75,
"grad_norm": 0.5485712885856628,
"learning_rate": 0.0004035229437497357,
"loss": 1.3506,
"step": 7110
},
{
"epoch": 0.7510548523206751,
"grad_norm": 0.5779374241828918,
"learning_rate": 0.00039796099275697986,
"loss": 1.3464,
"step": 7120
},
{
"epoch": 0.7521097046413502,
"grad_norm": 0.5692628622055054,
"learning_rate": 0.0003924757048123232,
"loss": 1.3544,
"step": 7130
},
{
"epoch": 0.7531645569620253,
"grad_norm": 0.5076652765274048,
"learning_rate": 0.0003870660232320675,
"loss": 1.3374,
"step": 7140
},
{
"epoch": 0.7542194092827004,
"grad_norm": 0.6272045373916626,
"learning_rate": 0.000381730905897295,
"loss": 1.3353,
"step": 7150
},
{
"epoch": 0.7552742616033755,
"grad_norm": 0.48361095786094666,
"learning_rate": 0.0003764693250531141,
"loss": 1.3413,
"step": 7160
},
{
"epoch": 0.7563291139240507,
"grad_norm": 0.6693739891052246,
"learning_rate": 0.0003712802671106742,
"loss": 1.351,
"step": 7170
},
{
"epoch": 0.7573839662447257,
"grad_norm": 0.5601663589477539,
"learning_rate": 0.0003661627324519073,
"loss": 1.3289,
"step": 7180
},
{
"epoch": 0.7584388185654009,
"grad_norm": 0.4846477210521698,
"learning_rate": 0.0003611157352369628,
"loss": 1.333,
"step": 7190
},
{
"epoch": 0.759493670886076,
"grad_norm": 0.5156484246253967,
"learning_rate": 0.00035613830321429534,
"loss": 1.3361,
"step": 7200
},
{
"epoch": 0.760548523206751,
"grad_norm": 0.5223991274833679,
"learning_rate": 0.00035122947753337037,
"loss": 1.3405,
"step": 7210
},
{
"epoch": 0.7616033755274262,
"grad_norm": 0.551851749420166,
"learning_rate": 0.0003463883125599521,
"loss": 1.3532,
"step": 7220
},
{
"epoch": 0.7626582278481012,
"grad_norm": 0.5346834063529968,
"learning_rate": 0.00034161387569393647,
"loss": 1.3485,
"step": 7230
},
{
"epoch": 0.7637130801687764,
"grad_norm": 0.4932069480419159,
"learning_rate": 0.00033690524718969593,
"loss": 1.343,
"step": 7240
},
{
"epoch": 0.7647679324894515,
"grad_norm": 0.5038129687309265,
"learning_rate": 0.0003322615199788993,
"loss": 1.3394,
"step": 7250
},
{
"epoch": 0.7658227848101266,
"grad_norm": 0.5022118091583252,
"learning_rate": 0.00032768179949577516,
"loss": 1.3357,
"step": 7260
},
{
"epoch": 0.7668776371308017,
"grad_norm": 0.5111097097396851,
"learning_rate": 0.0003231652035047826,
"loss": 1.3337,
"step": 7270
},
{
"epoch": 0.7679324894514767,
"grad_norm": 0.5380088686943054,
"learning_rate": 0.000318710861930658,
"loss": 1.3351,
"step": 7280
},
{
"epoch": 0.7689873417721519,
"grad_norm": 0.4959379732608795,
"learning_rate": 0.0003143179166908038,
"loss": 1.3425,
"step": 7290
},
{
"epoch": 0.770042194092827,
"grad_norm": 0.6346443295478821,
"learning_rate": 0.00030998552152998834,
"loss": 1.3489,
"step": 7300
},
{
"epoch": 0.7710970464135021,
"grad_norm": 0.4984797239303589,
"learning_rate": 0.00030571284185732276,
"loss": 1.3305,
"step": 7310
},
{
"epoch": 0.7721518987341772,
"grad_norm": 0.5360432863235474,
"learning_rate": 0.0003014990545854864,
"loss": 1.3266,
"step": 7320
},
{
"epoch": 0.7732067510548524,
"grad_norm": 0.5483737587928772,
"learning_rate": 0.0002973433479721675,
"loss": 1.3319,
"step": 7330
},
{
"epoch": 0.7742616033755274,
"grad_norm": 0.5421823263168335,
"learning_rate": 0.00029324492146368906,
"loss": 1.3287,
"step": 7340
},
{
"epoch": 0.7753164556962026,
"grad_norm": 0.477429062128067,
"learning_rate": 0.00028920298554079113,
"loss": 1.3262,
"step": 7350
},
{
"epoch": 0.7763713080168776,
"grad_norm": 0.5185375213623047,
"learning_rate": 0.00028521676156653756,
"loss": 1.3319,
"step": 7360
},
{
"epoch": 0.7774261603375527,
"grad_norm": 0.5309724807739258,
"learning_rate": 0.00028128548163632006,
"loss": 1.334,
"step": 7370
},
{
"epoch": 0.7784810126582279,
"grad_norm": 0.5046623349189758,
"learning_rate": 0.0002774083884299292,
"loss": 1.3228,
"step": 7380
},
{
"epoch": 0.7795358649789029,
"grad_norm": 0.4987468421459198,
"learning_rate": 0.0002735847350656645,
"loss": 1.3316,
"step": 7390
},
{
"epoch": 0.7805907172995781,
"grad_norm": 0.559971034526825,
"learning_rate": 0.0002698137849564556,
"loss": 1.3372,
"step": 7400
},
{
"epoch": 0.7816455696202531,
"grad_norm": 0.521664023399353,
"learning_rate": 0.0002660948116679665,
"loss": 1.3195,
"step": 7410
},
{
"epoch": 0.7827004219409283,
"grad_norm": 0.52082359790802,
"learning_rate": 0.00026242709877865493,
"loss": 1.3255,
"step": 7420
},
{
"epoch": 0.7837552742616034,
"grad_norm": 0.5003120303153992,
"learning_rate": 0.00025880993974176204,
"loss": 1.3109,
"step": 7430
},
{
"epoch": 0.7848101265822784,
"grad_norm": 0.48625874519348145,
"learning_rate": 0.0002552426377492028,
"loss": 1.3151,
"step": 7440
},
{
"epoch": 0.7858649789029536,
"grad_norm": 0.5131382942199707,
"learning_rate": 0.0002517245055973337,
"loss": 1.3174,
"step": 7450
},
{
"epoch": 0.7869198312236287,
"grad_norm": 0.4839317500591278,
"learning_rate": 0.00024825486555456975,
"loss": 1.3272,
"step": 7460
},
{
"epoch": 0.7879746835443038,
"grad_norm": 0.5233309268951416,
"learning_rate": 0.00024483304923082663,
"loss": 1.3184,
"step": 7470
},
{
"epoch": 0.7890295358649789,
"grad_norm": 0.5166673064231873,
"learning_rate": 0.0002414583974487624,
"loss": 1.3239,
"step": 7480
},
{
"epoch": 0.790084388185654,
"grad_norm": 0.4803820550441742,
"learning_rate": 0.00023813026011679372,
"loss": 1.3315,
"step": 7490
},
{
"epoch": 0.7911392405063291,
"grad_norm": 0.5475544333457947,
"learning_rate": 0.0002348479961038625,
"loss": 1.3259,
"step": 7500
},
{
"epoch": 0.7921940928270043,
"grad_norm": 0.5300559997558594,
"learning_rate": 0.00023161097311592867,
"loss": 1.3318,
"step": 7510
},
{
"epoch": 0.7932489451476793,
"grad_norm": 0.5053457021713257,
"learning_rate": 0.00022841856757416538,
"loss": 1.3171,
"step": 7520
},
{
"epoch": 0.7943037974683544,
"grad_norm": 0.4950859546661377,
"learning_rate": 0.0002252701644948328,
"loss": 1.3172,
"step": 7530
},
{
"epoch": 0.7953586497890295,
"grad_norm": 0.5395722389221191,
"learning_rate": 0.00022216515737080817,
"loss": 1.3154,
"step": 7540
},
{
"epoch": 0.7964135021097046,
"grad_norm": 0.49658456444740295,
"learning_rate": 0.00021910294805474833,
"loss": 1.3122,
"step": 7550
},
{
"epoch": 0.7974683544303798,
"grad_norm": 0.6146180033683777,
"learning_rate": 0.0002160829466438629,
"loss": 1.3174,
"step": 7560
},
{
"epoch": 0.7985232067510548,
"grad_norm": 0.5398202538490295,
"learning_rate": 0.00021310457136627562,
"loss": 1.3262,
"step": 7570
},
{
"epoch": 0.79957805907173,
"grad_norm": 0.4800944924354553,
"learning_rate": 0.00021016724846895213,
"loss": 1.3232,
"step": 7580
},
{
"epoch": 0.8006329113924051,
"grad_norm": 0.4960499703884125,
"learning_rate": 0.00020727041210717235,
"loss": 1.3119,
"step": 7590
},
{
"epoch": 0.8016877637130801,
"grad_norm": 0.48392316699028015,
"learning_rate": 0.00020441350423552624,
"loss": 1.3171,
"step": 7600
},
{
"epoch": 0.8027426160337553,
"grad_norm": 0.5017353296279907,
"learning_rate": 0.00020159597450041257,
"loss": 1.3295,
"step": 7610
},
{
"epoch": 0.8037974683544303,
"grad_norm": 0.5136876702308655,
"learning_rate": 0.00019881728013401842,
"loss": 1.3025,
"step": 7620
},
{
"epoch": 0.8048523206751055,
"grad_norm": 0.5149848461151123,
"learning_rate": 0.00019607688584976116,
"loss": 1.3273,
"step": 7630
},
{
"epoch": 0.8059071729957806,
"grad_norm": 0.564681351184845,
"learning_rate": 0.00019337426373917076,
"loss": 1.3176,
"step": 7640
},
{
"epoch": 0.8069620253164557,
"grad_norm": 0.48444536328315735,
"learning_rate": 0.00019070889317019375,
"loss": 1.3318,
"step": 7650
},
{
"epoch": 0.8080168776371308,
"grad_norm": 0.4958171844482422,
"learning_rate": 0.00018808026068689883,
"loss": 1.3106,
"step": 7660
},
{
"epoch": 0.8090717299578059,
"grad_norm": 0.5000611543655396,
"learning_rate": 0.00018548785991056508,
"loss": 1.3174,
"step": 7670
},
{
"epoch": 0.810126582278481,
"grad_norm": 0.5784411430358887,
"learning_rate": 0.00018293119144213328,
"loss": 1.3293,
"step": 7680
},
{
"epoch": 0.8111814345991561,
"grad_norm": 0.5327420234680176,
"learning_rate": 0.00018040976276600176,
"loss": 1.3044,
"step": 7690
},
{
"epoch": 0.8122362869198312,
"grad_norm": 0.4729915261268616,
"learning_rate": 0.00017792308815514854,
"loss": 1.3132,
"step": 7700
},
{
"epoch": 0.8132911392405063,
"grad_norm": 0.6112768650054932,
"learning_rate": 0.00017547068857756104,
"loss": 1.3105,
"step": 7710
},
{
"epoch": 0.8143459915611815,
"grad_norm": 0.5344731211662292,
"learning_rate": 0.00017305209160395547,
"loss": 1.3161,
"step": 7720
},
{
"epoch": 0.8154008438818565,
"grad_norm": 0.5098864436149597,
"learning_rate": 0.00017066683131676825,
"loss": 1.3212,
"step": 7730
},
{
"epoch": 0.8164556962025317,
"grad_norm": 0.4790191054344177,
"learning_rate": 0.00016831444822040207,
"loss": 1.3221,
"step": 7740
},
{
"epoch": 0.8175105485232067,
"grad_norm": 0.5015791654586792,
"learning_rate": 0.00016599448915270845,
"loss": 1.3077,
"step": 7750
},
{
"epoch": 0.8185654008438819,
"grad_norm": 0.5503226518630981,
"learning_rate": 0.000163706507197691,
"loss": 1.3087,
"step": 7760
},
{
"epoch": 0.819620253164557,
"grad_norm": 0.503747284412384,
"learning_rate": 0.0001614500615994117,
"loss": 1.3168,
"step": 7770
},
{
"epoch": 0.820675105485232,
"grad_norm": 0.5768018364906311,
"learning_rate": 0.00015922471767708377,
"loss": 1.3081,
"step": 7780
},
{
"epoch": 0.8217299578059072,
"grad_norm": 0.5573811531066895,
"learning_rate": 0.00015703004674133498,
"loss": 1.3246,
"step": 7790
},
{
"epoch": 0.8227848101265823,
"grad_norm": 0.4715335965156555,
"learning_rate": 0.00015486562601162512,
"loss": 1.3138,
"step": 7800
},
{
"epoch": 0.8238396624472574,
"grad_norm": 0.5189917683601379,
"learning_rate": 0.0001527310385348017,
"loss": 1.3134,
"step": 7810
},
{
"epoch": 0.8248945147679325,
"grad_norm": 0.48335981369018555,
"learning_rate": 0.00015062587310477813,
"loss": 1.3191,
"step": 7820
},
{
"epoch": 0.8259493670886076,
"grad_norm": 0.4930039048194885,
"learning_rate": 0.00014854972418331948,
"loss": 1.3087,
"step": 7830
},
{
"epoch": 0.8270042194092827,
"grad_norm": 0.48580029606819153,
"learning_rate": 0.00014650219182191934,
"loss": 1.2964,
"step": 7840
},
{
"epoch": 0.8280590717299579,
"grad_norm": 0.5698385834693909,
"learning_rate": 0.00014448288158475423,
"loss": 1.306,
"step": 7850
},
{
"epoch": 0.8291139240506329,
"grad_norm": 0.4678795039653778,
"learning_rate": 0.0001424914044726995,
"loss": 1.2944,
"step": 7860
},
{
"epoch": 0.830168776371308,
"grad_norm": 0.5344630479812622,
"learning_rate": 0.0001405273768483926,
"loss": 1.3067,
"step": 7870
},
{
"epoch": 0.8312236286919831,
"grad_norm": 0.5643693804740906,
"learning_rate": 0.0001385904203623296,
"loss": 1.3246,
"step": 7880
},
{
"epoch": 0.8322784810126582,
"grad_norm": 0.48063284158706665,
"learning_rate": 0.00013668016187997964,
"loss": 1.3212,
"step": 7890
},
{
"epoch": 0.8333333333333334,
"grad_norm": 0.4919438660144806,
"learning_rate": 0.0001347962334099052,
"loss": 1.301,
"step": 7900
},
{
"epoch": 0.8343881856540084,
"grad_norm": 0.6470403671264648,
"learning_rate": 0.00013293827203287141,
"loss": 1.311,
"step": 7910
},
{
"epoch": 0.8354430379746836,
"grad_norm": 0.5314783453941345,
"learning_rate": 0.00013110591983193424,
"loss": 1.3089,
"step": 7920
},
{
"epoch": 0.8364978902953587,
"grad_norm": 0.4908827245235443,
"learning_rate": 0.00012929882382349103,
"loss": 1.3196,
"step": 7930
},
{
"epoch": 0.8375527426160337,
"grad_norm": 0.5107324719429016,
"learning_rate": 0.0001275166358892821,
"loss": 1.3138,
"step": 7940
},
{
"epoch": 0.8386075949367089,
"grad_norm": 0.4909266233444214,
"learning_rate": 0.00012575901270932944,
"loss": 1.3169,
"step": 7950
},
{
"epoch": 0.8396624472573839,
"grad_norm": 0.481869637966156,
"learning_rate": 0.00012402561569579935,
"loss": 1.3015,
"step": 7960
},
{
"epoch": 0.8407172995780591,
"grad_norm": 0.4808231294155121,
"learning_rate": 0.00012231611092777743,
"loss": 1.3028,
"step": 7970
},
{
"epoch": 0.8417721518987342,
"grad_norm": 0.47872987389564514,
"learning_rate": 0.00012063016908694192,
"loss": 1.2992,
"step": 7980
},
{
"epoch": 0.8428270042194093,
"grad_norm": 0.6007379293441772,
"learning_rate": 0.00011896746539412405,
"loss": 1.311,
"step": 7990
},
{
"epoch": 0.8438818565400844,
"grad_norm": 0.5131444931030273,
"learning_rate": 0.00011732767954674264,
"loss": 1.3077,
"step": 8000
},
{
"epoch": 0.8449367088607594,
"grad_norm": 0.49039360880851746,
"learning_rate": 0.00011571049565710122,
"loss": 1.313,
"step": 8010
},
{
"epoch": 0.8459915611814346,
"grad_norm": 0.5120943188667297,
"learning_rate": 0.00011411560219153552,
"loss": 1.3057,
"step": 8020
},
{
"epoch": 0.8470464135021097,
"grad_norm": 0.49493280053138733,
"learning_rate": 0.0001125426919103997,
"loss": 1.3044,
"step": 8030
},
{
"epoch": 0.8481012658227848,
"grad_norm": 0.5061576962471008,
"learning_rate": 0.00011099146180887992,
"loss": 1.317,
"step": 8040
},
{
"epoch": 0.8491561181434599,
"grad_norm": 0.49946215748786926,
"learning_rate": 0.0001094616130586235,
"loss": 1.2981,
"step": 8050
},
{
"epoch": 0.8502109704641351,
"grad_norm": 0.49331456422805786,
"learning_rate": 0.00010795285095017282,
"loss": 1.3066,
"step": 8060
},
{
"epoch": 0.8512658227848101,
"grad_norm": 0.5038382411003113,
"learning_rate": 0.00010646488483619263,
"loss": 1.3102,
"step": 8070
},
{
"epoch": 0.8523206751054853,
"grad_norm": 0.5167850255966187,
"learning_rate": 0.00010499742807547978,
"loss": 1.313,
"step": 8080
},
{
"epoch": 0.8533755274261603,
"grad_norm": 0.48715218901634216,
"learning_rate": 0.0001035501979777448,
"loss": 1.3002,
"step": 8090
},
{
"epoch": 0.8544303797468354,
"grad_norm": 0.5803899765014648,
"learning_rate": 0.00010212291574915464,
"loss": 1.2952,
"step": 8100
},
{
"epoch": 0.8554852320675106,
"grad_norm": 0.5617555975914001,
"learning_rate": 0.00010071530643862575,
"loss": 1.3108,
"step": 8110
},
{
"epoch": 0.8565400843881856,
"grad_norm": 0.48981258273124695,
"learning_rate": 9.932709888485788e-05,
"loss": 1.3033,
"step": 8120
},
{
"epoch": 0.8575949367088608,
"grad_norm": 0.4906420409679413,
"learning_rate": 9.79580256640974e-05,
"loss": 1.3025,
"step": 8130
},
{
"epoch": 0.8586497890295358,
"grad_norm": 0.508507251739502,
"learning_rate": 9.660782303862107e-05,
"loss": 1.3033,
"step": 8140
},
{
"epoch": 0.859704641350211,
"grad_norm": 0.48973771929740906,
"learning_rate": 9.527623090592962e-05,
"loss": 1.2991,
"step": 8150
},
{
"epoch": 0.8607594936708861,
"grad_norm": 0.4791290760040283,
"learning_rate": 9.396299274864176e-05,
"loss": 1.3099,
"step": 8160
},
{
"epoch": 0.8618143459915611,
"grad_norm": 0.519798755645752,
"learning_rate": 9.266785558507876e-05,
"loss": 1.3003,
"step": 8170
},
{
"epoch": 0.8628691983122363,
"grad_norm": 0.5567184090614319,
"learning_rate": 9.139056992053017e-05,
"loss": 1.3079,
"step": 8180
},
{
"epoch": 0.8639240506329114,
"grad_norm": 0.5259215235710144,
"learning_rate": 9.01308896991912e-05,
"loss": 1.3018,
"step": 8190
},
{
"epoch": 0.8649789029535865,
"grad_norm": 0.4801322817802429,
"learning_rate": 8.88885722567627e-05,
"loss": 1.3117,
"step": 8200
},
{
"epoch": 0.8660337552742616,
"grad_norm": 0.5169273614883423,
"learning_rate": 8.766337827370438e-05,
"loss": 1.3048,
"step": 8210
},
{
"epoch": 0.8670886075949367,
"grad_norm": 0.4925152063369751,
"learning_rate": 8.645507172913238e-05,
"loss": 1.3118,
"step": 8220
},
{
"epoch": 0.8681434599156118,
"grad_norm": 0.46642351150512695,
"learning_rate": 8.52634198553523e-05,
"loss": 1.3079,
"step": 8230
},
{
"epoch": 0.869198312236287,
"grad_norm": 0.4546700716018677,
"learning_rate": 8.408819309301891e-05,
"loss": 1.3047,
"step": 8240
},
{
"epoch": 0.870253164556962,
"grad_norm": 0.48208338022232056,
"learning_rate": 8.292916504691398e-05,
"loss": 1.3171,
"step": 8250
},
{
"epoch": 0.8713080168776371,
"grad_norm": 0.5228366255760193,
"learning_rate": 8.178611244233354e-05,
"loss": 1.309,
"step": 8260
},
{
"epoch": 0.8723628691983122,
"grad_norm": 0.5089304447174072,
"learning_rate": 8.065881508207636e-05,
"loss": 1.3059,
"step": 8270
},
{
"epoch": 0.8734177215189873,
"grad_norm": 0.4905509650707245,
"learning_rate": 7.954705580402525e-05,
"loss": 1.2905,
"step": 8280
},
{
"epoch": 0.8744725738396625,
"grad_norm": 0.509059488773346,
"learning_rate": 7.845062043931299e-05,
"loss": 1.2907,
"step": 8290
},
{
"epoch": 0.8755274261603375,
"grad_norm": 0.4941963255405426,
"learning_rate": 7.736929777106499e-05,
"loss": 1.314,
"step": 8300
},
{
"epoch": 0.8765822784810127,
"grad_norm": 0.520182192325592,
"learning_rate": 7.630287949371051e-05,
"loss": 1.2939,
"step": 8310
},
{
"epoch": 0.8776371308016878,
"grad_norm": 0.4690620005130768,
"learning_rate": 7.525116017285479e-05,
"loss": 1.3133,
"step": 8320
},
{
"epoch": 0.8786919831223629,
"grad_norm": 0.4872656762599945,
"learning_rate": 7.421393720570416e-05,
"loss": 1.3059,
"step": 8330
},
{
"epoch": 0.879746835443038,
"grad_norm": 0.5117143988609314,
"learning_rate": 7.319101078203692e-05,
"loss": 1.2936,
"step": 8340
},
{
"epoch": 0.880801687763713,
"grad_norm": 0.48863887786865234,
"learning_rate": 7.218218384571176e-05,
"loss": 1.2965,
"step": 8350
},
{
"epoch": 0.8818565400843882,
"grad_norm": 0.47456663846969604,
"learning_rate": 7.118726205670702e-05,
"loss": 1.2961,
"step": 8360
},
{
"epoch": 0.8829113924050633,
"grad_norm": 0.4854566156864166,
"learning_rate": 7.020605375368314e-05,
"loss": 1.3012,
"step": 8370
},
{
"epoch": 0.8839662447257384,
"grad_norm": 0.48545241355895996,
"learning_rate": 6.923836991706108e-05,
"loss": 1.3085,
"step": 8380
},
{
"epoch": 0.8850210970464135,
"grad_norm": 0.5120490193367004,
"learning_rate": 6.828402413260965e-05,
"loss": 1.2987,
"step": 8390
},
{
"epoch": 0.8860759493670886,
"grad_norm": 0.5022537708282471,
"learning_rate": 6.73428325555347e-05,
"loss": 1.3005,
"step": 8400
},
{
"epoch": 0.8871308016877637,
"grad_norm": 0.48437222838401794,
"learning_rate": 6.641461387506347e-05,
"loss": 1.3165,
"step": 8410
},
{
"epoch": 0.8881856540084389,
"grad_norm": 0.495381236076355,
"learning_rate": 6.549918927951679e-05,
"loss": 1.3054,
"step": 8420
},
{
"epoch": 0.8892405063291139,
"grad_norm": 0.5085771679878235,
"learning_rate": 6.459638242186298e-05,
"loss": 1.302,
"step": 8430
},
{
"epoch": 0.890295358649789,
"grad_norm": 0.46814489364624023,
"learning_rate": 6.370601938574637e-05,
"loss": 1.2959,
"step": 8440
},
{
"epoch": 0.8913502109704642,
"grad_norm": 0.5327754020690918,
"learning_rate": 6.282792865198421e-05,
"loss": 1.3026,
"step": 8450
},
{
"epoch": 0.8924050632911392,
"grad_norm": 0.5053880214691162,
"learning_rate": 6.196194106552512e-05,
"loss": 1.2995,
"step": 8460
},
{
"epoch": 0.8934599156118144,
"grad_norm": 0.49899402260780334,
"learning_rate": 6.110788980286329e-05,
"loss": 1.3018,
"step": 8470
},
{
"epoch": 0.8945147679324894,
"grad_norm": 0.5030297040939331,
"learning_rate": 6.026561033990159e-05,
"loss": 1.2969,
"step": 8480
},
{
"epoch": 0.8955696202531646,
"grad_norm": 0.5092901587486267,
"learning_rate": 5.943494042025771e-05,
"loss": 1.3012,
"step": 8490
},
{
"epoch": 0.8966244725738397,
"grad_norm": 0.4743645191192627,
"learning_rate": 5.8615720024007174e-05,
"loss": 1.2892,
"step": 8500
},
{
"epoch": 0.8976793248945147,
"grad_norm": 0.5087531208992004,
"learning_rate": 5.780779133685717e-05,
"loss": 1.3017,
"step": 8510
},
{
"epoch": 0.8987341772151899,
"grad_norm": 0.49778199195861816,
"learning_rate": 5.701099871974525e-05,
"loss": 1.297,
"step": 8520
},
{
"epoch": 0.8997890295358649,
"grad_norm": 0.5024697780609131,
"learning_rate": 5.6225188678857095e-05,
"loss": 1.3115,
"step": 8530
},
{
"epoch": 0.9008438818565401,
"grad_norm": 0.47762638330459595,
"learning_rate": 5.545020983605749e-05,
"loss": 1.3067,
"step": 8540
},
{
"epoch": 0.9018987341772152,
"grad_norm": 0.517483115196228,
"learning_rate": 5.4685912899728965e-05,
"loss": 1.2985,
"step": 8550
},
{
"epoch": 0.9029535864978903,
"grad_norm": 0.4921302795410156,
"learning_rate": 5.39321506360123e-05,
"loss": 1.2911,
"step": 8560
},
{
"epoch": 0.9040084388185654,
"grad_norm": 0.5750061869621277,
"learning_rate": 5.318877784044342e-05,
"loss": 1.3089,
"step": 8570
},
{
"epoch": 0.9050632911392406,
"grad_norm": 0.5050473213195801,
"learning_rate": 5.245565130998124e-05,
"loss": 1.3004,
"step": 8580
},
{
"epoch": 0.9061181434599156,
"grad_norm": 0.5933470129966736,
"learning_rate": 5.173262981542119e-05,
"loss": 1.3042,
"step": 8590
},
{
"epoch": 0.9071729957805907,
"grad_norm": 0.5033522248268127,
"learning_rate": 5.101957407418877e-05,
"loss": 1.296,
"step": 8600
},
{
"epoch": 0.9082278481012658,
"grad_norm": 0.536758542060852,
"learning_rate": 5.0316346723508287e-05,
"loss": 1.3004,
"step": 8610
},
{
"epoch": 0.9092827004219409,
"grad_norm": 0.5102678537368774,
"learning_rate": 4.962281229394129e-05,
"loss": 1.2941,
"step": 8620
},
{
"epoch": 0.9103375527426161,
"grad_norm": 0.4891055226325989,
"learning_rate": 4.893883718328984e-05,
"loss": 1.306,
"step": 8630
},
{
"epoch": 0.9113924050632911,
"grad_norm": 0.5518621206283569,
"learning_rate": 4.8264289630859386e-05,
"loss": 1.2851,
"step": 8640
},
{
"epoch": 0.9124472573839663,
"grad_norm": 0.49100545048713684,
"learning_rate": 4.759903969207646e-05,
"loss": 1.2913,
"step": 8650
},
{
"epoch": 0.9135021097046413,
"grad_norm": 0.5603622794151306,
"learning_rate": 4.694295921345623e-05,
"loss": 1.2918,
"step": 8660
},
{
"epoch": 0.9145569620253164,
"grad_norm": 0.4789530336856842,
"learning_rate": 4.629592180791501e-05,
"loss": 1.2991,
"step": 8670
},
{
"epoch": 0.9156118143459916,
"grad_norm": 0.46517252922058105,
"learning_rate": 4.565780283042316e-05,
"loss": 1.3083,
"step": 8680
},
{
"epoch": 0.9166666666666666,
"grad_norm": 0.49385249614715576,
"learning_rate": 4.502847935399348e-05,
"loss": 1.3013,
"step": 8690
},
{
"epoch": 0.9177215189873418,
"grad_norm": 0.49176329374313354,
"learning_rate": 4.440783014600059e-05,
"loss": 1.2986,
"step": 8700
},
{
"epoch": 0.9187763713080169,
"grad_norm": 0.4797545373439789,
"learning_rate": 4.3795735644826776e-05,
"loss": 1.2992,
"step": 8710
},
{
"epoch": 0.919831223628692,
"grad_norm": 0.4724355638027191,
"learning_rate": 4.319207793682965e-05,
"loss": 1.2912,
"step": 8720
},
{
"epoch": 0.9208860759493671,
"grad_norm": 0.5069165825843811,
"learning_rate": 4.259674073362732e-05,
"loss": 1.3053,
"step": 8730
},
{
"epoch": 0.9219409282700421,
"grad_norm": 0.49566447734832764,
"learning_rate": 4.200960934969664e-05,
"loss": 1.2926,
"step": 8740
},
{
"epoch": 0.9229957805907173,
"grad_norm": 0.4764130115509033,
"learning_rate": 4.143057068028024e-05,
"loss": 1.2909,
"step": 8750
},
{
"epoch": 0.9240506329113924,
"grad_norm": 0.5429271459579468,
"learning_rate": 4.0859513179598096e-05,
"loss": 1.2811,
"step": 8760
},
{
"epoch": 0.9251054852320675,
"grad_norm": 0.484330415725708,
"learning_rate": 4.02963268393593e-05,
"loss": 1.2911,
"step": 8770
},
{
"epoch": 0.9261603375527426,
"grad_norm": 0.4688258469104767,
"learning_rate": 3.974090316757029e-05,
"loss": 1.2997,
"step": 8780
},
{
"epoch": 0.9272151898734177,
"grad_norm": 0.48302310705184937,
"learning_rate": 3.919313516763478e-05,
"loss": 1.2937,
"step": 8790
},
{
"epoch": 0.9282700421940928,
"grad_norm": 0.49148064851760864,
"learning_rate": 3.8652917317742106e-05,
"loss": 1.2903,
"step": 8800
},
{
"epoch": 0.929324894514768,
"grad_norm": 0.4975200593471527,
"learning_rate": 3.812014555053955e-05,
"loss": 1.3093,
"step": 8810
},
{
"epoch": 0.930379746835443,
"grad_norm": 0.47071343660354614,
"learning_rate": 3.759471723308477e-05,
"loss": 1.2938,
"step": 8820
},
{
"epoch": 0.9314345991561181,
"grad_norm": 0.49124982953071594,
"learning_rate": 3.707653114707471e-05,
"loss": 1.311,
"step": 8830
},
{
"epoch": 0.9324894514767933,
"grad_norm": 0.4726497232913971,
"learning_rate": 3.6565487469346904e-05,
"loss": 1.2979,
"step": 8840
},
{
"epoch": 0.9335443037974683,
"grad_norm": 0.5397368669509888,
"learning_rate": 3.606148775264958e-05,
"loss": 1.2955,
"step": 8850
},
{
"epoch": 0.9345991561181435,
"grad_norm": 0.5343860983848572,
"learning_rate": 3.5564434906676834e-05,
"loss": 1.2958,
"step": 8860
},
{
"epoch": 0.9356540084388185,
"grad_norm": 0.47638171911239624,
"learning_rate": 3.507423317936521e-05,
"loss": 1.3009,
"step": 8870
},
{
"epoch": 0.9367088607594937,
"grad_norm": 0.49189749360084534,
"learning_rate": 3.4590788138448004e-05,
"loss": 1.3024,
"step": 8880
},
{
"epoch": 0.9377637130801688,
"grad_norm": 0.49116694927215576,
"learning_rate": 3.411400665326393e-05,
"loss": 1.3019,
"step": 8890
},
{
"epoch": 0.9388185654008439,
"grad_norm": 0.4798203110694885,
"learning_rate": 3.364379687681642e-05,
"loss": 1.2918,
"step": 8900
},
{
"epoch": 0.939873417721519,
"grad_norm": 0.5427767634391785,
"learning_rate": 3.31800682280803e-05,
"loss": 1.2905,
"step": 8910
},
{
"epoch": 0.9409282700421941,
"grad_norm": 0.4585849940776825,
"learning_rate": 3.272273137455225e-05,
"loss": 1.3013,
"step": 8920
},
{
"epoch": 0.9419831223628692,
"grad_norm": 0.49122941493988037,
"learning_rate": 3.227169821504187e-05,
"loss": 1.293,
"step": 8930
},
{
"epoch": 0.9430379746835443,
"grad_norm": 0.4745127856731415,
"learning_rate": 3.182688186269985e-05,
"loss": 1.2973,
"step": 8940
},
{
"epoch": 0.9440928270042194,
"grad_norm": 0.4775674641132355,
"learning_rate": 3.138819662828018e-05,
"loss": 1.2961,
"step": 8950
},
{
"epoch": 0.9451476793248945,
"grad_norm": 0.5113229751586914,
"learning_rate": 3.095555800363297e-05,
"loss": 1.3,
"step": 8960
},
{
"epoch": 0.9462025316455697,
"grad_norm": 0.5690352320671082,
"learning_rate": 3.052888264542484e-05,
"loss": 1.2948,
"step": 8970
},
{
"epoch": 0.9472573839662447,
"grad_norm": 0.5642811059951782,
"learning_rate": 3.0108088359083675e-05,
"loss": 1.294,
"step": 8980
},
{
"epoch": 0.9483122362869199,
"grad_norm": 0.47393810749053955,
"learning_rate": 2.9693094082964775e-05,
"loss": 1.2983,
"step": 8990
},
{
"epoch": 0.9493670886075949,
"grad_norm": 0.4721105098724365,
"learning_rate": 2.928381987273507e-05,
"loss": 1.2928,
"step": 9000
},
{
"epoch": 0.95042194092827,
"grad_norm": 0.48451584577560425,
"learning_rate": 2.8880186885972716e-05,
"loss": 1.2901,
"step": 9010
},
{
"epoch": 0.9514767932489452,
"grad_norm": 0.4690127372741699,
"learning_rate": 2.8482117366978935e-05,
"loss": 1.292,
"step": 9020
},
{
"epoch": 0.9525316455696202,
"grad_norm": 0.4970489740371704,
"learning_rate": 2.808953463179918e-05,
"loss": 1.3125,
"step": 9030
},
{
"epoch": 0.9535864978902954,
"grad_norm": 0.4926052689552307,
"learning_rate": 2.770236305345076e-05,
"loss": 1.287,
"step": 9040
},
{
"epoch": 0.9546413502109705,
"grad_norm": 0.48225638270378113,
"learning_rate": 2.732052804735409e-05,
"loss": 1.2978,
"step": 9050
},
{
"epoch": 0.9556962025316456,
"grad_norm": 0.48080137372016907,
"learning_rate": 2.6943956056964773e-05,
"loss": 1.2997,
"step": 9060
},
{
"epoch": 0.9567510548523207,
"grad_norm": 0.46203795075416565,
"learning_rate": 2.6572574539603643e-05,
"loss": 1.3011,
"step": 9070
},
{
"epoch": 0.9578059071729957,
"grad_norm": 0.48563188314437866,
"learning_rate": 2.6206311952482224e-05,
"loss": 1.2995,
"step": 9080
},
{
"epoch": 0.9588607594936709,
"grad_norm": 0.49966487288475037,
"learning_rate": 2.584509773892073e-05,
"loss": 1.2996,
"step": 9090
},
{
"epoch": 0.959915611814346,
"grad_norm": 0.4667785167694092,
"learning_rate": 2.5488862314756066e-05,
"loss": 1.281,
"step": 9100
},
{
"epoch": 0.9609704641350211,
"grad_norm": 0.47818562388420105,
"learning_rate": 2.513753705493713e-05,
"loss": 1.2925,
"step": 9110
},
{
"epoch": 0.9620253164556962,
"grad_norm": 0.5413153767585754,
"learning_rate": 2.4791054280304972e-05,
"loss": 1.302,
"step": 9120
},
{
"epoch": 0.9630801687763713,
"grad_norm": 0.5045819282531738,
"learning_rate": 2.4449347244555043e-05,
"loss": 1.2864,
"step": 9130
},
{
"epoch": 0.9641350210970464,
"grad_norm": 0.5392649173736572,
"learning_rate": 2.4112350121379255e-05,
"loss": 1.3055,
"step": 9140
},
{
"epoch": 0.9651898734177216,
"grad_norm": 0.5079522728919983,
"learning_rate": 2.3779997991785207e-05,
"loss": 1.2924,
"step": 9150
},
{
"epoch": 0.9662447257383966,
"grad_norm": 0.47834473848342896,
"learning_rate": 2.3452226831590232e-05,
"loss": 1.3007,
"step": 9160
},
{
"epoch": 0.9672995780590717,
"grad_norm": 0.49888527393341064,
"learning_rate": 2.3128973499087785e-05,
"loss": 1.2979,
"step": 9170
},
{
"epoch": 0.9683544303797469,
"grad_norm": 0.47910282015800476,
"learning_rate": 2.2810175722883866e-05,
"loss": 1.2965,
"step": 9180
},
{
"epoch": 0.9694092827004219,
"grad_norm": 0.48448124527931213,
"learning_rate": 2.2495772089901067e-05,
"loss": 1.2904,
"step": 9190
},
{
"epoch": 0.9704641350210971,
"grad_norm": 0.4972832202911377,
"learning_rate": 2.218570203354799e-05,
"loss": 1.2943,
"step": 9200
},
{
"epoch": 0.9715189873417721,
"grad_norm": 0.498006671667099,
"learning_rate": 2.187990582205175e-05,
"loss": 1.2949,
"step": 9210
},
{
"epoch": 0.9725738396624473,
"grad_norm": 0.5123701691627502,
"learning_rate": 2.157832454695122e-05,
"loss": 1.2907,
"step": 9220
},
{
"epoch": 0.9736286919831224,
"grad_norm": 0.46372082829475403,
"learning_rate": 2.1280900111748943e-05,
"loss": 1.2827,
"step": 9230
},
{
"epoch": 0.9746835443037974,
"grad_norm": 0.49028825759887695,
"learning_rate": 2.0987575220719476e-05,
"loss": 1.2789,
"step": 9240
},
{
"epoch": 0.9757383966244726,
"grad_norm": 0.48287495970726013,
"learning_rate": 2.069829336787193e-05,
"loss": 1.2904,
"step": 9250
},
{
"epoch": 0.9767932489451476,
"grad_norm": 0.4909447729587555,
"learning_rate": 2.0412998826064695e-05,
"loss": 1.288,
"step": 9260
},
{
"epoch": 0.9778481012658228,
"grad_norm": 0.4949563145637512,
"learning_rate": 2.0131636636270178e-05,
"loss": 1.2985,
"step": 9270
},
{
"epoch": 0.9789029535864979,
"grad_norm": 0.4889209270477295,
"learning_rate": 1.9854152596987523e-05,
"loss": 1.2862,
"step": 9280
},
{
"epoch": 0.979957805907173,
"grad_norm": 0.4816690981388092,
"learning_rate": 1.9580493253801253e-05,
"loss": 1.293,
"step": 9290
},
{
"epoch": 0.9810126582278481,
"grad_norm": 0.47251787781715393,
"learning_rate": 1.9310605889083842e-05,
"loss": 1.2914,
"step": 9300
},
{
"epoch": 0.9820675105485233,
"grad_norm": 0.5345935225486755,
"learning_rate": 1.904443851184018e-05,
"loss": 1.2923,
"step": 9310
},
{
"epoch": 0.9831223628691983,
"grad_norm": 0.48072347044944763,
"learning_rate": 1.87819398476921e-05,
"loss": 1.2877,
"step": 9320
},
{
"epoch": 0.9841772151898734,
"grad_norm": 0.4837753474712372,
"learning_rate": 1.8523059329000848e-05,
"loss": 1.2993,
"step": 9330
},
{
"epoch": 0.9852320675105485,
"grad_norm": 0.48834121227264404,
"learning_rate": 1.826774708512579e-05,
"loss": 1.299,
"step": 9340
},
{
"epoch": 0.9862869198312236,
"grad_norm": 0.46572625637054443,
"learning_rate": 1.8015953932817347e-05,
"loss": 1.3067,
"step": 9350
},
{
"epoch": 0.9873417721518988,
"grad_norm": 0.4712361991405487,
"learning_rate": 1.7767631366742332e-05,
"loss": 1.3005,
"step": 9360
},
{
"epoch": 0.9883966244725738,
"grad_norm": 0.47739508748054504,
"learning_rate": 1.7522731550139926e-05,
"loss": 1.2992,
"step": 9370
},
{
"epoch": 0.989451476793249,
"grad_norm": 0.4772108495235443,
"learning_rate": 1.728120730560641e-05,
"loss": 1.2836,
"step": 9380
},
{
"epoch": 0.990506329113924,
"grad_norm": 0.5069162845611572,
"learning_rate": 1.704301210600693e-05,
"loss": 1.2882,
"step": 9390
},
{
"epoch": 0.9915611814345991,
"grad_norm": 0.4878447651863098,
"learning_rate": 1.6808100065512536e-05,
"loss": 1.3021,
"step": 9400
},
{
"epoch": 0.9926160337552743,
"grad_norm": 0.4973181486129761,
"learning_rate": 1.657642593076074e-05,
"loss": 1.2759,
"step": 9410
},
{
"epoch": 0.9936708860759493,
"grad_norm": 0.4791336953639984,
"learning_rate": 1.634794507213793e-05,
"loss": 1.298,
"step": 9420
},
{
"epoch": 0.9947257383966245,
"grad_norm": 0.47102054953575134,
"learning_rate": 1.6122613475181976e-05,
"loss": 1.2966,
"step": 9430
},
{
"epoch": 0.9957805907172996,
"grad_norm": 0.4634316861629486,
"learning_rate": 1.590038773210323e-05,
"loss": 1.292,
"step": 9440
},
{
"epoch": 0.9968354430379747,
"grad_norm": 0.5019749402999878,
"learning_rate": 1.568122503342252e-05,
"loss": 1.3006,
"step": 9450
},
{
"epoch": 0.9978902953586498,
"grad_norm": 0.48680579662323,
"learning_rate": 1.5465083159724344e-05,
"loss": 1.2859,
"step": 9460
},
{
"epoch": 0.9989451476793249,
"grad_norm": 0.47166094183921814,
"learning_rate": 1.5251920473523708e-05,
"loss": 1.2984,
"step": 9470
},
{
"epoch": 1.0,
"grad_norm": 1.3670523166656494,
"learning_rate": 1.5041695911245136e-05,
"loss": 1.2902,
"step": 9480
}
],
"logging_steps": 10,
"max_steps": 9480,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 1000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.832308198648013e+16,
"train_batch_size": 1024,
"trial_name": null,
"trial_params": null
}