{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 3444,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008710801393728223,
      "grad_norm": 96.23190307617188,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 10.808,
      "step": 10
    },
    {
      "epoch": 0.017421602787456445,
      "grad_norm": 463.5845642089844,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 10.8147,
      "step": 20
    },
    {
      "epoch": 0.02613240418118467,
      "grad_norm": 0.10578063130378723,
      "learning_rate": 6.000000000000001e-07,
      "loss": 10.8077,
      "step": 30
    },
    {
      "epoch": 0.03484320557491289,
      "grad_norm": 0.09780663996934891,
      "learning_rate": 8.000000000000001e-07,
      "loss": 10.8218,
      "step": 40
    },
    {
      "epoch": 0.04355400696864112,
      "grad_norm": 1469.5677490234375,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 10.8079,
      "step": 50
    },
    {
      "epoch": 0.05226480836236934,
      "grad_norm": 0.09817048907279968,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 10.8204,
      "step": 60
    },
    {
      "epoch": 0.06097560975609756,
      "grad_norm": 0.09422990679740906,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 10.8159,
      "step": 70
    },
    {
      "epoch": 0.06968641114982578,
      "grad_norm": 0.09545309096574783,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 10.811,
      "step": 80
    },
    {
      "epoch": 0.078397212543554,
      "grad_norm": 0.08887256681919098,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 10.8048,
      "step": 90
    },
    {
      "epoch": 0.08710801393728224,
      "grad_norm": 0.0909484326839447,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 10.8126,
      "step": 100
    },
    {
      "epoch": 0.09581881533101046,
      "grad_norm": 0.09697296470403671,
      "learning_rate": 2.2e-06,
      "loss": 10.8056,
      "step": 110
    },
    {
      "epoch": 0.10452961672473868,
      "grad_norm": 0.09927204251289368,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 11.0541,
      "step": 120
    },
    {
      "epoch": 0.1132404181184669,
      "grad_norm": 0.09392143785953522,
      "learning_rate": 2.6e-06,
      "loss": 10.8073,
      "step": 130
    },
    {
      "epoch": 0.12195121951219512,
      "grad_norm": 0.09451345354318619,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 10.8041,
      "step": 140
    },
    {
      "epoch": 0.13066202090592335,
      "grad_norm": 0.09947695583105087,
      "learning_rate": 3e-06,
      "loss": 10.8141,
      "step": 150
    },
    {
      "epoch": 0.13937282229965156,
      "grad_norm": 0.09803537279367447,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 10.805,
      "step": 160
    },
    {
      "epoch": 0.1480836236933798,
      "grad_norm": 0.09703430533409119,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 10.8193,
      "step": 170
    },
    {
      "epoch": 0.156794425087108,
      "grad_norm": 0.09272942692041397,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 10.8151,
      "step": 180
    },
    {
      "epoch": 0.16550522648083624,
      "grad_norm": 0.0950935110449791,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 10.8051,
      "step": 190
    },
    {
      "epoch": 0.17421602787456447,
      "grad_norm": 0.10270467400550842,
      "learning_rate": 4.000000000000001e-06,
      "loss": 10.8043,
      "step": 200
    },
    {
      "epoch": 0.18292682926829268,
      "grad_norm": 0.09881923347711563,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 10.8044,
      "step": 210
    },
    {
      "epoch": 0.1916376306620209,
      "grad_norm": 0.10348394513130188,
      "learning_rate": 4.4e-06,
      "loss": 10.8036,
      "step": 220
    },
    {
      "epoch": 0.20034843205574912,
      "grad_norm": 0.09483418613672256,
      "learning_rate": 4.600000000000001e-06,
      "loss": 10.8046,
      "step": 230
    },
    {
      "epoch": 0.20905923344947736,
      "grad_norm": 0.09620082378387451,
      "learning_rate": 4.800000000000001e-06,
      "loss": 10.807,
      "step": 240
    },
    {
      "epoch": 0.21777003484320556,
      "grad_norm": 0.09709980338811874,
      "learning_rate": 5e-06,
      "loss": 10.8058,
      "step": 250
    },
    {
      "epoch": 0.2264808362369338,
      "grad_norm": 0.09373420476913452,
      "learning_rate": 5.2e-06,
      "loss": 10.8044,
      "step": 260
    },
    {
      "epoch": 0.23519163763066203,
      "grad_norm": 0.09700135141611099,
      "learning_rate": 5.400000000000001e-06,
      "loss": 10.8168,
      "step": 270
    },
    {
      "epoch": 0.24390243902439024,
      "grad_norm": 0.09450981020927429,
      "learning_rate": 5.600000000000001e-06,
      "loss": 10.8043,
      "step": 280
    },
    {
      "epoch": 0.25261324041811845,
      "grad_norm": 0.09983480721712112,
      "learning_rate": 5.8e-06,
      "loss": 10.8037,
      "step": 290
    },
    {
      "epoch": 0.2613240418118467,
      "grad_norm": 0.09153986722230911,
      "learning_rate": 6e-06,
      "loss": 10.8041,
      "step": 300
    },
    {
      "epoch": 0.2700348432055749,
      "grad_norm": 0.09664902836084366,
      "learning_rate": 6.200000000000001e-06,
      "loss": 10.805,
      "step": 310
    },
    {
      "epoch": 0.2787456445993031,
      "grad_norm": 0.08636524528265,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 10.8049,
      "step": 320
    },
    {
      "epoch": 0.2874564459930314,
      "grad_norm": 0.0945158526301384,
      "learning_rate": 6.600000000000001e-06,
      "loss": 10.8047,
      "step": 330
    },
    {
      "epoch": 0.2961672473867596,
      "grad_norm": 0.09447965025901794,
      "learning_rate": 6.800000000000001e-06,
      "loss": 10.8044,
      "step": 340
    },
    {
      "epoch": 0.3048780487804878,
      "grad_norm": 0.0949367955327034,
      "learning_rate": 7e-06,
      "loss": 10.8038,
      "step": 350
    },
    {
      "epoch": 0.313588850174216,
      "grad_norm": 0.10188119113445282,
      "learning_rate": 7.2000000000000005e-06,
      "loss": 10.8037,
      "step": 360
    },
    {
      "epoch": 0.32229965156794427,
      "grad_norm": 0.09333271533250809,
      "learning_rate": 7.4e-06,
      "loss": 10.8029,
      "step": 370
    },
    {
      "epoch": 0.3310104529616725,
      "grad_norm": 0.10046978294849396,
      "learning_rate": 7.600000000000001e-06,
      "loss": 10.8037,
      "step": 380
    },
    {
      "epoch": 0.3397212543554007,
      "grad_norm": 0.09508766233921051,
      "learning_rate": 7.800000000000002e-06,
      "loss": 10.8065,
      "step": 390
    },
    {
      "epoch": 0.34843205574912894,
      "grad_norm": 0.09448560327291489,
      "learning_rate": 8.000000000000001e-06,
      "loss": 10.8058,
      "step": 400
    },
    {
      "epoch": 0.35714285714285715,
      "grad_norm": 0.09986421465873718,
      "learning_rate": 8.2e-06,
      "loss": 14.4299,
      "step": 410
    },
    {
      "epoch": 0.36585365853658536,
      "grad_norm": 0.10298220068216324,
      "learning_rate": 8.400000000000001e-06,
      "loss": 10.8035,
      "step": 420
    },
    {
      "epoch": 0.37456445993031356,
      "grad_norm": 0.09236953407526016,
      "learning_rate": 8.6e-06,
      "loss": 10.8309,
      "step": 430
    },
    {
      "epoch": 0.3832752613240418,
      "grad_norm": 0.09000707417726517,
      "learning_rate": 8.8e-06,
      "loss": 10.8027,
      "step": 440
    },
    {
      "epoch": 0.39198606271777003,
      "grad_norm": 0.09554922580718994,
      "learning_rate": 9e-06,
      "loss": 10.8045,
      "step": 450
    },
    {
      "epoch": 0.40069686411149824,
      "grad_norm": 130.35623168945312,
      "learning_rate": 9.200000000000002e-06,
      "loss": 10.8034,
      "step": 460
    },
    {
      "epoch": 0.4094076655052265,
      "grad_norm": 0.09580319374799728,
      "learning_rate": 9.4e-06,
      "loss": 10.8022,
      "step": 470
    },
    {
      "epoch": 0.4181184668989547,
      "grad_norm": 0.10328342765569687,
      "learning_rate": 9.600000000000001e-06,
      "loss": 10.8087,
      "step": 480
    },
    {
      "epoch": 0.4268292682926829,
      "grad_norm": 0.09383571892976761,
      "learning_rate": 9.800000000000001e-06,
      "loss": 10.8027,
      "step": 490
    },
    {
      "epoch": 0.4355400696864111,
      "grad_norm": 61532.49609375,
      "learning_rate": 1e-05,
      "loss": 15.4887,
      "step": 500
    },
    {
      "epoch": 0.4442508710801394,
      "grad_norm": 0.09285067766904831,
      "learning_rate": 9.966032608695653e-06,
      "loss": 10.8028,
      "step": 510
    },
    {
      "epoch": 0.4529616724738676,
      "grad_norm": 0.09329842031002045,
      "learning_rate": 9.932065217391306e-06,
      "loss": 10.8969,
      "step": 520
    },
    {
      "epoch": 0.4616724738675958,
      "grad_norm": 0.09399975091218948,
      "learning_rate": 9.898097826086957e-06,
      "loss": 10.8297,
      "step": 530
    },
    {
      "epoch": 0.47038327526132406,
      "grad_norm": 1130.7960205078125,
      "learning_rate": 9.86413043478261e-06,
      "loss": 10.8358,
      "step": 540
    },
    {
      "epoch": 0.47909407665505227,
      "grad_norm": 0.0971442461013794,
      "learning_rate": 9.830163043478262e-06,
      "loss": 10.8641,
      "step": 550
    },
    {
      "epoch": 0.4878048780487805,
      "grad_norm": 0.09969327598810196,
      "learning_rate": 9.796195652173915e-06,
      "loss": 10.8124,
      "step": 560
    },
    {
      "epoch": 0.4965156794425087,
      "grad_norm": 0.0888582393527031,
      "learning_rate": 9.762228260869566e-06,
      "loss": 10.9576,
      "step": 570
    },
    {
      "epoch": 0.5052264808362369,
      "grad_norm": 0.09847419708967209,
      "learning_rate": 9.728260869565218e-06,
      "loss": 10.8022,
      "step": 580
    },
    {
      "epoch": 0.5139372822299652,
      "grad_norm": 0.0947929099202156,
      "learning_rate": 9.694293478260869e-06,
      "loss": 10.8011,
      "step": 590
    },
    {
      "epoch": 0.5226480836236934,
      "grad_norm": 9.681638717651367,
      "learning_rate": 9.660326086956523e-06,
      "loss": 10.8015,
      "step": 600
    },
    {
      "epoch": 0.5313588850174216,
      "grad_norm": 0.09560558199882507,
      "learning_rate": 9.626358695652174e-06,
      "loss": 13.2261,
      "step": 610
    },
    {
      "epoch": 0.5400696864111498,
      "grad_norm": 0.09392021596431732,
      "learning_rate": 9.592391304347827e-06,
      "loss": 10.802,
      "step": 620
    },
    {
      "epoch": 0.5487804878048781,
      "grad_norm": 0.10044202208518982,
      "learning_rate": 9.558423913043478e-06,
      "loss": 10.8005,
      "step": 630
    },
    {
      "epoch": 0.5574912891986062,
      "grad_norm": 0.09005896002054214,
      "learning_rate": 9.524456521739132e-06,
      "loss": 10.802,
      "step": 640
    },
    {
      "epoch": 0.5662020905923345,
      "grad_norm": 0.09686450660228729,
      "learning_rate": 9.490489130434783e-06,
      "loss": 10.801,
      "step": 650
    },
    {
      "epoch": 0.5749128919860628,
      "grad_norm": 0.09321983903646469,
      "learning_rate": 9.456521739130436e-06,
      "loss": 10.8008,
      "step": 660
    },
    {
      "epoch": 0.5836236933797909,
      "grad_norm": 0.0884283185005188,
      "learning_rate": 9.422554347826087e-06,
      "loss": 10.801,
      "step": 670
    },
    {
      "epoch": 0.5923344947735192,
      "grad_norm": 0.09928812086582184,
      "learning_rate": 9.388586956521741e-06,
      "loss": 10.7998,
      "step": 680
    },
    {
      "epoch": 0.6010452961672473,
      "grad_norm": 0.09434370696544647,
      "learning_rate": 9.354619565217392e-06,
      "loss": 10.8007,
      "step": 690
    },
    {
      "epoch": 0.6097560975609756,
      "grad_norm": 0.09228064864873886,
      "learning_rate": 9.320652173913044e-06,
      "loss": 10.8002,
      "step": 700
    },
    {
      "epoch": 0.6184668989547039,
      "grad_norm": 0.09412040561437607,
      "learning_rate": 9.286684782608695e-06,
      "loss": 10.8009,
      "step": 710
    },
    {
      "epoch": 0.627177700348432,
      "grad_norm": 0.0975504145026207,
      "learning_rate": 9.25271739130435e-06,
      "loss": 10.8002,
      "step": 720
    },
    {
      "epoch": 0.6358885017421603,
      "grad_norm": 0.10332240164279938,
      "learning_rate": 9.21875e-06,
      "loss": 10.8001,
      "step": 730
    },
    {
      "epoch": 0.6445993031358885,
      "grad_norm": 0.09318797290325165,
      "learning_rate": 9.184782608695653e-06,
      "loss": 10.8001,
      "step": 740
    },
    {
      "epoch": 0.6533101045296167,
      "grad_norm": 0.09171754121780396,
      "learning_rate": 9.150815217391306e-06,
      "loss": 10.7999,
      "step": 750
    },
    {
      "epoch": 0.662020905923345,
      "grad_norm": 0.0921366959810257,
      "learning_rate": 9.116847826086958e-06,
      "loss": 10.8003,
      "step": 760
    },
    {
      "epoch": 0.6707317073170732,
      "grad_norm": 0.09661433100700378,
      "learning_rate": 9.08288043478261e-06,
      "loss": 10.8001,
      "step": 770
    },
    {
      "epoch": 0.6794425087108014,
      "grad_norm": 0.09323884546756744,
      "learning_rate": 9.048913043478262e-06,
      "loss": 10.7997,
      "step": 780
    },
    {
      "epoch": 0.6881533101045296,
      "grad_norm": 0.08752495795488358,
      "learning_rate": 9.014945652173914e-06,
      "loss": 10.8,
      "step": 790
    },
    {
      "epoch": 0.6968641114982579,
      "grad_norm": 0.09087631851434708,
      "learning_rate": 8.980978260869567e-06,
      "loss": 10.798,
      "step": 800
    },
    {
      "epoch": 0.705574912891986,
      "grad_norm": 0.09523475170135498,
      "learning_rate": 8.947010869565218e-06,
      "loss": 10.7994,
      "step": 810
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 0.09918010234832764,
      "learning_rate": 8.91304347826087e-06,
      "loss": 10.7996,
      "step": 820
    },
    {
      "epoch": 0.7229965156794426,
      "grad_norm": 0.09539210051298141,
      "learning_rate": 8.879076086956523e-06,
      "loss": 10.7993,
      "step": 830
    },
    {
      "epoch": 0.7317073170731707,
      "grad_norm": 0.09296409040689468,
      "learning_rate": 8.845108695652174e-06,
      "loss": 10.7993,
      "step": 840
    },
    {
      "epoch": 0.740418118466899,
      "grad_norm": 0.08845611661672592,
      "learning_rate": 8.811141304347827e-06,
      "loss": 10.7993,
      "step": 850
    },
    {
      "epoch": 0.7491289198606271,
      "grad_norm": 0.0954870954155922,
      "learning_rate": 8.77717391304348e-06,
      "loss": 10.7981,
      "step": 860
    },
    {
      "epoch": 0.7578397212543554,
      "grad_norm": 0.09371493011713028,
      "learning_rate": 8.743206521739132e-06,
      "loss": 10.7984,
      "step": 870
    },
    {
      "epoch": 0.7665505226480837,
      "grad_norm": 0.09903618693351746,
      "learning_rate": 8.709239130434783e-06,
      "loss": 10.7987,
      "step": 880
    },
    {
      "epoch": 0.7752613240418118,
      "grad_norm": 0.08991510421037674,
      "learning_rate": 8.675271739130435e-06,
      "loss": 10.7976,
      "step": 890
    },
    {
      "epoch": 0.7839721254355401,
      "grad_norm": 0.09141765534877777,
      "learning_rate": 8.641304347826088e-06,
      "loss": 10.799,
      "step": 900
    },
    {
      "epoch": 0.7926829268292683,
      "grad_norm": 0.09670023620128632,
      "learning_rate": 8.60733695652174e-06,
      "loss": 10.7973,
      "step": 910
    },
    {
      "epoch": 0.8013937282229965,
      "grad_norm": 0.09337509423494339,
      "learning_rate": 8.573369565217391e-06,
      "loss": 10.7985,
      "step": 920
    },
    {
      "epoch": 0.8101045296167247,
      "grad_norm": 0.09618163853883743,
      "learning_rate": 8.539402173913044e-06,
      "loss": 10.7989,
      "step": 930
    },
    {
      "epoch": 0.818815331010453,
      "grad_norm": 0.09537822753190994,
      "learning_rate": 8.505434782608697e-06,
      "loss": 10.7981,
      "step": 940
    },
    {
      "epoch": 0.8275261324041812,
      "grad_norm": 0.09312313050031662,
      "learning_rate": 8.47146739130435e-06,
      "loss": 10.7984,
      "step": 950
    },
    {
      "epoch": 0.8362369337979094,
      "grad_norm": 0.09118187427520752,
      "learning_rate": 8.4375e-06,
      "loss": 10.7981,
      "step": 960
    },
    {
      "epoch": 0.8449477351916377,
      "grad_norm": 0.0919727087020874,
      "learning_rate": 8.403532608695653e-06,
      "loss": 10.7978,
      "step": 970
    },
    {
      "epoch": 0.8536585365853658,
      "grad_norm": 0.09706488996744156,
      "learning_rate": 8.369565217391305e-06,
      "loss": 10.7979,
      "step": 980
    },
    {
      "epoch": 0.8623693379790941,
      "grad_norm": 0.0944119319319725,
      "learning_rate": 8.335597826086958e-06,
      "loss": 10.7983,
      "step": 990
    },
    {
      "epoch": 0.8710801393728222,
      "grad_norm": 0.1016387864947319,
      "learning_rate": 8.301630434782609e-06,
      "loss": 10.7973,
      "step": 1000
    },
    {
      "epoch": 0.8797909407665505,
      "grad_norm": 0.10198845714330673,
      "learning_rate": 8.267663043478261e-06,
      "loss": 10.7984,
      "step": 1010
    },
    {
      "epoch": 0.8885017421602788,
      "grad_norm": 0.10102390497922897,
      "learning_rate": 8.233695652173914e-06,
      "loss": 10.7971,
      "step": 1020
    },
    {
      "epoch": 0.8972125435540069,
      "grad_norm": 0.0972791463136673,
      "learning_rate": 8.199728260869567e-06,
      "loss": 10.7971,
      "step": 1030
    },
    {
      "epoch": 0.9059233449477352,
      "grad_norm": 0.09343789517879486,
      "learning_rate": 8.165760869565218e-06,
      "loss": 10.7978,
      "step": 1040
    },
    {
      "epoch": 0.9146341463414634,
      "grad_norm": 0.09384151548147202,
      "learning_rate": 8.13179347826087e-06,
      "loss": 10.797,
      "step": 1050
    },
    {
      "epoch": 0.9233449477351916,
      "grad_norm": 0.09324201196432114,
      "learning_rate": 8.097826086956523e-06,
      "loss": 10.7969,
      "step": 1060
    },
    {
      "epoch": 0.9320557491289199,
      "grad_norm": 0.09341388940811157,
      "learning_rate": 8.063858695652175e-06,
      "loss": 10.7972,
      "step": 1070
    },
    {
      "epoch": 0.9407665505226481,
      "grad_norm": 0.09563016146421432,
      "learning_rate": 8.029891304347826e-06,
      "loss": 10.7978,
      "step": 1080
    },
    {
      "epoch": 0.9494773519163763,
      "grad_norm": 0.09752058237791061,
      "learning_rate": 7.995923913043479e-06,
      "loss": 10.7962,
      "step": 1090
    },
    {
      "epoch": 0.9581881533101045,
      "grad_norm": 0.09652635455131531,
      "learning_rate": 7.961956521739131e-06,
      "loss": 10.7968,
      "step": 1100
    },
    {
      "epoch": 0.9668989547038328,
      "grad_norm": 0.097730852663517,
      "learning_rate": 7.927989130434784e-06,
      "loss": 10.7967,
      "step": 1110
    },
    {
      "epoch": 0.975609756097561,
      "grad_norm": 0.09178122133016586,
      "learning_rate": 7.894021739130435e-06,
      "loss": 10.7963,
      "step": 1120
    },
    {
      "epoch": 0.9843205574912892,
      "grad_norm": 0.09718208760023117,
      "learning_rate": 7.860054347826088e-06,
      "loss": 10.7967,
      "step": 1130
    },
    {
      "epoch": 0.9930313588850174,
      "grad_norm": 0.09228435158729553,
      "learning_rate": 7.82608695652174e-06,
      "loss": 10.7973,
      "step": 1140
    },
    {
      "epoch": 1.0017421602787457,
      "grad_norm": 0.08885621279478073,
      "learning_rate": 7.792119565217393e-06,
      "loss": 10.796,
      "step": 1150
    },
    {
      "epoch": 1.0104529616724738,
      "grad_norm": 0.09165474027395248,
      "learning_rate": 7.758152173913044e-06,
      "loss": 10.7957,
      "step": 1160
    },
    {
      "epoch": 1.019163763066202,
      "grad_norm": 0.09650120139122009,
      "learning_rate": 7.724184782608696e-06,
      "loss": 10.7967,
      "step": 1170
    },
    {
      "epoch": 1.0278745644599303,
      "grad_norm": 0.10563351958990097,
      "learning_rate": 7.690217391304349e-06,
      "loss": 10.7963,
      "step": 1180
    },
    {
      "epoch": 1.0365853658536586,
      "grad_norm": 0.09407645463943481,
      "learning_rate": 7.656250000000001e-06,
      "loss": 10.7958,
      "step": 1190
    },
    {
      "epoch": 1.0452961672473868,
      "grad_norm": 0.100870780646801,
      "learning_rate": 7.622282608695652e-06,
      "loss": 10.7961,
      "step": 1200
    },
    {
      "epoch": 1.054006968641115,
      "grad_norm": 0.09036817401647568,
      "learning_rate": 7.588315217391306e-06,
      "loss": 10.7965,
      "step": 1210
    },
    {
      "epoch": 1.0627177700348431,
      "grad_norm": 0.09792335331439972,
      "learning_rate": 7.5543478260869576e-06,
      "loss": 10.7951,
      "step": 1220
    },
    {
      "epoch": 1.0714285714285714,
      "grad_norm": 0.10280462354421616,
      "learning_rate": 7.520380434782609e-06,
      "loss": 10.796,
      "step": 1230
    },
    {
      "epoch": 1.0801393728222997,
      "grad_norm": 0.09284704178571701,
      "learning_rate": 7.486413043478261e-06,
      "loss": 10.796,
      "step": 1240
    },
    {
      "epoch": 1.088850174216028,
      "grad_norm": 0.09495861083269119,
      "learning_rate": 7.4524456521739145e-06,
      "loss": 10.7954,
      "step": 1250
    },
    {
      "epoch": 1.0975609756097562,
      "grad_norm": 0.0963548794388771,
      "learning_rate": 7.418478260869566e-06,
      "loss": 10.7954,
      "step": 1260
    },
    {
      "epoch": 1.1062717770034842,
      "grad_norm": 0.10302776098251343,
      "learning_rate": 7.384510869565218e-06,
      "loss": 10.7946,
      "step": 1270
    },
    {
      "epoch": 1.1149825783972125,
      "grad_norm": 0.09135697036981583,
      "learning_rate": 7.35054347826087e-06,
      "loss": 10.7951,
      "step": 1280
    },
    {
      "epoch": 1.1236933797909407,
      "grad_norm": 0.09077905118465424,
      "learning_rate": 7.316576086956522e-06,
      "loss": 10.7949,
      "step": 1290
    },
    {
      "epoch": 1.132404181184669,
      "grad_norm": 0.09623997658491135,
      "learning_rate": 7.282608695652175e-06,
      "loss": 10.795,
      "step": 1300
    },
    {
      "epoch": 1.1411149825783973,
      "grad_norm": 0.09430749714374542,
      "learning_rate": 7.248641304347827e-06,
      "loss": 10.7954,
      "step": 1310
    },
    {
      "epoch": 1.1498257839721253,
      "grad_norm": 0.09416501224040985,
      "learning_rate": 7.2146739130434785e-06,
      "loss": 10.795,
      "step": 1320
    },
    {
      "epoch": 1.1585365853658536,
      "grad_norm": 0.09839834272861481,
      "learning_rate": 7.180706521739131e-06,
      "loss": 10.795,
      "step": 1330
    },
    {
      "epoch": 1.1672473867595818,
      "grad_norm": 0.09414810687303543,
      "learning_rate": 7.146739130434784e-06,
      "loss": 10.7949,
      "step": 1340
    },
    {
      "epoch": 1.17595818815331,
      "grad_norm": 0.09453831613063812,
      "learning_rate": 7.1127717391304354e-06,
      "loss": 10.942,
      "step": 1350
    },
    {
      "epoch": 1.1846689895470384,
      "grad_norm": 637.3465576171875,
      "learning_rate": 7.078804347826087e-06,
      "loss": 10.8174,
      "step": 1360
    },
    {
      "epoch": 1.1933797909407666,
      "grad_norm": 0.09487204998731613,
      "learning_rate": 7.04483695652174e-06,
      "loss": 16.214,
      "step": 1370
    },
    {
      "epoch": 1.202090592334495,
      "grad_norm": 0.08683289587497711,
      "learning_rate": 7.0108695652173915e-06,
      "loss": 10.7961,
      "step": 1380
    },
    {
      "epoch": 1.210801393728223,
      "grad_norm": 0.09138838201761246,
      "learning_rate": 6.976902173913044e-06,
      "loss": 10.7954,
      "step": 1390
    },
    {
      "epoch": 1.2195121951219512,
      "grad_norm": 0.09047773480415344,
      "learning_rate": 6.942934782608696e-06,
      "loss": 10.7941,
      "step": 1400
    },
    {
      "epoch": 1.2282229965156795,
      "grad_norm": 0.10373370349407196,
      "learning_rate": 6.9089673913043485e-06,
      "loss": 10.7933,
      "step": 1410
    },
    {
      "epoch": 1.2369337979094077,
      "grad_norm": 0.0910143107175827,
      "learning_rate": 6.875e-06,
      "loss": 10.7952,
      "step": 1420
    },
    {
      "epoch": 1.245644599303136,
      "grad_norm": 0.08921236544847488,
      "learning_rate": 6.841032608695653e-06,
      "loss": 10.7945,
      "step": 1430
    },
    {
      "epoch": 1.254355400696864,
      "grad_norm": 0.09373684972524643,
      "learning_rate": 6.8070652173913054e-06,
      "loss": 10.795,
      "step": 1440
    },
    {
      "epoch": 1.2630662020905923,
      "grad_norm": 0.09562421590089798,
      "learning_rate": 6.773097826086957e-06,
      "loss": 10.7948,
      "step": 1450
    },
    {
      "epoch": 1.2717770034843205,
      "grad_norm": 0.0909007340669632,
      "learning_rate": 6.739130434782609e-06,
      "loss": 10.7945,
      "step": 1460
    },
    {
      "epoch": 1.2804878048780488,
      "grad_norm": 0.09700188040733337,
      "learning_rate": 6.705163043478261e-06,
      "loss": 10.7947,
      "step": 1470
    },
    {
      "epoch": 1.289198606271777,
      "grad_norm": 0.09472058713436127,
      "learning_rate": 6.671195652173914e-06,
      "loss": 10.7942,
      "step": 1480
    },
    {
      "epoch": 1.297909407665505,
      "grad_norm": 0.09371300041675568,
      "learning_rate": 6.637228260869566e-06,
      "loss": 10.793,
      "step": 1490
    },
    {
      "epoch": 1.3066202090592334,
      "grad_norm": 0.08980853110551834,
      "learning_rate": 6.603260869565218e-06,
      "loss": 10.7936,
      "step": 1500
    },
    {
      "epoch": 1.3153310104529616,
      "grad_norm": 0.09769923985004425,
      "learning_rate": 6.569293478260869e-06,
      "loss": 10.7939,
      "step": 1510
    },
    {
      "epoch": 1.32404181184669,
      "grad_norm": 0.0956735908985138,
      "learning_rate": 6.535326086956523e-06,
      "loss": 10.7934,
      "step": 1520
    },
    {
      "epoch": 1.3327526132404182,
      "grad_norm": 0.09061311185359955,
      "learning_rate": 6.501358695652175e-06,
      "loss": 10.7937,
      "step": 1530
    },
    {
      "epoch": 1.3414634146341464,
      "grad_norm": 72412.171875,
      "learning_rate": 6.467391304347826e-06,
      "loss": 13.1839,
      "step": 1540
    },
    {
      "epoch": 1.3501742160278747,
      "grad_norm": 0.09127330780029297,
      "learning_rate": 6.433423913043478e-06,
      "loss": 13.2599,
      "step": 1550
    },
    {
      "epoch": 1.3588850174216027,
      "grad_norm": 0.09337225556373596,
      "learning_rate": 6.3994565217391316e-06,
      "loss": 10.8129,
      "step": 1560
    },
    {
      "epoch": 1.367595818815331,
      "grad_norm": 0.10567159950733185,
      "learning_rate": 6.365489130434783e-06,
      "loss": 10.7932,
      "step": 1570
    },
    {
      "epoch": 1.3763066202090593,
      "grad_norm": 0.09582757949829102,
      "learning_rate": 6.331521739130435e-06,
      "loss": 10.7934,
      "step": 1580
    },
    {
      "epoch": 1.3850174216027875,
      "grad_norm": 0.09011757373809814,
      "learning_rate": 6.297554347826087e-06,
      "loss": 10.7934,
      "step": 1590
    },
    {
      "epoch": 1.3937282229965158,
      "grad_norm": 0.09461648017168045,
      "learning_rate": 6.26358695652174e-06,
      "loss": 10.7922,
      "step": 1600
    },
    {
      "epoch": 1.4024390243902438,
      "grad_norm": 0.0957835465669632,
      "learning_rate": 6.229619565217392e-06,
      "loss": 10.793,
      "step": 1610
    },
    {
      "epoch": 1.411149825783972,
      "grad_norm": 0.09626618772745132,
      "learning_rate": 6.195652173913044e-06,
      "loss": 10.7918,
      "step": 1620
    },
    {
      "epoch": 1.4198606271777003,
      "grad_norm": 0.09974712133407593,
      "learning_rate": 6.1616847826086955e-06,
      "loss": 10.7932,
      "step": 1630
    },
    {
      "epoch": 1.4285714285714286,
      "grad_norm": 0.09606435894966125,
      "learning_rate": 6.127717391304349e-06,
      "loss": 10.7926,
      "step": 1640
    },
    {
      "epoch": 1.4372822299651569,
      "grad_norm": 0.10175999999046326,
      "learning_rate": 6.093750000000001e-06,
      "loss": 10.7929,
      "step": 1650
    },
    {
      "epoch": 1.445993031358885,
      "grad_norm": 0.09452734142541885,
      "learning_rate": 6.0597826086956525e-06,
      "loss": 10.7936,
      "step": 1660
    },
    {
      "epoch": 1.4547038327526132,
      "grad_norm": 0.09544497728347778,
      "learning_rate": 6.025815217391305e-06,
      "loss": 10.793,
      "step": 1670
    },
    {
      "epoch": 1.4634146341463414,
      "grad_norm": 0.0867006853222847,
      "learning_rate": 5.991847826086957e-06,
      "loss": 10.7931,
      "step": 1680
    },
    {
      "epoch": 1.4721254355400697,
      "grad_norm": 0.09889491647481918,
      "learning_rate": 5.9578804347826094e-06,
      "loss": 10.7926,
      "step": 1690
    },
    {
      "epoch": 1.480836236933798,
      "grad_norm": 0.09840565174818039,
      "learning_rate": 5.923913043478261e-06,
      "loss": 10.7926,
      "step": 1700
    },
    {
      "epoch": 1.489547038327526,
      "grad_norm": 0.09129170328378677,
      "learning_rate": 5.889945652173914e-06,
      "loss": 10.7921,
      "step": 1710
    },
    {
      "epoch": 1.4982578397212545,
      "grad_norm": 0.09560216963291168,
      "learning_rate": 5.8559782608695656e-06,
      "loss": 10.7924,
      "step": 1720
    },
    {
      "epoch": 1.5069686411149825,
      "grad_norm": 0.09289251267910004,
      "learning_rate": 5.822010869565218e-06,
      "loss": 10.792,
      "step": 1730
    },
    {
      "epoch": 1.5156794425087108,
      "grad_norm": 0.09835352748632431,
      "learning_rate": 5.78804347826087e-06,
      "loss": 10.7917,
      "step": 1740
    },
    {
      "epoch": 1.524390243902439,
      "grad_norm": 0.10089673846960068,
      "learning_rate": 5.7540760869565225e-06,
      "loss": 10.7921,
      "step": 1750
    },
    {
      "epoch": 1.533101045296167,
      "grad_norm": 0.0979476273059845,
      "learning_rate": 5.720108695652174e-06,
      "loss": 10.7933,
      "step": 1760
    },
    {
      "epoch": 1.5418118466898956,
      "grad_norm": 0.10268489271402359,
      "learning_rate": 5.686141304347826e-06,
      "loss": 10.7946,
      "step": 1770
    },
    {
      "epoch": 1.5505226480836236,
      "grad_norm": 0.09039770066738129,
      "learning_rate": 5.652173913043479e-06,
      "loss": 10.7923,
      "step": 1780
    },
    {
      "epoch": 1.5592334494773519,
      "grad_norm": 0.09356367588043213,
      "learning_rate": 5.618206521739131e-06,
      "loss": 10.7918,
      "step": 1790
    },
    {
      "epoch": 1.5679442508710801,
      "grad_norm": 0.09169076383113861,
      "learning_rate": 5.584239130434783e-06,
      "loss": 10.7919,
      "step": 1800
    },
    {
      "epoch": 1.5766550522648084,
      "grad_norm": 0.09374472498893738,
      "learning_rate": 5.550271739130435e-06,
      "loss": 10.7922,
      "step": 1810
    },
    {
      "epoch": 1.5853658536585367,
      "grad_norm": 0.09261377900838852,
      "learning_rate": 5.516304347826087e-06,
      "loss": 10.7911,
      "step": 1820
    },
    {
      "epoch": 1.5940766550522647,
      "grad_norm": 0.0917358323931694,
      "learning_rate": 5.48233695652174e-06,
      "loss": 10.7926,
      "step": 1830
    },
    {
      "epoch": 1.6027874564459932,
      "grad_norm": 0.09259490668773651,
      "learning_rate": 5.448369565217392e-06,
      "loss": 10.7913,
      "step": 1840
    },
    {
      "epoch": 1.6114982578397212,
      "grad_norm": 0.09436757117509842,
      "learning_rate": 5.4144021739130434e-06,
      "loss": 10.7907,
      "step": 1850
    },
    {
      "epoch": 1.6202090592334495,
      "grad_norm": 0.0956592783331871,
      "learning_rate": 5.380434782608695e-06,
      "loss": 10.7925,
      "step": 1860
    },
    {
      "epoch": 1.6289198606271778,
      "grad_norm": 0.09467855840921402,
      "learning_rate": 5.346467391304349e-06,
      "loss": 10.7921,
      "step": 1870
    },
    {
      "epoch": 1.6376306620209058,
      "grad_norm": 0.0989622175693512,
      "learning_rate": 5.3125e-06,
      "loss": 10.7918,
      "step": 1880
    },
    {
      "epoch": 1.6463414634146343,
      "grad_norm": 0.09564294666051865,
      "learning_rate": 5.278532608695652e-06,
      "loss": 10.7913,
      "step": 1890
    },
    {
      "epoch": 1.6550522648083623,
      "grad_norm": 0.09419780969619751,
      "learning_rate": 5.244565217391306e-06,
      "loss": 10.7917,
      "step": 1900
    },
    {
      "epoch": 1.6637630662020906,
      "grad_norm": 0.0949205756187439,
      "learning_rate": 5.210597826086957e-06,
      "loss": 10.791,
      "step": 1910
    },
    {
      "epoch": 1.6724738675958188,
      "grad_norm": 0.09357789158821106,
      "learning_rate": 5.176630434782609e-06,
      "loss": 10.791,
      "step": 1920
    },
    {
      "epoch": 1.6811846689895469,
      "grad_norm": 0.0988670364022255,
      "learning_rate": 5.142663043478261e-06,
      "loss": 10.7917,
      "step": 1930
    },
    {
      "epoch": 1.6898954703832754,
      "grad_norm": 0.09996125102043152,
      "learning_rate": 5.108695652173914e-06,
      "loss": 10.7912,
      "step": 1940
    },
    {
      "epoch": 1.6986062717770034,
      "grad_norm": 0.09966638684272766,
      "learning_rate": 5.074728260869566e-06,
      "loss": 10.825,
      "step": 1950
    },
    {
      "epoch": 1.7073170731707317,
      "grad_norm": 0.09682171791791916,
      "learning_rate": 5.040760869565218e-06,
      "loss": 10.7906,
      "step": 1960
    },
    {
      "epoch": 1.71602787456446,
      "grad_norm": 0.09694927930831909,
      "learning_rate": 5.0067934782608696e-06,
      "loss": 10.7911,
      "step": 1970
    },
    {
      "epoch": 1.7247386759581882,
      "grad_norm": 0.09398549050092697,
      "learning_rate": 4.972826086956522e-06,
      "loss": 10.7909,
      "step": 1980
    },
    {
      "epoch": 1.7334494773519165,
      "grad_norm": 0.08475075662136078,
      "learning_rate": 4.938858695652175e-06,
      "loss": 10.7916,
      "step": 1990
    },
    {
      "epoch": 1.7421602787456445,
      "grad_norm": 0.09494160860776901,
      "learning_rate": 4.9048913043478265e-06,
      "loss": 10.7911,
      "step": 2000
    },
    {
      "epoch": 1.750871080139373,
      "grad_norm": 0.08939459174871445,
      "learning_rate": 4.870923913043479e-06,
      "loss": 10.7906,
      "step": 2010
    },
    {
      "epoch": 1.759581881533101,
      "grad_norm": 0.09646463394165039,
      "learning_rate": 4.836956521739131e-06,
      "loss": 10.7911,
      "step": 2020
    },
    {
      "epoch": 1.7682926829268293,
      "grad_norm": 0.09423504769802094,
      "learning_rate": 4.8029891304347835e-06,
      "loss": 10.7906,
      "step": 2030
    },
    {
      "epoch": 1.7770034843205575,
      "grad_norm": 0.09972324222326279,
      "learning_rate": 4.769021739130435e-06,
      "loss": 10.7914,
      "step": 2040
    },
    {
      "epoch": 1.7857142857142856,
      "grad_norm": 0.10339873284101486,
      "learning_rate": 4.735054347826088e-06,
      "loss": 10.7898,
      "step": 2050
    },
    {
      "epoch": 1.794425087108014,
      "grad_norm": 0.09187936782836914,
      "learning_rate": 4.7010869565217396e-06,
      "loss": 10.7915,
      "step": 2060
    },
    {
      "epoch": 1.8031358885017421,
      "grad_norm": 0.09024845063686371,
      "learning_rate": 4.667119565217391e-06,
      "loss": 10.7902,
      "step": 2070
    },
    {
      "epoch": 1.8118466898954704,
      "grad_norm": 0.08929532021284103,
      "learning_rate": 4.633152173913044e-06,
      "loss": 10.7905,
      "step": 2080
    },
    {
      "epoch": 1.8205574912891986,
      "grad_norm": 0.09708672016859055,
      "learning_rate": 4.599184782608696e-06,
      "loss": 10.7901,
      "step": 2090
    },
    {
      "epoch": 1.8292682926829267,
      "grad_norm": 0.10239408910274506,
      "learning_rate": 4.565217391304348e-06,
      "loss": 10.7896,
      "step": 2100
    },
    {
      "epoch": 1.8379790940766552,
      "grad_norm": 0.10148520767688751,
      "learning_rate": 4.53125e-06,
      "loss": 10.7906,
      "step": 2110
    },
    {
      "epoch": 1.8466898954703832,
      "grad_norm": 0.09579440951347351,
      "learning_rate": 4.497282608695653e-06,
      "loss": 10.7903,
      "step": 2120
    },
    {
      "epoch": 1.8554006968641115,
      "grad_norm": 0.09891035407781601,
      "learning_rate": 4.463315217391304e-06,
      "loss": 10.7904,
      "step": 2130
    },
    {
      "epoch": 1.8641114982578397,
      "grad_norm": 0.09411191940307617,
      "learning_rate": 4.429347826086957e-06,
      "loss": 10.7899,
      "step": 2140
    },
    {
      "epoch": 1.872822299651568,
      "grad_norm": 0.10932313650846481,
      "learning_rate": 4.395380434782609e-06,
      "loss": 10.7907,
      "step": 2150
    },
    {
      "epoch": 1.8815331010452963,
      "grad_norm": 0.08247391879558563,
      "learning_rate": 4.361413043478261e-06,
      "loss": 10.7902,
      "step": 2160
    },
    {
      "epoch": 1.8902439024390243,
      "grad_norm": 0.0989106297492981,
      "learning_rate": 4.327445652173913e-06,
      "loss": 10.79,
      "step": 2170
    },
    {
      "epoch": 1.8989547038327528,
      "grad_norm": 0.09022705256938934,
      "learning_rate": 4.293478260869566e-06,
      "loss": 10.7893,
      "step": 2180
    },
    {
      "epoch": 1.9076655052264808,
      "grad_norm": 0.09723115712404251,
      "learning_rate": 4.2595108695652174e-06,
      "loss": 10.7892,
      "step": 2190
    },
    {
      "epoch": 1.916376306620209,
      "grad_norm": 0.09660723060369492,
      "learning_rate": 4.22554347826087e-06,
      "loss": 10.7899,
      "step": 2200
    },
    {
      "epoch": 1.9250871080139373,
      "grad_norm": 0.09882649034261703,
      "learning_rate": 4.191576086956522e-06,
      "loss": 10.7896,
      "step": 2210
    },
    {
      "epoch": 1.9337979094076654,
      "grad_norm": 0.08623263239860535,
      "learning_rate": 4.157608695652174e-06,
      "loss": 10.7894,
      "step": 2220
    },
    {
      "epoch": 1.9425087108013939,
      "grad_norm": 0.09180337190628052,
      "learning_rate": 4.123641304347826e-06,
      "loss": 10.7896,
      "step": 2230
    },
    {
      "epoch": 1.951219512195122,
      "grad_norm": 0.09379716962575912,
      "learning_rate": 4.089673913043479e-06,
      "loss": 10.79,
      "step": 2240
    },
    {
      "epoch": 1.9599303135888502,
      "grad_norm": 0.09439699351787567,
      "learning_rate": 4.0557065217391305e-06,
      "loss": 10.79,
      "step": 2250
    },
    {
      "epoch": 1.9686411149825784,
      "grad_norm": 0.09595078229904175,
      "learning_rate": 4.021739130434783e-06,
      "loss": 10.7891,
      "step": 2260
    },
    {
      "epoch": 1.9773519163763065,
      "grad_norm": 0.09709908813238144,
      "learning_rate": 3.987771739130435e-06,
      "loss": 10.7899,
      "step": 2270
    },
    {
      "epoch": 1.986062717770035,
      "grad_norm": 0.0975862666964531,
      "learning_rate": 3.9538043478260875e-06,
      "loss": 10.7885,
      "step": 2280
    },
    {
      "epoch": 1.994773519163763,
      "grad_norm": 0.09871925413608551,
      "learning_rate": 3.919836956521739e-06,
      "loss": 10.7895,
      "step": 2290
    },
    {
      "epoch": 2.0034843205574915,
      "grad_norm": 0.09349826723337173,
      "learning_rate": 3.885869565217392e-06,
      "loss": 10.7883,
      "step": 2300
    },
    {
      "epoch": 2.0121951219512195,
      "grad_norm": 0.09288814663887024,
      "learning_rate": 3.8519021739130436e-06,
      "loss": 10.7887,
      "step": 2310
    },
    {
      "epoch": 2.0209059233449476,
      "grad_norm": 0.093532495200634,
      "learning_rate": 3.817934782608696e-06,
      "loss": 10.7892,
      "step": 2320
    },
    {
      "epoch": 2.029616724738676,
      "grad_norm": 0.10073115676641464,
      "learning_rate": 3.783967391304348e-06,
      "loss": 10.789,
      "step": 2330
    },
    {
      "epoch": 2.038327526132404,
      "grad_norm": 0.09286133199930191,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 10.7899,
      "step": 2340
    },
    {
      "epoch": 2.0470383275261326,
      "grad_norm": 0.08912254869937897,
      "learning_rate": 3.7160326086956527e-06,
      "loss": 10.7896,
      "step": 2350
    },
    {
      "epoch": 2.0557491289198606,
      "grad_norm": 0.09220988303422928,
      "learning_rate": 3.6820652173913044e-06,
      "loss": 10.7892,
      "step": 2360
    },
    {
      "epoch": 2.0644599303135887,
      "grad_norm": 67073.5859375,
      "learning_rate": 3.648097826086957e-06,
      "loss": 12.4148,
      "step": 2370
    },
    {
      "epoch": 2.073170731707317,
      "grad_norm": 0.09415777027606964,
      "learning_rate": 3.614130434782609e-06,
      "loss": 12.1967,
      "step": 2380
    },
    {
      "epoch": 2.081881533101045,
      "grad_norm": 0.09904878586530685,
      "learning_rate": 3.5801630434782614e-06,
      "loss": 10.7889,
      "step": 2390
    },
    {
      "epoch": 2.0905923344947737,
      "grad_norm": 0.09543213993310928,
      "learning_rate": 3.546195652173913e-06,
      "loss": 10.7888,
      "step": 2400
    },
    {
      "epoch": 2.0993031358885017,
      "grad_norm": 0.09227117896080017,
      "learning_rate": 3.5122282608695658e-06,
      "loss": 10.7894,
      "step": 2410
    },
    {
      "epoch": 2.10801393728223,
      "grad_norm": 0.09841565787792206,
      "learning_rate": 3.4782608695652175e-06,
      "loss": 10.7894,
      "step": 2420
    },
    {
      "epoch": 2.1167247386759582,
      "grad_norm": 0.093239426612854,
      "learning_rate": 3.44429347826087e-06,
      "loss": 10.7894,
      "step": 2430
    },
    {
      "epoch": 2.1254355400696863,
      "grad_norm": 0.09383922815322876,
      "learning_rate": 3.410326086956522e-06,
      "loss": 10.7882,
      "step": 2440
    },
    {
      "epoch": 2.1341463414634148,
      "grad_norm": 0.09073406457901001,
      "learning_rate": 3.3763586956521745e-06,
      "loss": 10.7897,
      "step": 2450
    },
    {
      "epoch": 2.142857142857143,
      "grad_norm": 0.09569002687931061,
      "learning_rate": 3.3423913043478262e-06,
      "loss": 10.7897,
      "step": 2460
    },
    {
      "epoch": 2.1515679442508713,
      "grad_norm": 0.0955529659986496,
      "learning_rate": 3.308423913043479e-06,
      "loss": 10.7883,
      "step": 2470
    },
    {
      "epoch": 2.1602787456445993,
      "grad_norm": 0.10046317428350449,
      "learning_rate": 3.2744565217391306e-06,
      "loss": 10.7895,
      "step": 2480
    },
    {
      "epoch": 2.1689895470383274,
      "grad_norm": 0.09671170264482498,
      "learning_rate": 3.240489130434783e-06,
      "loss": 10.789,
      "step": 2490
    },
    {
      "epoch": 2.177700348432056,
      "grad_norm": 0.09719261527061462,
      "learning_rate": 3.206521739130435e-06,
      "loss": 10.788,
      "step": 2500
    },
    {
      "epoch": 2.186411149825784,
      "grad_norm": 0.09898174554109573,
      "learning_rate": 3.172554347826087e-06,
      "loss": 10.788,
      "step": 2510
    },
    {
      "epoch": 2.1951219512195124,
      "grad_norm": 0.09254367649555206,
      "learning_rate": 3.1385869565217393e-06,
      "loss": 10.788,
      "step": 2520
    },
    {
      "epoch": 2.2038327526132404,
      "grad_norm": 0.10173038393259048,
      "learning_rate": 3.1046195652173915e-06,
      "loss": 10.7876,
      "step": 2530
    },
    {
      "epoch": 2.2125435540069684,
      "grad_norm": 0.0905982032418251,
      "learning_rate": 3.0706521739130436e-06,
      "loss": 10.7889,
      "step": 2540
    },
    {
      "epoch": 2.221254355400697,
      "grad_norm": 0.09394685924053192,
      "learning_rate": 3.036684782608696e-06,
      "loss": 10.7884,
      "step": 2550
    },
    {
      "epoch": 2.229965156794425,
      "grad_norm": 0.10268858075141907,
      "learning_rate": 3.002717391304348e-06,
      "loss": 10.801,
      "step": 2560
    },
    {
      "epoch": 2.2386759581881535,
      "grad_norm": 0.08987828344106674,
      "learning_rate": 2.96875e-06,
      "loss": 10.7892,
      "step": 2570
    },
    {
      "epoch": 2.2473867595818815,
      "grad_norm": 0.09386924654245377,
      "learning_rate": 2.9347826086956528e-06,
      "loss": 10.7879,
      "step": 2580
    },
    {
      "epoch": 2.2560975609756095,
      "grad_norm": 0.08885066956281662,
      "learning_rate": 2.9008152173913045e-06,
      "loss": 10.7887,
      "step": 2590
    },
    {
      "epoch": 2.264808362369338,
      "grad_norm": 0.09525702148675919,
      "learning_rate": 2.866847826086957e-06,
      "loss": 10.7885,
      "step": 2600
    },
    {
      "epoch": 2.273519163763066,
      "grad_norm": 0.09443964064121246,
      "learning_rate": 2.832880434782609e-06,
      "loss": 10.7887,
      "step": 2610
    },
    {
      "epoch": 2.2822299651567945,
      "grad_norm": 0.09282263368368149,
      "learning_rate": 2.7989130434782615e-06,
      "loss": 10.7889,
      "step": 2620
    },
    {
      "epoch": 2.2909407665505226,
      "grad_norm": 0.09663678705692291,
      "learning_rate": 2.7649456521739132e-06,
      "loss": 10.7885,
      "step": 2630
    },
    {
      "epoch": 2.2996515679442506,
      "grad_norm": 0.09053708612918854,
      "learning_rate": 2.730978260869566e-06,
      "loss": 10.7881,
      "step": 2640
    },
    {
      "epoch": 2.308362369337979,
      "grad_norm": 0.09515667706727982,
      "learning_rate": 2.6970108695652176e-06,
      "loss": 10.7882,
      "step": 2650
    },
    {
      "epoch": 2.317073170731707,
      "grad_norm": 0.09767191112041473,
      "learning_rate": 2.6630434782608698e-06,
      "loss": 10.7886,
      "step": 2660
    },
    {
      "epoch": 2.3257839721254356,
      "grad_norm": 0.09252949804067612,
      "learning_rate": 2.629076086956522e-06,
      "loss": 10.7883,
      "step": 2670
    },
    {
      "epoch": 2.3344947735191637,
      "grad_norm": 0.09851150959730148,
      "learning_rate": 2.595108695652174e-06,
      "loss": 10.7878,
      "step": 2680
    },
    {
      "epoch": 2.343205574912892,
      "grad_norm": 0.09727566689252853,
      "learning_rate": 2.5611413043478263e-06,
      "loss": 10.7894,
      "step": 2690
    },
    {
      "epoch": 2.35191637630662,
      "grad_norm": 0.09644806385040283,
      "learning_rate": 2.5271739130434785e-06,
      "loss": 10.7872,
      "step": 2700
    },
    {
      "epoch": 2.3606271777003482,
      "grad_norm": 0.09369703382253647,
      "learning_rate": 2.4932065217391306e-06,
      "loss": 10.788,
      "step": 2710
    },
    {
      "epoch": 2.3693379790940767,
      "grad_norm": 0.0991264134645462,
      "learning_rate": 2.459239130434783e-06,
      "loss": 10.788,
      "step": 2720
    },
    {
      "epoch": 2.3780487804878048,
      "grad_norm": 0.09739573299884796,
      "learning_rate": 2.425271739130435e-06,
      "loss": 10.7879,
      "step": 2730
    },
    {
      "epoch": 2.3867595818815333,
      "grad_norm": 0.10022178292274475,
      "learning_rate": 2.391304347826087e-06,
      "loss": 10.7881,
      "step": 2740
    },
    {
      "epoch": 2.3954703832752613,
      "grad_norm": 0.09441060572862625,
      "learning_rate": 2.3573369565217393e-06,
      "loss": 10.7879,
      "step": 2750
    },
    {
      "epoch": 2.40418118466899,
      "grad_norm": 0.09166783839464188,
      "learning_rate": 2.3233695652173915e-06,
      "loss": 10.7873,
      "step": 2760
    },
    {
      "epoch": 2.412891986062718,
      "grad_norm": 0.09163929522037506,
      "learning_rate": 2.2894021739130437e-06,
      "loss": 10.7878,
      "step": 2770
    },
    {
      "epoch": 2.421602787456446,
      "grad_norm": 0.09094561636447906,
      "learning_rate": 2.255434782608696e-06,
      "loss": 10.7885,
      "step": 2780
    },
    {
      "epoch": 2.4303135888501743,
      "grad_norm": 0.09401146322488785,
      "learning_rate": 2.221467391304348e-06,
      "loss": 10.7882,
      "step": 2790
    },
    {
      "epoch": 2.4390243902439024,
      "grad_norm": 0.08646171540021896,
      "learning_rate": 2.1875000000000002e-06,
      "loss": 10.7958,
      "step": 2800
    },
    {
      "epoch": 2.447735191637631,
      "grad_norm": 0.09187670797109604,
      "learning_rate": 2.1535326086956524e-06,
      "loss": 10.7879,
      "step": 2810
    },
    {
      "epoch": 2.456445993031359,
      "grad_norm": 0.09049776941537857,
      "learning_rate": 2.1195652173913046e-06,
      "loss": 10.7887,
      "step": 2820
    },
    {
      "epoch": 2.465156794425087,
      "grad_norm": 0.09695518016815186,
      "learning_rate": 2.0855978260869568e-06,
      "loss": 10.7878,
      "step": 2830
    },
    {
      "epoch": 2.4738675958188154,
      "grad_norm": 0.09298545867204666,
      "learning_rate": 2.051630434782609e-06,
      "loss": 10.7885,
      "step": 2840
    },
    {
      "epoch": 2.4825783972125435,
      "grad_norm": 0.09423473477363586,
      "learning_rate": 2.017663043478261e-06,
      "loss": 10.7879,
      "step": 2850
    },
    {
      "epoch": 2.491289198606272,
      "grad_norm": 0.0882716029882431,
      "learning_rate": 1.9836956521739133e-06,
      "loss": 10.7877,
      "step": 2860
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.09123384207487106,
      "learning_rate": 1.9497282608695655e-06,
      "loss": 10.788,
      "step": 2870
    },
    {
      "epoch": 2.508710801393728,
      "grad_norm": 0.09263134002685547,
      "learning_rate": 1.9157608695652176e-06,
      "loss": 10.788,
      "step": 2880
    },
    {
      "epoch": 2.5174216027874565,
      "grad_norm": 0.09086143225431442,
      "learning_rate": 1.8817934782608696e-06,
      "loss": 10.7876,
      "step": 2890
    },
    {
      "epoch": 2.5261324041811846,
      "grad_norm": 0.10285180062055588,
      "learning_rate": 1.8478260869565218e-06,
      "loss": 10.7872,
      "step": 2900
    },
    {
      "epoch": 2.534843205574913,
      "grad_norm": 0.09717818349599838,
      "learning_rate": 1.813858695652174e-06,
      "loss": 10.7878,
      "step": 2910
    },
    {
      "epoch": 2.543554006968641,
      "grad_norm": 0.09603049606084824,
      "learning_rate": 1.7798913043478264e-06,
      "loss": 10.7877,
      "step": 2920
    },
    {
      "epoch": 2.552264808362369,
      "grad_norm": 0.09593737125396729,
      "learning_rate": 1.7459239130434785e-06,
      "loss": 10.7865,
      "step": 2930
    },
    {
      "epoch": 2.5609756097560976,
      "grad_norm": 0.0944036915898323,
      "learning_rate": 1.7119565217391307e-06,
      "loss": 10.7876,
      "step": 2940
    },
    {
      "epoch": 2.5696864111498257,
      "grad_norm": 0.09109403192996979,
      "learning_rate": 1.6779891304347829e-06,
      "loss": 10.7866,
      "step": 2950
    },
    {
      "epoch": 2.578397212543554,
      "grad_norm": 0.09073559939861298,
      "learning_rate": 1.644021739130435e-06,
      "loss": 10.7882,
      "step": 2960
    },
    {
      "epoch": 2.587108013937282,
      "grad_norm": 0.09691879898309708,
      "learning_rate": 1.6100543478260872e-06,
      "loss": 10.7886,
      "step": 2970
    },
    {
      "epoch": 2.59581881533101,
      "grad_norm": 0.09810439497232437,
      "learning_rate": 1.5760869565217394e-06,
      "loss": 10.7873,
      "step": 2980
    },
    {
      "epoch": 2.6045296167247387,
      "grad_norm": 0.09767334908246994,
      "learning_rate": 1.5421195652173914e-06,
      "loss": 10.7872,
      "step": 2990
    },
    {
      "epoch": 2.6132404181184667,
      "grad_norm": 0.09681523591279984,
      "learning_rate": 1.5081521739130436e-06,
      "loss": 10.7872,
      "step": 3000
    },
    {
      "epoch": 2.6219512195121952,
      "grad_norm": 0.10315357148647308,
      "learning_rate": 1.4741847826086957e-06,
      "loss": 10.7877,
      "step": 3010
    },
    {
      "epoch": 2.6306620209059233,
      "grad_norm": 0.09027998894453049,
      "learning_rate": 1.440217391304348e-06,
      "loss": 10.794,
      "step": 3020
    },
    {
      "epoch": 2.6393728222996513,
      "grad_norm": 0.09546509385108948,
      "learning_rate": 1.40625e-06,
      "loss": 10.7875,
      "step": 3030
    },
    {
      "epoch": 2.64808362369338,
      "grad_norm": 0.09259118884801865,
      "learning_rate": 1.3722826086956523e-06,
      "loss": 10.787,
      "step": 3040
    },
    {
      "epoch": 2.6567944250871083,
      "grad_norm": 0.09348276257514954,
      "learning_rate": 1.3383152173913044e-06,
      "loss": 10.7886,
      "step": 3050
    },
    {
      "epoch": 2.6655052264808363,
      "grad_norm": 0.09209384769201279,
      "learning_rate": 1.3043478260869566e-06,
      "loss": 10.7878,
      "step": 3060
    },
    {
      "epoch": 2.6742160278745644,
      "grad_norm": 0.09436801075935364,
      "learning_rate": 1.2703804347826088e-06,
      "loss": 10.7879,
      "step": 3070
    },
    {
      "epoch": 2.682926829268293,
      "grad_norm": 0.09730960428714752,
      "learning_rate": 1.236413043478261e-06,
      "loss": 10.7882,
      "step": 3080
    },
    {
      "epoch": 2.691637630662021,
      "grad_norm": 0.09457021951675415,
      "learning_rate": 1.2024456521739131e-06,
      "loss": 10.7878,
      "step": 3090
    },
    {
      "epoch": 2.7003484320557494,
      "grad_norm": 0.09415573626756668,
      "learning_rate": 1.1684782608695653e-06,
      "loss": 10.7877,
      "step": 3100
    },
    {
      "epoch": 2.7090592334494774,
      "grad_norm": 0.09336414933204651,
      "learning_rate": 1.1345108695652175e-06,
      "loss": 10.7863,
      "step": 3110
    },
    {
      "epoch": 2.7177700348432055,
      "grad_norm": 0.09788176417350769,
      "learning_rate": 1.1005434782608697e-06,
      "loss": 10.7866,
      "step": 3120
    },
    {
      "epoch": 2.726480836236934,
      "grad_norm": 0.10606147348880768,
      "learning_rate": 1.0665760869565219e-06,
      "loss": 10.788,
      "step": 3130
    },
    {
      "epoch": 2.735191637630662,
      "grad_norm": 0.09581784158945084,
      "learning_rate": 1.032608695652174e-06,
      "loss": 10.7865,
      "step": 3140
    },
    {
      "epoch": 2.7439024390243905,
      "grad_norm": 0.09258154779672623,
      "learning_rate": 9.986413043478262e-07,
      "loss": 10.7874,
      "step": 3150
    },
    {
      "epoch": 2.7526132404181185,
      "grad_norm": 0.09044299274682999,
      "learning_rate": 9.646739130434784e-07,
      "loss": 10.7878,
      "step": 3160
    },
    {
      "epoch": 2.7613240418118465,
      "grad_norm": 0.09484916925430298,
      "learning_rate": 9.307065217391305e-07,
      "loss": 10.787,
      "step": 3170
    },
    {
      "epoch": 2.770034843205575,
      "grad_norm": 0.0974380299448967,
      "learning_rate": 8.967391304347826e-07,
      "loss": 10.7871,
      "step": 3180
    },
    {
      "epoch": 2.778745644599303,
      "grad_norm": 0.093409463763237,
      "learning_rate": 8.627717391304348e-07,
      "loss": 10.7879,
      "step": 3190
    },
    {
      "epoch": 2.7874564459930316,
      "grad_norm": 0.09446702152490616,
      "learning_rate": 8.28804347826087e-07,
      "loss": 10.7865,
      "step": 3200
    },
    {
      "epoch": 2.7961672473867596,
      "grad_norm": 0.10579903423786163,
      "learning_rate": 7.948369565217393e-07,
      "loss": 10.7874,
      "step": 3210
    },
    {
      "epoch": 2.8048780487804876,
      "grad_norm": 0.09646815061569214,
      "learning_rate": 7.608695652173914e-07,
      "loss": 10.787,
      "step": 3220
    },
    {
      "epoch": 2.813588850174216,
      "grad_norm": 0.09360378235578537,
      "learning_rate": 7.269021739130436e-07,
      "loss": 10.7865,
      "step": 3230
    },
    {
      "epoch": 2.822299651567944,
      "grad_norm": 0.09116079658269882,
      "learning_rate": 6.929347826086957e-07,
      "loss": 10.7874,
      "step": 3240
    },
    {
      "epoch": 2.8310104529616726,
      "grad_norm": 0.09042556583881378,
      "learning_rate": 6.589673913043479e-07,
      "loss": 10.7871,
      "step": 3250
    },
    {
      "epoch": 2.8397212543554007,
      "grad_norm": 0.09959082305431366,
      "learning_rate": 6.25e-07,
      "loss": 10.7863,
      "step": 3260
    },
    {
      "epoch": 2.8484320557491287,
      "grad_norm": 0.09715747833251953,
      "learning_rate": 5.910326086956522e-07,
      "loss": 10.7879,
      "step": 3270
    },
    {
      "epoch": 2.857142857142857,
      "grad_norm": 0.09984558820724487,
      "learning_rate": 5.570652173913044e-07,
      "loss": 10.7865,
      "step": 3280
    },
    {
      "epoch": 2.8658536585365852,
      "grad_norm": 0.09477663040161133,
      "learning_rate": 5.230978260869566e-07,
      "loss": 10.7873,
      "step": 3290
    },
    {
      "epoch": 2.8745644599303137,
      "grad_norm": 0.09480907022953033,
      "learning_rate": 4.891304347826088e-07,
      "loss": 10.7874,
      "step": 3300
    },
    {
      "epoch": 2.8832752613240418,
      "grad_norm": 0.09444177150726318,
      "learning_rate": 4.5516304347826094e-07,
      "loss": 10.787,
      "step": 3310
    },
    {
      "epoch": 2.89198606271777,
      "grad_norm": 0.09742891043424606,
      "learning_rate": 4.2119565217391306e-07,
      "loss": 10.7869,
      "step": 3320
    },
    {
      "epoch": 2.9006968641114983,
      "grad_norm": 0.09473706036806107,
      "learning_rate": 3.8722826086956524e-07,
      "loss": 10.7873,
      "step": 3330
    },
    {
      "epoch": 2.9094076655052263,
      "grad_norm": 0.09218768030405045,
      "learning_rate": 3.532608695652174e-07,
      "loss": 10.7871,
      "step": 3340
    },
    {
      "epoch": 2.918118466898955,
      "grad_norm": 0.09080421179533005,
      "learning_rate": 3.1929347826086964e-07,
      "loss": 10.7868,
      "step": 3350
    },
    {
      "epoch": 2.926829268292683,
      "grad_norm": 0.09067242592573166,
      "learning_rate": 2.8532608695652177e-07,
      "loss": 10.787,
      "step": 3360
    },
    {
      "epoch": 2.935540069686411,
      "grad_norm": 0.09306979924440384,
      "learning_rate": 2.5135869565217394e-07,
      "loss": 10.7869,
      "step": 3370
    },
    {
      "epoch": 2.9442508710801394,
      "grad_norm": 0.09051218628883362,
      "learning_rate": 2.173913043478261e-07,
      "loss": 10.7861,
      "step": 3380
    },
    {
      "epoch": 2.952961672473868,
      "grad_norm": 0.09443790465593338,
      "learning_rate": 1.834239130434783e-07,
      "loss": 10.7871,
      "step": 3390
    },
    {
      "epoch": 2.961672473867596,
      "grad_norm": 0.09303121268749237,
      "learning_rate": 1.4945652173913045e-07,
      "loss": 10.7871,
      "step": 3400
    },
    {
      "epoch": 2.970383275261324,
      "grad_norm": 0.095026396214962,
      "learning_rate": 1.1548913043478261e-07,
      "loss": 10.7878,
      "step": 3410
    },
    {
      "epoch": 2.979094076655052,
      "grad_norm": 0.09600502252578735,
      "learning_rate": 8.152173913043479e-08,
      "loss": 10.7873,
      "step": 3420
    },
    {
      "epoch": 2.9878048780487805,
      "grad_norm": 0.09918372333049774,
      "learning_rate": 4.7554347826086966e-08,
      "loss": 10.7882,
      "step": 3430
    },
    {
      "epoch": 2.996515679442509,
      "grad_norm": 0.10064807534217834,
      "learning_rate": 1.3586956521739131e-08,
      "loss": 10.7876,
      "step": 3440
    }
  ],
  "logging_steps": 10,
  "max_steps": 3444,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1697588517943296.0,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}