|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"eval_steps": 500, |
|
"global_step": 9480, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0010548523206751054, |
|
"grad_norm": 1.2911320924758911, |
|
"learning_rate": 0.00015822784810126583, |
|
"loss": 7.5306, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.002109704641350211, |
|
"grad_norm": 1.1738190650939941, |
|
"learning_rate": 0.00031645569620253165, |
|
"loss": 6.9264, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0031645569620253164, |
|
"grad_norm": 0.8675833940505981, |
|
"learning_rate": 0.00047468354430379745, |
|
"loss": 6.2658, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.004219409282700422, |
|
"grad_norm": 1.2689286470413208, |
|
"learning_rate": 0.0006329113924050633, |
|
"loss": 5.7667, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.005274261603375527, |
|
"grad_norm": 0.8328885436058044, |
|
"learning_rate": 0.0007911392405063291, |
|
"loss": 5.3006, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.006329113924050633, |
|
"grad_norm": 0.6221523284912109, |
|
"learning_rate": 0.0009493670886075949, |
|
"loss": 4.7854, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.007383966244725738, |
|
"grad_norm": 0.7859921455383301, |
|
"learning_rate": 0.0011075949367088608, |
|
"loss": 4.3737, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.008438818565400843, |
|
"grad_norm": 1.4790822267532349, |
|
"learning_rate": 0.0012658227848101266, |
|
"loss": 4.1257, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.00949367088607595, |
|
"grad_norm": 0.84088134765625, |
|
"learning_rate": 0.0014240506329113926, |
|
"loss": 3.927, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.010548523206751054, |
|
"grad_norm": 0.9328136444091797, |
|
"learning_rate": 0.0015, |
|
"loss": 3.7791, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.011603375527426161, |
|
"grad_norm": 0.8343794941902161, |
|
"learning_rate": 0.0015, |
|
"loss": 3.6241, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.012658227848101266, |
|
"grad_norm": 1.082481026649475, |
|
"learning_rate": 0.0015, |
|
"loss": 3.513, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.013713080168776372, |
|
"grad_norm": 0.7395880222320557, |
|
"learning_rate": 0.0015, |
|
"loss": 3.4026, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.014767932489451477, |
|
"grad_norm": 0.8911088109016418, |
|
"learning_rate": 0.0015, |
|
"loss": 3.3082, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.015822784810126583, |
|
"grad_norm": 1.0754770040512085, |
|
"learning_rate": 0.0015, |
|
"loss": 3.2323, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.016877637130801686, |
|
"grad_norm": 0.7520929574966431, |
|
"learning_rate": 0.0015, |
|
"loss": 3.1652, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.017932489451476793, |
|
"grad_norm": 1.0018035173416138, |
|
"learning_rate": 0.0015, |
|
"loss": 3.0882, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.0189873417721519, |
|
"grad_norm": 0.8712095618247986, |
|
"learning_rate": 0.0015, |
|
"loss": 3.0627, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.020042194092827006, |
|
"grad_norm": 0.6786903142929077, |
|
"learning_rate": 0.0015, |
|
"loss": 2.989, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.02109704641350211, |
|
"grad_norm": 0.887618899345398, |
|
"learning_rate": 0.0015, |
|
"loss": 2.9474, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.022151898734177215, |
|
"grad_norm": 0.6775170564651489, |
|
"learning_rate": 0.0015, |
|
"loss": 2.9038, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.023206751054852322, |
|
"grad_norm": 0.7518839836120605, |
|
"learning_rate": 0.0015, |
|
"loss": 2.8697, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.024261603375527425, |
|
"grad_norm": 0.7377467155456543, |
|
"learning_rate": 0.0015, |
|
"loss": 2.8184, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.02531645569620253, |
|
"grad_norm": 0.9767955541610718, |
|
"learning_rate": 0.0015, |
|
"loss": 2.7812, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.026371308016877638, |
|
"grad_norm": 0.8166293501853943, |
|
"learning_rate": 0.0015, |
|
"loss": 2.7466, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.027426160337552744, |
|
"grad_norm": 0.7962077260017395, |
|
"learning_rate": 0.0015, |
|
"loss": 2.7132, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.028481012658227847, |
|
"grad_norm": 0.8260292410850525, |
|
"learning_rate": 0.0015, |
|
"loss": 2.6831, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.029535864978902954, |
|
"grad_norm": 0.9678518176078796, |
|
"learning_rate": 0.0015, |
|
"loss": 2.6457, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.03059071729957806, |
|
"grad_norm": 0.8574512004852295, |
|
"learning_rate": 0.0015, |
|
"loss": 2.6198, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.03164556962025317, |
|
"grad_norm": 1.0233553647994995, |
|
"learning_rate": 0.0015, |
|
"loss": 2.5961, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.03270042194092827, |
|
"grad_norm": 0.8460607528686523, |
|
"learning_rate": 0.0015, |
|
"loss": 2.574, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.03375527426160337, |
|
"grad_norm": 0.9686404466629028, |
|
"learning_rate": 0.0015, |
|
"loss": 2.5576, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.03481012658227848, |
|
"grad_norm": 0.731982946395874, |
|
"learning_rate": 0.0015, |
|
"loss": 2.5384, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.035864978902953586, |
|
"grad_norm": 1.0275819301605225, |
|
"learning_rate": 0.0015, |
|
"loss": 2.5105, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.03691983122362869, |
|
"grad_norm": 0.9130907654762268, |
|
"learning_rate": 0.0015, |
|
"loss": 2.4707, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.0379746835443038, |
|
"grad_norm": 0.802019476890564, |
|
"learning_rate": 0.0015, |
|
"loss": 2.4571, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.039029535864978905, |
|
"grad_norm": 1.0577914714813232, |
|
"learning_rate": 0.0015, |
|
"loss": 2.4457, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.04008438818565401, |
|
"grad_norm": 0.8877514004707336, |
|
"learning_rate": 0.0015, |
|
"loss": 2.4274, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.04113924050632911, |
|
"grad_norm": 1.0621153116226196, |
|
"learning_rate": 0.0015, |
|
"loss": 2.4058, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.04219409282700422, |
|
"grad_norm": 0.8034183382987976, |
|
"learning_rate": 0.0015, |
|
"loss": 2.3812, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.043248945147679324, |
|
"grad_norm": 1.1909996271133423, |
|
"learning_rate": 0.0015, |
|
"loss": 2.3706, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.04430379746835443, |
|
"grad_norm": 0.8821457028388977, |
|
"learning_rate": 0.0015, |
|
"loss": 2.3519, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.04535864978902954, |
|
"grad_norm": 0.877312183380127, |
|
"learning_rate": 0.0015, |
|
"loss": 2.3309, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.046413502109704644, |
|
"grad_norm": 0.7695094347000122, |
|
"learning_rate": 0.0015, |
|
"loss": 2.3125, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.04746835443037975, |
|
"grad_norm": 0.9509552717208862, |
|
"learning_rate": 0.0015, |
|
"loss": 2.3128, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.04852320675105485, |
|
"grad_norm": 0.7557908296585083, |
|
"learning_rate": 0.0015, |
|
"loss": 2.293, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.049578059071729956, |
|
"grad_norm": 0.9517701864242554, |
|
"learning_rate": 0.0015, |
|
"loss": 2.2694, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.05063291139240506, |
|
"grad_norm": 0.8353269696235657, |
|
"learning_rate": 0.0015, |
|
"loss": 2.2722, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.05168776371308017, |
|
"grad_norm": 0.8538005948066711, |
|
"learning_rate": 0.0015, |
|
"loss": 2.2391, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.052742616033755275, |
|
"grad_norm": 0.7373113632202148, |
|
"learning_rate": 0.0015, |
|
"loss": 2.2339, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.05379746835443038, |
|
"grad_norm": 1.0332047939300537, |
|
"learning_rate": 0.0015, |
|
"loss": 2.2247, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.05485232067510549, |
|
"grad_norm": 0.8454136848449707, |
|
"learning_rate": 0.0015, |
|
"loss": 2.2092, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.05590717299578059, |
|
"grad_norm": 0.7165494561195374, |
|
"learning_rate": 0.0015, |
|
"loss": 2.1931, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.056962025316455694, |
|
"grad_norm": 0.7286007404327393, |
|
"learning_rate": 0.0015, |
|
"loss": 2.1751, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.0580168776371308, |
|
"grad_norm": 0.7424426674842834, |
|
"learning_rate": 0.0015, |
|
"loss": 2.1783, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.05907172995780591, |
|
"grad_norm": 0.7351111173629761, |
|
"learning_rate": 0.0015, |
|
"loss": 2.146, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.060126582278481014, |
|
"grad_norm": 1.0205843448638916, |
|
"learning_rate": 0.0015, |
|
"loss": 2.1503, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.06118143459915612, |
|
"grad_norm": 1.1331634521484375, |
|
"learning_rate": 0.0015, |
|
"loss": 2.1545, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.06223628691983123, |
|
"grad_norm": 1.3081284761428833, |
|
"learning_rate": 0.0015, |
|
"loss": 2.1313, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.06329113924050633, |
|
"grad_norm": 1.0923019647598267, |
|
"learning_rate": 0.0015, |
|
"loss": 2.1306, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.06434599156118144, |
|
"grad_norm": 1.1899714469909668, |
|
"learning_rate": 0.0015, |
|
"loss": 2.1104, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.06540084388185655, |
|
"grad_norm": 0.8574506044387817, |
|
"learning_rate": 0.0015, |
|
"loss": 2.0926, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.06645569620253164, |
|
"grad_norm": 0.9749586582183838, |
|
"learning_rate": 0.0015, |
|
"loss": 2.0799, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.06751054852320675, |
|
"grad_norm": 0.7181791663169861, |
|
"learning_rate": 0.0015, |
|
"loss": 2.0941, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.06856540084388185, |
|
"grad_norm": 0.736224353313446, |
|
"learning_rate": 0.0015, |
|
"loss": 2.0857, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.06962025316455696, |
|
"grad_norm": 0.9573279023170471, |
|
"learning_rate": 0.0015, |
|
"loss": 2.0748, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.07067510548523206, |
|
"grad_norm": 0.7422716021537781, |
|
"learning_rate": 0.0015, |
|
"loss": 2.0621, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.07172995780590717, |
|
"grad_norm": 0.7074818015098572, |
|
"learning_rate": 0.0015, |
|
"loss": 2.0523, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.07278481012658228, |
|
"grad_norm": 0.7281100749969482, |
|
"learning_rate": 0.0015, |
|
"loss": 2.0635, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.07383966244725738, |
|
"grad_norm": 0.7090765833854675, |
|
"learning_rate": 0.0015, |
|
"loss": 2.0448, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.07489451476793249, |
|
"grad_norm": 0.727451741695404, |
|
"learning_rate": 0.0015, |
|
"loss": 2.0325, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.0759493670886076, |
|
"grad_norm": 0.7485256195068359, |
|
"learning_rate": 0.0015, |
|
"loss": 2.0135, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.0770042194092827, |
|
"grad_norm": 0.8138155341148376, |
|
"learning_rate": 0.0015, |
|
"loss": 2.0065, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.07805907172995781, |
|
"grad_norm": 0.6835618615150452, |
|
"learning_rate": 0.0015, |
|
"loss": 2.014, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.07911392405063292, |
|
"grad_norm": 1.0360156297683716, |
|
"learning_rate": 0.0015, |
|
"loss": 1.9957, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.08016877637130802, |
|
"grad_norm": 1.390318751335144, |
|
"learning_rate": 0.0015, |
|
"loss": 2.0014, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.08122362869198312, |
|
"grad_norm": 0.989683985710144, |
|
"learning_rate": 0.0015, |
|
"loss": 2.0001, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.08227848101265822, |
|
"grad_norm": 0.9884315729141235, |
|
"learning_rate": 0.0015, |
|
"loss": 1.9788, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.08333333333333333, |
|
"grad_norm": 0.6850045919418335, |
|
"learning_rate": 0.0015, |
|
"loss": 1.9818, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.08438818565400844, |
|
"grad_norm": 0.7124843597412109, |
|
"learning_rate": 0.0015, |
|
"loss": 1.9591, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.08544303797468354, |
|
"grad_norm": 0.7981064319610596, |
|
"learning_rate": 0.0015, |
|
"loss": 1.9474, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.08649789029535865, |
|
"grad_norm": 0.8757826685905457, |
|
"learning_rate": 0.0015, |
|
"loss": 1.9577, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.08755274261603375, |
|
"grad_norm": 0.9261611700057983, |
|
"learning_rate": 0.0015, |
|
"loss": 1.9614, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.08860759493670886, |
|
"grad_norm": 0.7376549243927002, |
|
"learning_rate": 0.0015, |
|
"loss": 1.9399, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.08966244725738397, |
|
"grad_norm": 0.6958174109458923, |
|
"learning_rate": 0.0015, |
|
"loss": 1.9405, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.09071729957805907, |
|
"grad_norm": 0.7377433180809021, |
|
"learning_rate": 0.0015, |
|
"loss": 1.9374, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.09177215189873418, |
|
"grad_norm": 0.659022867679596, |
|
"learning_rate": 0.0015, |
|
"loss": 1.9339, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.09282700421940929, |
|
"grad_norm": 0.7705568075180054, |
|
"learning_rate": 0.0015, |
|
"loss": 1.9272, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.0938818565400844, |
|
"grad_norm": 1.073669672012329, |
|
"learning_rate": 0.0015, |
|
"loss": 1.9159, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.0949367088607595, |
|
"grad_norm": 1.3000624179840088, |
|
"learning_rate": 0.0015, |
|
"loss": 1.9168, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.09599156118143459, |
|
"grad_norm": 0.817836344242096, |
|
"learning_rate": 0.0015, |
|
"loss": 1.9206, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.0970464135021097, |
|
"grad_norm": 0.6906339526176453, |
|
"learning_rate": 0.0015, |
|
"loss": 1.9065, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.0981012658227848, |
|
"grad_norm": 0.7800134420394897, |
|
"learning_rate": 0.0015, |
|
"loss": 1.8938, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.09915611814345991, |
|
"grad_norm": 0.7053550481796265, |
|
"learning_rate": 0.0015, |
|
"loss": 1.8986, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.10021097046413502, |
|
"grad_norm": 1.0190623998641968, |
|
"learning_rate": 0.0015, |
|
"loss": 1.8828, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.10126582278481013, |
|
"grad_norm": 0.7127584218978882, |
|
"learning_rate": 0.0015, |
|
"loss": 1.8896, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.10232067510548523, |
|
"grad_norm": 1.0691441297531128, |
|
"learning_rate": 0.0015, |
|
"loss": 1.8884, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.10337552742616034, |
|
"grad_norm": 0.6816598773002625, |
|
"learning_rate": 0.0015, |
|
"loss": 1.8671, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.10443037974683544, |
|
"grad_norm": 0.7380853295326233, |
|
"learning_rate": 0.0015, |
|
"loss": 1.867, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.10548523206751055, |
|
"grad_norm": 0.8465728759765625, |
|
"learning_rate": 0.0015, |
|
"loss": 1.8785, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.10654008438818566, |
|
"grad_norm": 0.7769269943237305, |
|
"learning_rate": 0.0015, |
|
"loss": 1.8691, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.10759493670886076, |
|
"grad_norm": 1.0904594659805298, |
|
"learning_rate": 0.0015, |
|
"loss": 1.8588, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.10864978902953587, |
|
"grad_norm": 1.2031594514846802, |
|
"learning_rate": 0.0015, |
|
"loss": 1.8605, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.10970464135021098, |
|
"grad_norm": 0.7433950901031494, |
|
"learning_rate": 0.0015, |
|
"loss": 1.8515, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.11075949367088607, |
|
"grad_norm": 0.7069742679595947, |
|
"learning_rate": 0.0015, |
|
"loss": 1.854, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.11181434599156118, |
|
"grad_norm": 0.6981137990951538, |
|
"learning_rate": 0.0015, |
|
"loss": 1.8432, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.11286919831223628, |
|
"grad_norm": 0.6770772933959961, |
|
"learning_rate": 0.0015, |
|
"loss": 1.8311, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.11392405063291139, |
|
"grad_norm": 0.6839622855186462, |
|
"learning_rate": 0.0015, |
|
"loss": 1.8326, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.1149789029535865, |
|
"grad_norm": 0.6609824895858765, |
|
"learning_rate": 0.0015, |
|
"loss": 1.8409, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.1160337552742616, |
|
"grad_norm": 0.7277442216873169, |
|
"learning_rate": 0.0015, |
|
"loss": 1.8276, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.11708860759493671, |
|
"grad_norm": 0.7287368774414062, |
|
"learning_rate": 0.0015, |
|
"loss": 1.8329, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.11814345991561181, |
|
"grad_norm": 0.7724989652633667, |
|
"learning_rate": 0.0015, |
|
"loss": 1.8279, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.11919831223628692, |
|
"grad_norm": 0.7109525203704834, |
|
"learning_rate": 0.0015, |
|
"loss": 1.8134, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.12025316455696203, |
|
"grad_norm": 0.7559357285499573, |
|
"learning_rate": 0.0015, |
|
"loss": 1.8262, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.12130801687763713, |
|
"grad_norm": 1.1389662027359009, |
|
"learning_rate": 0.0015, |
|
"loss": 1.8134, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.12236286919831224, |
|
"grad_norm": 0.9472622871398926, |
|
"learning_rate": 0.0015, |
|
"loss": 1.8176, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.12341772151898735, |
|
"grad_norm": 0.6383090019226074, |
|
"learning_rate": 0.0015, |
|
"loss": 1.8033, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.12447257383966245, |
|
"grad_norm": 0.8101405501365662, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7904, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.12552742616033755, |
|
"grad_norm": 0.655144453048706, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7934, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.12658227848101267, |
|
"grad_norm": 0.9537879824638367, |
|
"learning_rate": 0.0015, |
|
"loss": 1.808, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.12763713080168776, |
|
"grad_norm": 0.8779239654541016, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7989, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.12869198312236288, |
|
"grad_norm": 0.7360990643501282, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7854, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.12974683544303797, |
|
"grad_norm": 0.6458498239517212, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7767, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.1308016877637131, |
|
"grad_norm": 0.7693349123001099, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7905, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.13185654008438819, |
|
"grad_norm": 0.6853756308555603, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7936, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.13291139240506328, |
|
"grad_norm": 0.9074394106864929, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7796, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.1339662447257384, |
|
"grad_norm": 0.6336771249771118, |
|
"learning_rate": 0.0015, |
|
"loss": 1.777, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.1350210970464135, |
|
"grad_norm": 0.785158097743988, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7892, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.1360759493670886, |
|
"grad_norm": 0.9094076156616211, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7746, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.1371308016877637, |
|
"grad_norm": 0.7252681851387024, |
|
"learning_rate": 0.0015, |
|
"loss": 1.761, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.13818565400843882, |
|
"grad_norm": 0.7606396079063416, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7692, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.13924050632911392, |
|
"grad_norm": 0.7315070629119873, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7717, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.14029535864978904, |
|
"grad_norm": 1.215480089187622, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7627, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.14135021097046413, |
|
"grad_norm": 0.7350618839263916, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7627, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.14240506329113925, |
|
"grad_norm": 0.7639700174331665, |
|
"learning_rate": 0.0015, |
|
"loss": 1.767, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.14345991561181434, |
|
"grad_norm": 0.6382935643196106, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7592, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.14451476793248946, |
|
"grad_norm": 0.6957666873931885, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7538, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.14556962025316456, |
|
"grad_norm": 0.7660850882530212, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7445, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.14662447257383968, |
|
"grad_norm": 1.0359055995941162, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7563, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.14767932489451477, |
|
"grad_norm": 1.3672610521316528, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7391, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.14873417721518986, |
|
"grad_norm": 1.2046788930892944, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7483, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.14978902953586498, |
|
"grad_norm": 0.7581397891044617, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7504, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.15084388185654007, |
|
"grad_norm": 0.7376483678817749, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7375, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.1518987341772152, |
|
"grad_norm": 0.661811888217926, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7304, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.1529535864978903, |
|
"grad_norm": 0.6791737675666809, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7312, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.1540084388185654, |
|
"grad_norm": 0.6737160682678223, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7297, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.1550632911392405, |
|
"grad_norm": 0.8996027708053589, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7299, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.15611814345991562, |
|
"grad_norm": 0.9724168181419373, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7306, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.1571729957805907, |
|
"grad_norm": 1.112387776374817, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7305, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.15822784810126583, |
|
"grad_norm": 0.6346387267112732, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7287, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.15928270042194093, |
|
"grad_norm": 0.6662168502807617, |
|
"learning_rate": 0.0015, |
|
"loss": 1.723, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.16033755274261605, |
|
"grad_norm": 0.8017670512199402, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7262, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.16139240506329114, |
|
"grad_norm": 0.805647611618042, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7106, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.16244725738396623, |
|
"grad_norm": 0.6892802119255066, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7026, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.16350210970464135, |
|
"grad_norm": 1.0336511135101318, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7047, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.16455696202531644, |
|
"grad_norm": 0.6698900461196899, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7082, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.16561181434599156, |
|
"grad_norm": 0.7654104828834534, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6972, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.16666666666666666, |
|
"grad_norm": 0.6756095290184021, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7089, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.16772151898734178, |
|
"grad_norm": 0.9509694576263428, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7045, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.16877637130801687, |
|
"grad_norm": 0.712745726108551, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7096, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.169831223628692, |
|
"grad_norm": 0.655924379825592, |
|
"learning_rate": 0.0015, |
|
"loss": 1.7065, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.17088607594936708, |
|
"grad_norm": 0.6995239853858948, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6769, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.1719409282700422, |
|
"grad_norm": 0.8030900955200195, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6908, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.1729957805907173, |
|
"grad_norm": 0.6829028129577637, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6939, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.17405063291139242, |
|
"grad_norm": 1.173487901687622, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6842, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.1751054852320675, |
|
"grad_norm": 0.6973627805709839, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6833, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.17616033755274263, |
|
"grad_norm": 0.833595871925354, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6944, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.17721518987341772, |
|
"grad_norm": 1.1782370805740356, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6824, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.17827004219409281, |
|
"grad_norm": 0.7128448486328125, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6761, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.17932489451476794, |
|
"grad_norm": 0.8215356469154358, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6916, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.18037974683544303, |
|
"grad_norm": 0.6329225301742554, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6724, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.18143459915611815, |
|
"grad_norm": 0.6323493123054504, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6685, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.18248945147679324, |
|
"grad_norm": 0.6220436692237854, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6703, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.18354430379746836, |
|
"grad_norm": 0.6482318043708801, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6786, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.18459915611814345, |
|
"grad_norm": 0.6890340447425842, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6931, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.18565400843881857, |
|
"grad_norm": 0.6083322167396545, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6757, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.18670886075949367, |
|
"grad_norm": 0.6560311317443848, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6763, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.1877637130801688, |
|
"grad_norm": 0.6504945158958435, |
|
"learning_rate": 0.0015, |
|
"loss": 1.677, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.18881856540084388, |
|
"grad_norm": 0.6659324169158936, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6772, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.189873417721519, |
|
"grad_norm": 0.7314705848693848, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6742, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.1909282700421941, |
|
"grad_norm": 0.7147253751754761, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6619, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.19198312236286919, |
|
"grad_norm": 0.797636091709137, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6628, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.1930379746835443, |
|
"grad_norm": 0.6524857878684998, |
|
"learning_rate": 0.0015, |
|
"loss": 1.658, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.1940928270042194, |
|
"grad_norm": 0.6256518363952637, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6571, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.19514767932489452, |
|
"grad_norm": 0.8984813094139099, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6586, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.1962025316455696, |
|
"grad_norm": 0.7350126504898071, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6625, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.19725738396624473, |
|
"grad_norm": 0.7215333580970764, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6628, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.19831223628691982, |
|
"grad_norm": 0.6555130481719971, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6525, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.19936708860759494, |
|
"grad_norm": 0.7172640562057495, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6563, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.20042194092827004, |
|
"grad_norm": 0.8645635843276978, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6514, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.20147679324894516, |
|
"grad_norm": 0.8532048463821411, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6634, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.20253164556962025, |
|
"grad_norm": 0.8084898591041565, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6496, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.20358649789029537, |
|
"grad_norm": 0.6495212316513062, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6468, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.20464135021097046, |
|
"grad_norm": 0.6420271396636963, |
|
"learning_rate": 0.0015, |
|
"loss": 1.641, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.20569620253164558, |
|
"grad_norm": 0.6756624579429626, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6408, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.20675105485232068, |
|
"grad_norm": 0.632693350315094, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6454, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.20780590717299577, |
|
"grad_norm": 0.7314813137054443, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6523, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.2088607594936709, |
|
"grad_norm": 0.6397210955619812, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6369, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.20991561181434598, |
|
"grad_norm": 0.6606935262680054, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6346, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.2109704641350211, |
|
"grad_norm": 0.6740031242370605, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6339, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.2120253164556962, |
|
"grad_norm": 0.720748245716095, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6401, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.21308016877637131, |
|
"grad_norm": 1.1676092147827148, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6498, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.2141350210970464, |
|
"grad_norm": 0.6337310075759888, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6379, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.21518987341772153, |
|
"grad_norm": 0.8364405035972595, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6366, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.21624472573839662, |
|
"grad_norm": 0.7344895005226135, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6351, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.21729957805907174, |
|
"grad_norm": 0.9234893918037415, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6269, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.21835443037974683, |
|
"grad_norm": 0.7972549796104431, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6302, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.21940928270042195, |
|
"grad_norm": 0.6978487372398376, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6337, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.22046413502109705, |
|
"grad_norm": 0.766801655292511, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6191, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.22151898734177214, |
|
"grad_norm": 1.0701206922531128, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6314, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.22257383966244726, |
|
"grad_norm": 0.6192570328712463, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6205, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.22362869198312235, |
|
"grad_norm": 0.5801493525505066, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6181, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.22468354430379747, |
|
"grad_norm": 0.6492977142333984, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6231, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.22573839662447256, |
|
"grad_norm": 0.574461817741394, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6173, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.22679324894514769, |
|
"grad_norm": 0.7691101431846619, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6237, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.22784810126582278, |
|
"grad_norm": 0.9927956461906433, |
|
"learning_rate": 0.0015, |
|
"loss": 1.635, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.2289029535864979, |
|
"grad_norm": 0.6871766448020935, |
|
"learning_rate": 0.0015, |
|
"loss": 1.62, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.229957805907173, |
|
"grad_norm": 0.5886237025260925, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6045, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.2310126582278481, |
|
"grad_norm": 0.6547583341598511, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6228, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.2320675105485232, |
|
"grad_norm": 0.6600242853164673, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6188, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.23312236286919832, |
|
"grad_norm": 0.797605037689209, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6039, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.23417721518987342, |
|
"grad_norm": 0.6497875452041626, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6185, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.23523206751054854, |
|
"grad_norm": 0.782981276512146, |
|
"learning_rate": 0.0015, |
|
"loss": 1.613, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.23628691983122363, |
|
"grad_norm": 0.7650336623191833, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6109, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.23734177215189872, |
|
"grad_norm": 1.1431978940963745, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6079, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.23839662447257384, |
|
"grad_norm": 0.9392552971839905, |
|
"learning_rate": 0.0015, |
|
"loss": 1.599, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.23945147679324894, |
|
"grad_norm": 0.7382718324661255, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6302, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.24050632911392406, |
|
"grad_norm": 0.6001378893852234, |
|
"learning_rate": 0.0015, |
|
"loss": 1.617, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.24156118143459915, |
|
"grad_norm": 0.6126279830932617, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5995, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.24261603375527427, |
|
"grad_norm": 0.6365258097648621, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5984, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.24367088607594936, |
|
"grad_norm": 0.6386217474937439, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5997, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.24472573839662448, |
|
"grad_norm": 0.6965967416763306, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6123, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.24578059071729957, |
|
"grad_norm": 0.6952495574951172, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5944, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.2468354430379747, |
|
"grad_norm": 0.8141445517539978, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5891, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.2478902953586498, |
|
"grad_norm": 0.8598236441612244, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5927, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.2489451476793249, |
|
"grad_norm": 0.9009906053543091, |
|
"learning_rate": 0.0015, |
|
"loss": 1.6005, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.9204186201095581, |
|
"learning_rate": 0.0015, |
|
"loss": 1.599, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.2510548523206751, |
|
"grad_norm": 0.7122026085853577, |
|
"learning_rate": 0.0015, |
|
"loss": 1.597, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.2521097046413502, |
|
"grad_norm": 0.6895260214805603, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5961, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.25316455696202533, |
|
"grad_norm": 0.5801357626914978, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5932, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.2542194092827004, |
|
"grad_norm": 0.7699609398841858, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5971, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.2552742616033755, |
|
"grad_norm": 0.7428116202354431, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5941, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.2563291139240506, |
|
"grad_norm": 0.651231050491333, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5936, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.25738396624472576, |
|
"grad_norm": 0.7375125885009766, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5993, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.25843881856540085, |
|
"grad_norm": 0.635988712310791, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5849, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.25949367088607594, |
|
"grad_norm": 0.601569414138794, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5997, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.26054852320675104, |
|
"grad_norm": 0.8157471418380737, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5897, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.2616033755274262, |
|
"grad_norm": 0.7209903597831726, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5945, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.2626582278481013, |
|
"grad_norm": 0.6519895792007446, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5806, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.26371308016877637, |
|
"grad_norm": 0.7864737510681152, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5839, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.26476793248945146, |
|
"grad_norm": 0.6227979063987732, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5865, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.26582278481012656, |
|
"grad_norm": 0.6954833269119263, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5903, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.2668776371308017, |
|
"grad_norm": 0.640419065952301, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5808, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.2679324894514768, |
|
"grad_norm": 1.0571954250335693, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5849, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.2689873417721519, |
|
"grad_norm": 0.7716902494430542, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5787, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.270042194092827, |
|
"grad_norm": 0.627837598323822, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5874, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.27109704641350213, |
|
"grad_norm": 0.6312415599822998, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5804, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.2721518987341772, |
|
"grad_norm": 0.5654999017715454, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5793, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.2732067510548523, |
|
"grad_norm": 0.6043466329574585, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5861, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.2742616033755274, |
|
"grad_norm": 0.6311189532279968, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5872, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.27531645569620256, |
|
"grad_norm": 0.6845487952232361, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5839, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.27637130801687765, |
|
"grad_norm": 0.6696321368217468, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5808, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.27742616033755274, |
|
"grad_norm": 0.7458539009094238, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5731, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.27848101265822783, |
|
"grad_norm": 0.6243004202842712, |
|
"learning_rate": 0.0015, |
|
"loss": 1.582, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.2795358649789029, |
|
"grad_norm": 0.7745714783668518, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5747, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.2805907172995781, |
|
"grad_norm": 0.6416304707527161, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5663, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.28164556962025317, |
|
"grad_norm": 0.5748112201690674, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5675, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.28270042194092826, |
|
"grad_norm": 0.662480890750885, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5698, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.28375527426160335, |
|
"grad_norm": 0.7659668326377869, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5688, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.2848101265822785, |
|
"grad_norm": 0.5924683809280396, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5646, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.2858649789029536, |
|
"grad_norm": 0.6555392742156982, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5703, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.2869198312236287, |
|
"grad_norm": 0.56369549036026, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5708, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.2879746835443038, |
|
"grad_norm": 0.674506664276123, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5524, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.2890295358649789, |
|
"grad_norm": 0.6215850710868835, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5631, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.290084388185654, |
|
"grad_norm": 0.5844721794128418, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5713, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.2911392405063291, |
|
"grad_norm": 0.8595011234283447, |
|
"learning_rate": 0.0015, |
|
"loss": 1.577, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.2921940928270042, |
|
"grad_norm": 0.6657257080078125, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5717, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.29324894514767935, |
|
"grad_norm": 0.6913807988166809, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5685, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.29430379746835444, |
|
"grad_norm": 0.5601217150688171, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5645, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.29535864978902954, |
|
"grad_norm": 0.6473618745803833, |
|
"learning_rate": 0.0015, |
|
"loss": 1.562, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.29641350210970463, |
|
"grad_norm": 0.5763059854507446, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5653, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.2974683544303797, |
|
"grad_norm": 0.6512886881828308, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5714, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.29852320675105487, |
|
"grad_norm": 0.9278571605682373, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5471, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.29957805907172996, |
|
"grad_norm": 1.1715987920761108, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5512, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.30063291139240506, |
|
"grad_norm": 0.7548184990882874, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5553, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.30168776371308015, |
|
"grad_norm": 0.6434464454650879, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5572, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.3027426160337553, |
|
"grad_norm": 0.9384394288063049, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5584, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.3037974683544304, |
|
"grad_norm": 0.7197995781898499, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5463, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.3048523206751055, |
|
"grad_norm": 0.6241549849510193, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5513, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.3059071729957806, |
|
"grad_norm": 0.6179733872413635, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5582, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.3069620253164557, |
|
"grad_norm": 0.7129524946212769, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5592, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.3080168776371308, |
|
"grad_norm": 0.7332343459129333, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5523, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.3090717299578059, |
|
"grad_norm": 0.6103625893592834, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5479, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.310126582278481, |
|
"grad_norm": 0.6277995705604553, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5548, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.3111814345991561, |
|
"grad_norm": 0.6909558773040771, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5628, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.31223628691983124, |
|
"grad_norm": 1.1366920471191406, |
|
"learning_rate": 0.0015, |
|
"loss": 1.548, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.31329113924050633, |
|
"grad_norm": 1.1065442562103271, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5391, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.3143459915611814, |
|
"grad_norm": 0.6319011449813843, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5451, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.3154008438818565, |
|
"grad_norm": 0.6630522608757019, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5607, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.31645569620253167, |
|
"grad_norm": 0.6209017634391785, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5387, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.31751054852320676, |
|
"grad_norm": 0.5443456768989563, |
|
"learning_rate": 0.0015, |
|
"loss": 1.546, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.31856540084388185, |
|
"grad_norm": 0.6264317035675049, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5462, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.31962025316455694, |
|
"grad_norm": 0.6058927774429321, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5356, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.3206751054852321, |
|
"grad_norm": 0.6220033168792725, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5442, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.3217299578059072, |
|
"grad_norm": 0.6217190027236938, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5394, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.3227848101265823, |
|
"grad_norm": 0.5777936577796936, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5479, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.32383966244725737, |
|
"grad_norm": 0.6267879605293274, |
|
"learning_rate": 0.0015, |
|
"loss": 1.541, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.32489451476793246, |
|
"grad_norm": 0.6611675024032593, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5295, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.3259493670886076, |
|
"grad_norm": 0.6568845510482788, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5486, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.3270042194092827, |
|
"grad_norm": 0.6635953783988953, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5361, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.3280590717299578, |
|
"grad_norm": 0.6701586842536926, |
|
"learning_rate": 0.0015, |
|
"loss": 1.539, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.3291139240506329, |
|
"grad_norm": 0.5741431713104248, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5402, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.33016877637130804, |
|
"grad_norm": 0.5856887698173523, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5504, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.33122362869198313, |
|
"grad_norm": 0.6227253675460815, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5286, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.3322784810126582, |
|
"grad_norm": 0.5713142156600952, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5379, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.3333333333333333, |
|
"grad_norm": 0.5559601783752441, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5388, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.33438818565400846, |
|
"grad_norm": 0.693455159664154, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5439, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.33544303797468356, |
|
"grad_norm": 0.6805558800697327, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5373, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.33649789029535865, |
|
"grad_norm": 0.8250734210014343, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5352, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.33755274261603374, |
|
"grad_norm": 0.6972458958625793, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5329, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.33860759493670883, |
|
"grad_norm": 0.6945505142211914, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5413, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.339662447257384, |
|
"grad_norm": 0.7304695248603821, |
|
"learning_rate": 0.0015, |
|
"loss": 1.536, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.3407172995780591, |
|
"grad_norm": 0.7209477424621582, |
|
"learning_rate": 0.0015, |
|
"loss": 1.529, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.34177215189873417, |
|
"grad_norm": 0.5435463190078735, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5086, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.34282700421940926, |
|
"grad_norm": 0.6081704497337341, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5356, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.3438818565400844, |
|
"grad_norm": 0.6042312979698181, |
|
"learning_rate": 0.0015, |
|
"loss": 1.523, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.3449367088607595, |
|
"grad_norm": 0.6200316548347473, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5428, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.3459915611814346, |
|
"grad_norm": 0.9771542549133301, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5421, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.3470464135021097, |
|
"grad_norm": 1.0858914852142334, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5248, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.34810126582278483, |
|
"grad_norm": 0.5964863300323486, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5354, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.3491561181434599, |
|
"grad_norm": 0.6484059691429138, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5339, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.350210970464135, |
|
"grad_norm": 0.6149209141731262, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5415, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.3512658227848101, |
|
"grad_norm": 0.616062581539154, |
|
"learning_rate": 0.0015, |
|
"loss": 1.526, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.35232067510548526, |
|
"grad_norm": 0.6185603141784668, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5272, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.35337552742616035, |
|
"grad_norm": 0.6322174072265625, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5195, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.35443037974683544, |
|
"grad_norm": 0.5539165735244751, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5197, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.35548523206751054, |
|
"grad_norm": 0.6246862411499023, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5295, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.35654008438818563, |
|
"grad_norm": 0.563215434551239, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5246, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.3575949367088608, |
|
"grad_norm": 0.6254818439483643, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5194, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.35864978902953587, |
|
"grad_norm": 0.6142661571502686, |
|
"learning_rate": 0.0015, |
|
"loss": 1.529, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.35970464135021096, |
|
"grad_norm": 0.6170629262924194, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5256, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.36075949367088606, |
|
"grad_norm": 0.6925488710403442, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5072, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.3618143459915612, |
|
"grad_norm": 0.6214223504066467, |
|
"learning_rate": 0.0015, |
|
"loss": 1.525, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.3628691983122363, |
|
"grad_norm": 0.6367167830467224, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5188, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.3639240506329114, |
|
"grad_norm": 0.5955284237861633, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5162, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.3649789029535865, |
|
"grad_norm": 0.8075214624404907, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5109, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.36603375527426163, |
|
"grad_norm": 0.6733569502830505, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5271, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.3670886075949367, |
|
"grad_norm": 0.6443074941635132, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5324, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.3681434599156118, |
|
"grad_norm": 0.5668088793754578, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5227, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.3691983122362869, |
|
"grad_norm": 0.5876608490943909, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5037, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.370253164556962, |
|
"grad_norm": 0.6632211804389954, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5236, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.37130801687763715, |
|
"grad_norm": 0.5315051078796387, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5217, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.37236286919831224, |
|
"grad_norm": 0.652911365032196, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5295, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.37341772151898733, |
|
"grad_norm": 0.9318296909332275, |
|
"learning_rate": 0.0015, |
|
"loss": 1.523, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.3744725738396624, |
|
"grad_norm": 0.6587705612182617, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5133, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.3755274261603376, |
|
"grad_norm": 0.586134672164917, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5155, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.37658227848101267, |
|
"grad_norm": 0.6934296488761902, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5097, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.37763713080168776, |
|
"grad_norm": 0.591506838798523, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5117, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.37869198312236285, |
|
"grad_norm": 0.6162989735603333, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5228, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.379746835443038, |
|
"grad_norm": 0.7782261371612549, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5094, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.3808016877637131, |
|
"grad_norm": 0.6871771216392517, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5094, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.3818565400843882, |
|
"grad_norm": 0.6823848485946655, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5118, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.3829113924050633, |
|
"grad_norm": 0.6270849108695984, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5111, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.38396624472573837, |
|
"grad_norm": 0.8293321132659912, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5135, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.3850210970464135, |
|
"grad_norm": 0.697428822517395, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5199, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.3860759493670886, |
|
"grad_norm": 0.5967044234275818, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5125, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.3871308016877637, |
|
"grad_norm": 0.5544715523719788, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5075, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.3881856540084388, |
|
"grad_norm": 0.9727868437767029, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5113, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.38924050632911394, |
|
"grad_norm": 0.7310069799423218, |
|
"learning_rate": 0.0015, |
|
"loss": 1.519, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.39029535864978904, |
|
"grad_norm": 0.7747533917427063, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5223, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.39135021097046413, |
|
"grad_norm": 0.5666539072990417, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5085, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.3924050632911392, |
|
"grad_norm": 0.59869384765625, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4887, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.39345991561181437, |
|
"grad_norm": 0.520443856716156, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5077, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.39451476793248946, |
|
"grad_norm": 0.6021689176559448, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5085, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.39556962025316456, |
|
"grad_norm": 0.7624187469482422, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5086, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.39662447257383965, |
|
"grad_norm": 0.8453581929206848, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5013, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.39767932489451474, |
|
"grad_norm": 0.7012314796447754, |
|
"learning_rate": 0.0015, |
|
"loss": 1.518, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.3987341772151899, |
|
"grad_norm": 0.6165980696678162, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5028, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.399789029535865, |
|
"grad_norm": 0.7216675281524658, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5087, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.4008438818565401, |
|
"grad_norm": 0.6874868273735046, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5116, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.40189873417721517, |
|
"grad_norm": 0.5580333471298218, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5012, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.4029535864978903, |
|
"grad_norm": 0.6233204007148743, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4946, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.4040084388185654, |
|
"grad_norm": 0.6824832558631897, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4943, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.4050632911392405, |
|
"grad_norm": 0.5857206583023071, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5137, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.4061181434599156, |
|
"grad_norm": 0.6413280367851257, |
|
"learning_rate": 0.0015, |
|
"loss": 1.497, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.40717299578059074, |
|
"grad_norm": 0.6493369340896606, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5088, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.40822784810126583, |
|
"grad_norm": 0.6379006505012512, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4983, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.4092827004219409, |
|
"grad_norm": 0.6437706351280212, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4893, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.410337552742616, |
|
"grad_norm": 0.5746003985404968, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4896, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.41139240506329117, |
|
"grad_norm": 1.0675814151763916, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4927, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.41244725738396626, |
|
"grad_norm": 0.7103888392448425, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4975, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.41350210970464135, |
|
"grad_norm": 0.53952556848526, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4904, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.41455696202531644, |
|
"grad_norm": 0.5914176106452942, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4952, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.41561181434599154, |
|
"grad_norm": 0.6269722580909729, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4931, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.4166666666666667, |
|
"grad_norm": 0.6420876383781433, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5002, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.4177215189873418, |
|
"grad_norm": 0.5700493454933167, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5001, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.41877637130801687, |
|
"grad_norm": 0.6639719009399414, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4879, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.41983122362869196, |
|
"grad_norm": 0.6735414266586304, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4985, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.4208860759493671, |
|
"grad_norm": 0.6394850015640259, |
|
"learning_rate": 0.0015, |
|
"loss": 1.488, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.4219409282700422, |
|
"grad_norm": 0.6736827492713928, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5041, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.4229957805907173, |
|
"grad_norm": 0.6361777186393738, |
|
"learning_rate": 0.0015, |
|
"loss": 1.489, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 0.4240506329113924, |
|
"grad_norm": 0.5799570679664612, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4927, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 0.42510548523206754, |
|
"grad_norm": 0.5939344763755798, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4866, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.42616033755274263, |
|
"grad_norm": 0.5622565746307373, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4965, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 0.4272151898734177, |
|
"grad_norm": 0.5914807915687561, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4961, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.4282700421940928, |
|
"grad_norm": 0.6562783122062683, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4998, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 0.4293248945147679, |
|
"grad_norm": 0.6392592191696167, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4958, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 0.43037974683544306, |
|
"grad_norm": 0.8016908168792725, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5008, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.43143459915611815, |
|
"grad_norm": 0.6368083953857422, |
|
"learning_rate": 0.0015, |
|
"loss": 1.501, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 0.43248945147679324, |
|
"grad_norm": 0.6188535690307617, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4872, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.43354430379746833, |
|
"grad_norm": 0.7229503393173218, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4801, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 0.4345991561181435, |
|
"grad_norm": 0.6336178183555603, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4864, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 0.4356540084388186, |
|
"grad_norm": 0.5833616256713867, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4815, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 0.43670886075949367, |
|
"grad_norm": 0.5828094482421875, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4856, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 0.43776371308016876, |
|
"grad_norm": 0.6903582215309143, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4899, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.4388185654008439, |
|
"grad_norm": 0.6231018900871277, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4857, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 0.439873417721519, |
|
"grad_norm": 0.5356478095054626, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4836, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 0.4409282700421941, |
|
"grad_norm": 0.5384558439254761, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4791, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 0.4419831223628692, |
|
"grad_norm": 0.7372967600822449, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4871, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 0.4430379746835443, |
|
"grad_norm": 0.700654923915863, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4888, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.4440928270042194, |
|
"grad_norm": 0.5394490957260132, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4772, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 0.4451476793248945, |
|
"grad_norm": 0.614011824131012, |
|
"learning_rate": 0.0015, |
|
"loss": 1.487, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 0.4462025316455696, |
|
"grad_norm": 0.7026930451393127, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4813, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 0.4472573839662447, |
|
"grad_norm": 0.5955589413642883, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4815, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 0.44831223628691985, |
|
"grad_norm": 0.8915395140647888, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4865, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.44936708860759494, |
|
"grad_norm": 0.663708508014679, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4911, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 0.45042194092827004, |
|
"grad_norm": 0.5448995232582092, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4808, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 0.45147679324894513, |
|
"grad_norm": 0.6886418461799622, |
|
"learning_rate": 0.0015, |
|
"loss": 1.475, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 0.4525316455696203, |
|
"grad_norm": 0.7126356363296509, |
|
"learning_rate": 0.0015, |
|
"loss": 1.486, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 0.45358649789029537, |
|
"grad_norm": 0.5966187715530396, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4828, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.45464135021097046, |
|
"grad_norm": 0.5577530860900879, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4824, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 0.45569620253164556, |
|
"grad_norm": 0.7881457209587097, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4776, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 0.45675105485232065, |
|
"grad_norm": 0.5317174196243286, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4862, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 0.4578059071729958, |
|
"grad_norm": 0.5694594383239746, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4766, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.4588607594936709, |
|
"grad_norm": 0.6109514832496643, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4795, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.459915611814346, |
|
"grad_norm": 0.5422523021697998, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4936, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 0.4609704641350211, |
|
"grad_norm": 0.5639380216598511, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4682, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 0.4620253164556962, |
|
"grad_norm": 0.5603376626968384, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4781, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 0.4630801687763713, |
|
"grad_norm": 0.5457561016082764, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4733, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 0.4641350210970464, |
|
"grad_norm": 0.5526235103607178, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4797, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 0.4651898734177215, |
|
"grad_norm": 0.6319583058357239, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4939, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 0.46624472573839665, |
|
"grad_norm": 0.6389647722244263, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4749, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 0.46729957805907174, |
|
"grad_norm": 0.6226533651351929, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4792, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 0.46835443037974683, |
|
"grad_norm": 0.6680729985237122, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4782, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 0.4694092827004219, |
|
"grad_norm": 0.699388861656189, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4839, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 0.4704641350210971, |
|
"grad_norm": 0.5822103023529053, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4698, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 0.47151898734177217, |
|
"grad_norm": 0.5479047894477844, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4802, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 0.47257383966244726, |
|
"grad_norm": 0.5745465159416199, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4838, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 0.47362869198312235, |
|
"grad_norm": 0.6743048429489136, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4712, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 0.47468354430379744, |
|
"grad_norm": 0.6289355158805847, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4706, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.4757383966244726, |
|
"grad_norm": 0.5514520406723022, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4788, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 0.4767932489451477, |
|
"grad_norm": 0.6398304104804993, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4609, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 0.4778481012658228, |
|
"grad_norm": 0.6768178939819336, |
|
"learning_rate": 0.0015, |
|
"loss": 1.479, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 0.47890295358649787, |
|
"grad_norm": 0.5781738758087158, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4804, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 0.479957805907173, |
|
"grad_norm": 0.6289318203926086, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4785, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 0.4810126582278481, |
|
"grad_norm": 0.6006326675415039, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4664, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 0.4820675105485232, |
|
"grad_norm": 0.5571173429489136, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4487, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 0.4831223628691983, |
|
"grad_norm": 0.7171228528022766, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4739, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 0.48417721518987344, |
|
"grad_norm": 0.7601715922355652, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4601, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 0.48523206751054854, |
|
"grad_norm": 0.6442757248878479, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4807, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 0.48628691983122363, |
|
"grad_norm": 0.8816727995872498, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4832, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 0.4873417721518987, |
|
"grad_norm": 0.6388178467750549, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4599, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 0.4883966244725738, |
|
"grad_norm": 0.6010212898254395, |
|
"learning_rate": 0.0015, |
|
"loss": 1.476, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 0.48945147679324896, |
|
"grad_norm": 0.5553440451622009, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4746, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 0.49050632911392406, |
|
"grad_norm": 0.5826687216758728, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4771, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.49156118143459915, |
|
"grad_norm": 0.565367579460144, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4675, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 0.49261603375527424, |
|
"grad_norm": 0.8090323805809021, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4669, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 0.4936708860759494, |
|
"grad_norm": 0.5813798904418945, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4625, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 0.4947257383966245, |
|
"grad_norm": 0.8920308947563171, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4601, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 0.4957805907172996, |
|
"grad_norm": 0.5381173491477966, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4686, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 0.49683544303797467, |
|
"grad_norm": 0.5849639773368835, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4717, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 0.4978902953586498, |
|
"grad_norm": 0.6569827795028687, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4651, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 0.4989451476793249, |
|
"grad_norm": 0.5914872884750366, |
|
"learning_rate": 0.0015, |
|
"loss": 1.462, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.7037190794944763, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4619, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 0.5010548523206751, |
|
"grad_norm": 0.7166423797607422, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4828, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 0.5021097046413502, |
|
"grad_norm": 0.5268374681472778, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4669, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 0.5031645569620253, |
|
"grad_norm": 0.5577636361122131, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4635, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 0.5042194092827004, |
|
"grad_norm": 0.5795514583587646, |
|
"learning_rate": 0.0015, |
|
"loss": 1.475, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 0.5052742616033755, |
|
"grad_norm": 0.670405924320221, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4563, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 0.5063291139240507, |
|
"grad_norm": 0.7040602564811707, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4665, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 0.5073839662447257, |
|
"grad_norm": 0.6669322848320007, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4647, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 0.5084388185654009, |
|
"grad_norm": 0.7414965629577637, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4835, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 0.509493670886076, |
|
"grad_norm": 0.5885231494903564, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4645, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 0.510548523206751, |
|
"grad_norm": 0.6001221537590027, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4597, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 0.5116033755274262, |
|
"grad_norm": 0.5307698249816895, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4736, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 0.5126582278481012, |
|
"grad_norm": 0.5808476209640503, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4604, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 0.5137130801687764, |
|
"grad_norm": 0.8642584085464478, |
|
"learning_rate": 0.0015, |
|
"loss": 1.454, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 0.5147679324894515, |
|
"grad_norm": 0.5445716381072998, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4775, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 0.5158227848101266, |
|
"grad_norm": 0.5095697045326233, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4624, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 0.5168776371308017, |
|
"grad_norm": 0.633233368396759, |
|
"learning_rate": 0.0015, |
|
"loss": 1.474, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 0.5179324894514767, |
|
"grad_norm": 0.5968495607376099, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4661, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 0.5189873417721519, |
|
"grad_norm": 0.7888312339782715, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4584, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 0.520042194092827, |
|
"grad_norm": 0.7476255893707275, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4646, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 0.5210970464135021, |
|
"grad_norm": 0.6436273455619812, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4643, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 0.5221518987341772, |
|
"grad_norm": 0.5799338221549988, |
|
"learning_rate": 0.0015, |
|
"loss": 1.47, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 0.5232067510548524, |
|
"grad_norm": 0.6167886853218079, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4514, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 0.5242616033755274, |
|
"grad_norm": 0.57327800989151, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4614, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 0.5253164556962026, |
|
"grad_norm": 0.595667839050293, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4537, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 0.5263713080168776, |
|
"grad_norm": 0.5202828049659729, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4572, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 0.5274261603375527, |
|
"grad_norm": 0.561742901802063, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4678, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.5284810126582279, |
|
"grad_norm": 0.6367873549461365, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4594, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 0.5295358649789029, |
|
"grad_norm": 0.5855687260627747, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4534, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 0.5305907172995781, |
|
"grad_norm": 0.5318405032157898, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4396, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 0.5316455696202531, |
|
"grad_norm": 0.5266106128692627, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4657, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 0.5327004219409283, |
|
"grad_norm": 0.6130898594856262, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4534, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 0.5337552742616034, |
|
"grad_norm": 0.5058093667030334, |
|
"learning_rate": 0.0015, |
|
"loss": 1.465, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 0.5348101265822784, |
|
"grad_norm": 0.7037327289581299, |
|
"learning_rate": 0.0015, |
|
"loss": 1.452, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 0.5358649789029536, |
|
"grad_norm": 0.5680815577507019, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4676, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 0.5369198312236287, |
|
"grad_norm": 0.7030700445175171, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4598, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 0.5379746835443038, |
|
"grad_norm": 0.6677927374839783, |
|
"learning_rate": 0.0015, |
|
"loss": 1.442, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 0.5390295358649789, |
|
"grad_norm": 0.5371360182762146, |
|
"learning_rate": 0.0015, |
|
"loss": 1.456, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 0.540084388185654, |
|
"grad_norm": 0.6903518438339233, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4524, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 0.5411392405063291, |
|
"grad_norm": 0.5381430387496948, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4489, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 0.5421940928270043, |
|
"grad_norm": 0.7357153296470642, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4644, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 0.5432489451476793, |
|
"grad_norm": 0.5630563497543335, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4599, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 0.5443037974683544, |
|
"grad_norm": 0.5637800693511963, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4546, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 0.5453586497890295, |
|
"grad_norm": 0.5217317342758179, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4565, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 0.5464135021097046, |
|
"grad_norm": 0.6386004090309143, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4663, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 0.5474683544303798, |
|
"grad_norm": 0.6506286859512329, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4465, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 0.5485232067510548, |
|
"grad_norm": 0.7016057372093201, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4591, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 0.54957805907173, |
|
"grad_norm": 0.6944337487220764, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4463, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 0.5506329113924051, |
|
"grad_norm": 0.6351850628852844, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4553, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 0.5516877637130801, |
|
"grad_norm": 0.6268001794815063, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4496, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 0.5527426160337553, |
|
"grad_norm": 0.5437992215156555, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4564, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 0.5537974683544303, |
|
"grad_norm": 0.6136021614074707, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4477, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 0.5548523206751055, |
|
"grad_norm": 0.5644496083259583, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4539, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 0.5559071729957806, |
|
"grad_norm": 0.5912958979606628, |
|
"learning_rate": 0.0015, |
|
"loss": 1.461, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 0.5569620253164557, |
|
"grad_norm": 0.725299596786499, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4503, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 0.5580168776371308, |
|
"grad_norm": 0.527134358882904, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4504, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 0.5590717299578059, |
|
"grad_norm": 0.706976056098938, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4573, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 0.560126582278481, |
|
"grad_norm": 0.8941502571105957, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4439, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 0.5611814345991561, |
|
"grad_norm": 0.6011282205581665, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4446, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 0.5622362869198312, |
|
"grad_norm": 0.5284645557403564, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4557, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 0.5632911392405063, |
|
"grad_norm": 0.5965940952301025, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4487, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 0.5643459915611815, |
|
"grad_norm": 0.5459398627281189, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4382, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 0.5654008438818565, |
|
"grad_norm": 0.5910132527351379, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4485, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 0.5664556962025317, |
|
"grad_norm": 0.5403808951377869, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4485, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 0.5675105485232067, |
|
"grad_norm": 0.743107259273529, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4454, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 0.5685654008438819, |
|
"grad_norm": 0.7197443842887878, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4446, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 0.569620253164557, |
|
"grad_norm": 0.5556684732437134, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4439, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 0.570675105485232, |
|
"grad_norm": 0.5397831201553345, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4466, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 0.5717299578059072, |
|
"grad_norm": 0.5782108902931213, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4414, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 0.5727848101265823, |
|
"grad_norm": 0.7732884287834167, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4402, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 0.5738396624472574, |
|
"grad_norm": 0.5555076003074646, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4392, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 0.5748945147679325, |
|
"grad_norm": 0.5733400583267212, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4459, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 0.5759493670886076, |
|
"grad_norm": 0.6663935780525208, |
|
"learning_rate": 0.0015, |
|
"loss": 1.448, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 0.5770042194092827, |
|
"grad_norm": 0.5980082750320435, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4468, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 0.5780590717299579, |
|
"grad_norm": 0.7896722555160522, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4477, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 0.5791139240506329, |
|
"grad_norm": 0.5920150876045227, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4514, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 0.580168776371308, |
|
"grad_norm": 0.6280402541160583, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4431, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.5812236286919831, |
|
"grad_norm": 0.6328909993171692, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4502, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 0.5822784810126582, |
|
"grad_norm": 0.599063515663147, |
|
"learning_rate": 0.0015, |
|
"loss": 1.44, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 0.5833333333333334, |
|
"grad_norm": 0.6359413266181946, |
|
"learning_rate": 0.0015, |
|
"loss": 1.436, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 0.5843881856540084, |
|
"grad_norm": 0.5926364064216614, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4371, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 0.5854430379746836, |
|
"grad_norm": 0.608309805393219, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4529, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 0.5864978902953587, |
|
"grad_norm": 0.5794243812561035, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4546, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 0.5875527426160337, |
|
"grad_norm": 0.5262650847434998, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4407, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 0.5886075949367089, |
|
"grad_norm": 0.5861445665359497, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4393, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 0.5896624472573839, |
|
"grad_norm": 0.5474296808242798, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4427, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 0.5907172995780591, |
|
"grad_norm": 0.8075207471847534, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4387, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 0.5917721518987342, |
|
"grad_norm": 0.5450811386108398, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4435, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 0.5928270042194093, |
|
"grad_norm": 0.6229982376098633, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4514, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 0.5938818565400844, |
|
"grad_norm": 0.7292780876159668, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4442, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 0.5949367088607594, |
|
"grad_norm": 0.5265942215919495, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4549, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 0.5959915611814346, |
|
"grad_norm": 0.5641297101974487, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4506, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 0.5970464135021097, |
|
"grad_norm": 0.8600500822067261, |
|
"learning_rate": 0.0015, |
|
"loss": 1.446, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 0.5981012658227848, |
|
"grad_norm": 0.5192574858665466, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4502, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 0.5991561181434599, |
|
"grad_norm": 0.5317137241363525, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4482, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 0.6002109704641351, |
|
"grad_norm": 0.8360275030136108, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4275, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 0.6012658227848101, |
|
"grad_norm": 0.6226162314414978, |
|
"learning_rate": 0.0015, |
|
"loss": 1.438, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 0.6023206751054853, |
|
"grad_norm": 0.6044192910194397, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4403, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 0.6033755274261603, |
|
"grad_norm": 0.5660203099250793, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4417, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 0.6044303797468354, |
|
"grad_norm": 0.5431947708129883, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4419, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 0.6054852320675106, |
|
"grad_norm": 0.6212903261184692, |
|
"learning_rate": 0.0015, |
|
"loss": 1.44, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 0.6065400843881856, |
|
"grad_norm": 0.6808838248252869, |
|
"learning_rate": 0.0015, |
|
"loss": 1.414, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 0.6075949367088608, |
|
"grad_norm": 0.561962366104126, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4223, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 0.6086497890295358, |
|
"grad_norm": 0.6142343282699585, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4247, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 0.609704641350211, |
|
"grad_norm": 0.5520644783973694, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4344, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 0.6107594936708861, |
|
"grad_norm": 0.5744478702545166, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4308, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 0.6118143459915611, |
|
"grad_norm": 0.6295973658561707, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4377, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 0.6128691983122363, |
|
"grad_norm": 0.5243735313415527, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4401, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 0.6139240506329114, |
|
"grad_norm": 0.5113600492477417, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4511, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 0.6149789029535865, |
|
"grad_norm": 0.585241973400116, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4345, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 0.6160337552742616, |
|
"grad_norm": 0.6048781871795654, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4481, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 0.6170886075949367, |
|
"grad_norm": 0.5778324604034424, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4512, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 0.6181434599156118, |
|
"grad_norm": 0.5663139820098877, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4372, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 0.619198312236287, |
|
"grad_norm": 0.516270637512207, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4333, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 0.620253164556962, |
|
"grad_norm": 0.6882340312004089, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4428, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 0.6213080168776371, |
|
"grad_norm": 0.6412220001220703, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4334, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 0.6223628691983122, |
|
"grad_norm": 0.5238667130470276, |
|
"learning_rate": 0.0015, |
|
"loss": 1.435, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 0.6234177215189873, |
|
"grad_norm": 0.5808557868003845, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4282, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 0.6244725738396625, |
|
"grad_norm": 0.7341051697731018, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4274, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 0.6255274261603375, |
|
"grad_norm": 0.8130826950073242, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4306, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 0.6265822784810127, |
|
"grad_norm": 0.6295958161354065, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4392, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 0.6276371308016878, |
|
"grad_norm": 0.6690378189086914, |
|
"learning_rate": 0.0015, |
|
"loss": 1.427, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 0.6286919831223629, |
|
"grad_norm": 0.5623489618301392, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4341, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 0.629746835443038, |
|
"grad_norm": 0.5505332946777344, |
|
"learning_rate": 0.0015, |
|
"loss": 1.437, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 0.630801687763713, |
|
"grad_norm": 0.5139110684394836, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4171, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 0.6318565400843882, |
|
"grad_norm": 0.6092433333396912, |
|
"learning_rate": 0.0015, |
|
"loss": 1.438, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 0.6329113924050633, |
|
"grad_norm": 0.5405699610710144, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4483, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.6339662447257384, |
|
"grad_norm": 0.5922624468803406, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4344, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 0.6350210970464135, |
|
"grad_norm": 0.6210659146308899, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4326, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 0.6360759493670886, |
|
"grad_norm": 0.5855004191398621, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4157, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 0.6371308016877637, |
|
"grad_norm": 0.5853936076164246, |
|
"learning_rate": 0.0015, |
|
"loss": 1.435, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 0.6381856540084389, |
|
"grad_norm": 0.5281463861465454, |
|
"learning_rate": 0.0015, |
|
"loss": 1.419, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 0.6392405063291139, |
|
"grad_norm": 0.7227752804756165, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4388, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 0.640295358649789, |
|
"grad_norm": 0.7425885200500488, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4294, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 0.6413502109704642, |
|
"grad_norm": 0.5315224528312683, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4294, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 0.6424050632911392, |
|
"grad_norm": 0.5397734642028809, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4278, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 0.6434599156118144, |
|
"grad_norm": 0.5076785087585449, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4216, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 0.6445147679324894, |
|
"grad_norm": 0.5564759373664856, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4333, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 0.6455696202531646, |
|
"grad_norm": 0.5860690474510193, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4296, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 0.6466244725738397, |
|
"grad_norm": 0.5914776921272278, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4267, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 0.6476793248945147, |
|
"grad_norm": 0.9994964599609375, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4279, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 0.6487341772151899, |
|
"grad_norm": 0.6222503781318665, |
|
"learning_rate": 0.0015, |
|
"loss": 1.441, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 0.6497890295358649, |
|
"grad_norm": 0.5461714863777161, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4296, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 0.6508438818565401, |
|
"grad_norm": 0.5366678237915039, |
|
"learning_rate": 0.0014854972418331944, |
|
"loss": 1.4111, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 0.6518987341772152, |
|
"grad_norm": 0.5969879627227783, |
|
"learning_rate": 0.0014650219182191931, |
|
"loss": 1.4339, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 0.6529535864978903, |
|
"grad_norm": 0.5103928446769714, |
|
"learning_rate": 0.001444828815847542, |
|
"loss": 1.4352, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 0.6540084388185654, |
|
"grad_norm": 0.6483280658721924, |
|
"learning_rate": 0.0014249140447269945, |
|
"loss": 1.4254, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 0.6550632911392406, |
|
"grad_norm": 0.5745824575424194, |
|
"learning_rate": 0.0014052737684839257, |
|
"loss": 1.422, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 0.6561181434599156, |
|
"grad_norm": 0.6464072465896606, |
|
"learning_rate": 0.0013859042036232954, |
|
"loss": 1.4291, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 0.6571729957805907, |
|
"grad_norm": 0.9690040946006775, |
|
"learning_rate": 0.001366801618799797, |
|
"loss": 1.416, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 0.6582278481012658, |
|
"grad_norm": 0.6810333132743835, |
|
"learning_rate": 0.001347962334099052, |
|
"loss": 1.4176, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 0.6592827004219409, |
|
"grad_norm": 0.4878316819667816, |
|
"learning_rate": 0.0013293827203287143, |
|
"loss": 1.4245, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 0.6603375527426161, |
|
"grad_norm": 0.5093593001365662, |
|
"learning_rate": 0.0013110591983193423, |
|
"loss": 1.4125, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 0.6613924050632911, |
|
"grad_norm": 0.6860863566398621, |
|
"learning_rate": 0.0012929882382349102, |
|
"loss": 1.4218, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 0.6624472573839663, |
|
"grad_norm": 0.5376235842704773, |
|
"learning_rate": 0.0012751663588928214, |
|
"loss": 1.417, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 0.6635021097046413, |
|
"grad_norm": 0.5228108763694763, |
|
"learning_rate": 0.0012575901270932943, |
|
"loss": 1.4114, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 0.6645569620253164, |
|
"grad_norm": 0.6319876313209534, |
|
"learning_rate": 0.0012402561569579936, |
|
"loss": 1.4056, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 0.6656118143459916, |
|
"grad_norm": 0.7265379428863525, |
|
"learning_rate": 0.0012231611092777745, |
|
"loss": 1.4061, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 0.5666329264640808, |
|
"learning_rate": 0.0012063016908694193, |
|
"loss": 1.3991, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 0.6677215189873418, |
|
"grad_norm": 0.546264111995697, |
|
"learning_rate": 0.0011896746539412405, |
|
"loss": 1.4098, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 0.6687763713080169, |
|
"grad_norm": 0.5310012102127075, |
|
"learning_rate": 0.0011732767954674265, |
|
"loss": 1.4009, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 0.669831223628692, |
|
"grad_norm": 0.6444507241249084, |
|
"learning_rate": 0.0011571049565710122, |
|
"loss": 1.4051, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 0.6708860759493671, |
|
"grad_norm": 0.7691652774810791, |
|
"learning_rate": 0.001141156021915355, |
|
"loss": 1.4021, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 0.6719409282700421, |
|
"grad_norm": 0.5308224558830261, |
|
"learning_rate": 0.001125426919103997, |
|
"loss": 1.3832, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 0.6729957805907173, |
|
"grad_norm": 0.49382564425468445, |
|
"learning_rate": 0.001109914618088799, |
|
"loss": 1.3857, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 0.6740506329113924, |
|
"grad_norm": 0.7249054312705994, |
|
"learning_rate": 0.0010946161305862348, |
|
"loss": 1.3995, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 0.6751054852320675, |
|
"grad_norm": 0.7635771036148071, |
|
"learning_rate": 0.001079528509501728, |
|
"loss": 1.3984, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 0.6761603375527426, |
|
"grad_norm": 0.5237568020820618, |
|
"learning_rate": 0.0010646488483619261, |
|
"loss": 1.3913, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 0.6772151898734177, |
|
"grad_norm": 0.5200632810592651, |
|
"learning_rate": 0.0010499742807547976, |
|
"loss": 1.3801, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 0.6782700421940928, |
|
"grad_norm": 0.4805111587047577, |
|
"learning_rate": 0.0010355019797774478, |
|
"loss": 1.3887, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 0.679324894514768, |
|
"grad_norm": 0.6184947490692139, |
|
"learning_rate": 0.001021229157491546, |
|
"loss": 1.3747, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 0.680379746835443, |
|
"grad_norm": 0.5290433764457703, |
|
"learning_rate": 0.0010071530643862578, |
|
"loss": 1.3779, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 0.6814345991561181, |
|
"grad_norm": 0.4848653972148895, |
|
"learning_rate": 0.000993270988848579, |
|
"loss": 1.3894, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 0.6824894514767933, |
|
"grad_norm": 0.5046072602272034, |
|
"learning_rate": 0.0009795802566409742, |
|
"loss": 1.3842, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 0.6835443037974683, |
|
"grad_norm": 0.6274845004081726, |
|
"learning_rate": 0.0009660782303862109, |
|
"loss": 1.3926, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 0.6845991561181435, |
|
"grad_norm": 0.49987682700157166, |
|
"learning_rate": 0.0009527623090592963, |
|
"loss": 1.3827, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 0.6856540084388185, |
|
"grad_norm": 0.5083965063095093, |
|
"learning_rate": 0.0009396299274864177, |
|
"loss": 1.398, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.6867088607594937, |
|
"grad_norm": 0.49454379081726074, |
|
"learning_rate": 0.0009266785558507877, |
|
"loss": 1.3962, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 0.6877637130801688, |
|
"grad_norm": 0.5040996074676514, |
|
"learning_rate": 0.0009139056992053016, |
|
"loss": 1.3786, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 0.6888185654008439, |
|
"grad_norm": 0.667354941368103, |
|
"learning_rate": 0.000901308896991912, |
|
"loss": 1.3717, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 0.689873417721519, |
|
"grad_norm": 0.5207562446594238, |
|
"learning_rate": 0.000888885722567627, |
|
"loss": 1.3789, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 0.6909282700421941, |
|
"grad_norm": 0.519875705242157, |
|
"learning_rate": 0.0008766337827370438, |
|
"loss": 1.3727, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 0.6919831223628692, |
|
"grad_norm": 0.7908602952957153, |
|
"learning_rate": 0.000864550717291324, |
|
"loss": 1.3749, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 0.6930379746835443, |
|
"grad_norm": 0.762065589427948, |
|
"learning_rate": 0.0008526341985535229, |
|
"loss": 1.3702, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 0.6940928270042194, |
|
"grad_norm": 0.5344306230545044, |
|
"learning_rate": 0.0008408819309301891, |
|
"loss": 1.3655, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 0.6951476793248945, |
|
"grad_norm": 0.6269177198410034, |
|
"learning_rate": 0.0008292916504691397, |
|
"loss": 1.3701, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 0.6962025316455697, |
|
"grad_norm": 0.4671165347099304, |
|
"learning_rate": 0.0008178611244233354, |
|
"loss": 1.3761, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 0.6972573839662447, |
|
"grad_norm": 0.5067280530929565, |
|
"learning_rate": 0.0008065881508207637, |
|
"loss": 1.3606, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 0.6983122362869199, |
|
"grad_norm": 0.4908502995967865, |
|
"learning_rate": 0.0007954705580402523, |
|
"loss": 1.3697, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 0.6993670886075949, |
|
"grad_norm": 0.49855223298072815, |
|
"learning_rate": 0.0007845062043931298, |
|
"loss": 1.36, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 0.70042194092827, |
|
"grad_norm": 0.5829418897628784, |
|
"learning_rate": 0.0007736929777106497, |
|
"loss": 1.3663, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 0.7014767932489452, |
|
"grad_norm": 0.5269045233726501, |
|
"learning_rate": 0.000763028794937105, |
|
"loss": 1.3586, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 0.7025316455696202, |
|
"grad_norm": 0.46957603096961975, |
|
"learning_rate": 0.0007525116017285476, |
|
"loss": 1.3576, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 0.7035864978902954, |
|
"grad_norm": 0.4709470272064209, |
|
"learning_rate": 0.0007421393720570417, |
|
"loss": 1.3605, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 0.7046413502109705, |
|
"grad_norm": 0.4660401940345764, |
|
"learning_rate": 0.0007319101078203694, |
|
"loss": 1.3594, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 0.7056962025316456, |
|
"grad_norm": 0.6550858616828918, |
|
"learning_rate": 0.0007218218384571178, |
|
"loss": 1.3543, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 0.7067510548523207, |
|
"grad_norm": 0.55482017993927, |
|
"learning_rate": 0.0007118726205670703, |
|
"loss": 1.355, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 0.7078059071729957, |
|
"grad_norm": 0.5752652883529663, |
|
"learning_rate": 0.0007020605375368316, |
|
"loss": 1.3582, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 0.7088607594936709, |
|
"grad_norm": 0.6009365320205688, |
|
"learning_rate": 0.000692383699170611, |
|
"loss": 1.3487, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 0.709915611814346, |
|
"grad_norm": 0.508026659488678, |
|
"learning_rate": 0.0006828402413260966, |
|
"loss": 1.3564, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 0.7109704641350211, |
|
"grad_norm": 0.5703988075256348, |
|
"learning_rate": 0.0006734283255553471, |
|
"loss": 1.3563, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 0.7120253164556962, |
|
"grad_norm": 0.4827798008918762, |
|
"learning_rate": 0.0006641461387506347, |
|
"loss": 1.3582, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 0.7130801687763713, |
|
"grad_norm": 0.5830769538879395, |
|
"learning_rate": 0.0006549918927951678, |
|
"loss": 1.3562, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 0.7141350210970464, |
|
"grad_norm": 0.5382652878761292, |
|
"learning_rate": 0.0006459638242186297, |
|
"loss": 1.3544, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 0.7151898734177216, |
|
"grad_norm": 0.4844338893890381, |
|
"learning_rate": 0.0006370601938574639, |
|
"loss": 1.3535, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 0.7162447257383966, |
|
"grad_norm": 0.5328819751739502, |
|
"learning_rate": 0.0006282792865198421, |
|
"loss": 1.3573, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 0.7172995780590717, |
|
"grad_norm": 0.48490241169929504, |
|
"learning_rate": 0.0006196194106552512, |
|
"loss": 1.3444, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 0.7183544303797469, |
|
"grad_norm": 0.5487757921218872, |
|
"learning_rate": 0.0006110788980286328, |
|
"loss": 1.3464, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 0.7194092827004219, |
|
"grad_norm": 0.5482735633850098, |
|
"learning_rate": 0.0006026561033990158, |
|
"loss": 1.3486, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 0.7204641350210971, |
|
"grad_norm": 0.5385942459106445, |
|
"learning_rate": 0.000594349404202577, |
|
"loss": 1.3443, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 0.7215189873417721, |
|
"grad_norm": 0.6625657081604004, |
|
"learning_rate": 0.0005861572002400716, |
|
"loss": 1.3345, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 0.7225738396624473, |
|
"grad_norm": 0.5083481073379517, |
|
"learning_rate": 0.0005780779133685717, |
|
"loss": 1.343, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 0.7236286919831224, |
|
"grad_norm": 0.5370842218399048, |
|
"learning_rate": 0.0005701099871974524, |
|
"loss": 1.3387, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 0.7246835443037974, |
|
"grad_norm": 0.4709780812263489, |
|
"learning_rate": 0.0005622518867885708, |
|
"loss": 1.3348, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 0.7257383966244726, |
|
"grad_norm": 0.5011778473854065, |
|
"learning_rate": 0.0005545020983605748, |
|
"loss": 1.3441, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 0.7267932489451476, |
|
"grad_norm": 0.5303826928138733, |
|
"learning_rate": 0.0005468591289972898, |
|
"loss": 1.3316, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 0.7278481012658228, |
|
"grad_norm": 0.48876091837882996, |
|
"learning_rate": 0.0005393215063601232, |
|
"loss": 1.3325, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 0.7289029535864979, |
|
"grad_norm": 0.5018401145935059, |
|
"learning_rate": 0.0005318877784044343, |
|
"loss": 1.356, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 0.729957805907173, |
|
"grad_norm": 0.5491061806678772, |
|
"learning_rate": 0.0005245565130998126, |
|
"loss": 1.3361, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 0.7310126582278481, |
|
"grad_norm": 0.4945247769355774, |
|
"learning_rate": 0.000517326298154212, |
|
"loss": 1.3428, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 0.7320675105485233, |
|
"grad_norm": 0.5948100686073303, |
|
"learning_rate": 0.0005101957407418877, |
|
"loss": 1.3448, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 0.7331223628691983, |
|
"grad_norm": 0.4961825907230377, |
|
"learning_rate": 0.0005031634672350829, |
|
"loss": 1.3417, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 0.7341772151898734, |
|
"grad_norm": 0.5309960246086121, |
|
"learning_rate": 0.0004962281229394129, |
|
"loss": 1.3315, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 0.7352320675105485, |
|
"grad_norm": 0.4714023470878601, |
|
"learning_rate": 0.0004893883718328983, |
|
"loss": 1.3264, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 0.7362869198312236, |
|
"grad_norm": 0.5240657329559326, |
|
"learning_rate": 0.0004826428963085938, |
|
"loss": 1.3293, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 0.7373417721518988, |
|
"grad_norm": 0.5004906058311462, |
|
"learning_rate": 0.00047599039692076457, |
|
"loss": 1.3311, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 0.7383966244725738, |
|
"grad_norm": 0.4975900948047638, |
|
"learning_rate": 0.0004694295921345622, |
|
"loss": 1.3408, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.739451476793249, |
|
"grad_norm": 0.505084216594696, |
|
"learning_rate": 0.00046295921807915015, |
|
"loss": 1.3135, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 0.740506329113924, |
|
"grad_norm": 0.6532091498374939, |
|
"learning_rate": 0.00045657802830423164, |
|
"loss": 1.3279, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 0.7415611814345991, |
|
"grad_norm": 0.5637933611869812, |
|
"learning_rate": 0.00045028479353993473, |
|
"loss": 1.3303, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 0.7426160337552743, |
|
"grad_norm": 0.691018283367157, |
|
"learning_rate": 0.00044407830146000587, |
|
"loss": 1.3362, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 0.7436708860759493, |
|
"grad_norm": 0.569522500038147, |
|
"learning_rate": 0.0004379573564482676, |
|
"loss": 1.3213, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 0.7447257383966245, |
|
"grad_norm": 0.5943568348884583, |
|
"learning_rate": 0.0004319207793682963, |
|
"loss": 1.329, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 0.7457805907172996, |
|
"grad_norm": 0.6255069375038147, |
|
"learning_rate": 0.0004259674073362731, |
|
"loss": 1.335, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 0.7468354430379747, |
|
"grad_norm": 0.5183704495429993, |
|
"learning_rate": 0.00042009609349696626, |
|
"loss": 1.3295, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 0.7478902953586498, |
|
"grad_norm": 0.4704875946044922, |
|
"learning_rate": 0.00041430570680280233, |
|
"loss": 1.3277, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 0.7489451476793249, |
|
"grad_norm": 0.5074374079704285, |
|
"learning_rate": 0.0004085951317959809, |
|
"loss": 1.3227, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.5200400948524475, |
|
"learning_rate": 0.00040296326839359315, |
|
"loss": 1.332, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 0.7510548523206751, |
|
"grad_norm": 0.6528633832931519, |
|
"learning_rate": 0.000397409031675703, |
|
"loss": 1.3265, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 0.7521097046413502, |
|
"grad_norm": 0.49102628231048584, |
|
"learning_rate": 0.00039193135167634786, |
|
"loss": 1.335, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 0.7531645569620253, |
|
"grad_norm": 0.5010769367218018, |
|
"learning_rate": 0.00038652917317742123, |
|
"loss": 1.318, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 0.7542194092827004, |
|
"grad_norm": 0.5290884375572205, |
|
"learning_rate": 0.0003812014555053956, |
|
"loss": 1.3173, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 0.7552742616033755, |
|
"grad_norm": 0.46309688687324524, |
|
"learning_rate": 0.00037594717233084774, |
|
"loss": 1.3241, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 0.7563291139240507, |
|
"grad_norm": 0.5092751979827881, |
|
"learning_rate": 0.0003707653114707471, |
|
"loss": 1.3304, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 0.7573839662447257, |
|
"grad_norm": 0.5297157764434814, |
|
"learning_rate": 0.00036565487469346906, |
|
"loss": 1.3099, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 0.7584388185654009, |
|
"grad_norm": 0.47773805260658264, |
|
"learning_rate": 0.0003606148775264958, |
|
"loss": 1.3127, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 0.759493670886076, |
|
"grad_norm": 0.5312460660934448, |
|
"learning_rate": 0.0003556443490667684, |
|
"loss": 1.3177, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 0.760548523206751, |
|
"grad_norm": 0.6486009359359741, |
|
"learning_rate": 0.0003507423317936521, |
|
"loss": 1.321, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 0.7616033755274262, |
|
"grad_norm": 0.47344207763671875, |
|
"learning_rate": 0.00034590788138448006, |
|
"loss": 1.335, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 0.7626582278481012, |
|
"grad_norm": 0.47290611267089844, |
|
"learning_rate": 0.0003411400665326393, |
|
"loss": 1.3292, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 0.7637130801687764, |
|
"grad_norm": 0.4823418855667114, |
|
"learning_rate": 0.00033643796876816424, |
|
"loss": 1.3247, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 0.7647679324894515, |
|
"grad_norm": 0.5188544392585754, |
|
"learning_rate": 0.000331800682280803, |
|
"loss": 1.3189, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 0.7658227848101266, |
|
"grad_norm": 0.46605202555656433, |
|
"learning_rate": 0.0003272273137455226, |
|
"loss": 1.3199, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 0.7668776371308017, |
      "grad_norm": 0.5110488533973694,
      "learning_rate": 0.00032271698215041863,
      "loss": 1.3141,
      "step": 7270
    },
    {
      "epoch": 0.7679324894514767,
      "grad_norm": 0.4910179078578949,
      "learning_rate": 0.0003182688186269984,
      "loss": 1.3187,
      "step": 7280
    },
    {
      "epoch": 0.7689873417721519,
      "grad_norm": 0.5152925252914429,
      "learning_rate": 0.0003138819662828017,
      "loss": 1.326,
      "step": 7290
    },
    {
      "epoch": 0.770042194092827,
      "grad_norm": 0.4966948926448822,
      "learning_rate": 0.00030955558003632966,
      "loss": 1.3289,
      "step": 7300
    },
    {
      "epoch": 0.7710970464135021,
      "grad_norm": 0.4542575776576996,
      "learning_rate": 0.0003052888264542483,
      "loss": 1.3136,
      "step": 7310
    },
    {
      "epoch": 0.7721518987341772,
      "grad_norm": 0.5378862023353577,
      "learning_rate": 0.0003010808835908368,
      "loss": 1.3061,
      "step": 7320
    },
    {
      "epoch": 0.7732067510548524,
      "grad_norm": 0.4870184361934662,
      "learning_rate": 0.00029693094082964785,
      "loss": 1.3139,
      "step": 7330
    },
    {
      "epoch": 0.7742616033755274,
      "grad_norm": 0.5277137160301208,
      "learning_rate": 0.0002928381987273508,
      "loss": 1.3095,
      "step": 7340
    },
    {
      "epoch": 0.7753164556962026,
      "grad_norm": 0.4717441499233246,
      "learning_rate": 0.0002888018688597272,
      "loss": 1.3065,
      "step": 7350
    },
    {
      "epoch": 0.7763713080168776,
      "grad_norm": 0.49216166138648987,
      "learning_rate": 0.0002848211736697894,
      "loss": 1.3147,
      "step": 7360
    },
    {
      "epoch": 0.7774261603375527,
      "grad_norm": 0.5425299406051636,
      "learning_rate": 0.00028089534631799183,
      "loss": 1.3152,
      "step": 7370
    },
    {
      "epoch": 0.7784810126582279,
      "grad_norm": 0.45568835735321045,
      "learning_rate": 0.0002770236305345076,
      "loss": 1.3028,
      "step": 7380
    },
    {
      "epoch": 0.7795358649789029,
      "grad_norm": 0.47306162118911743,
      "learning_rate": 0.00027320528047354093,
      "loss": 1.3118,
      "step": 7390
    },
    {
      "epoch": 0.7805907172995781,
      "grad_norm": 0.480543315410614,
      "learning_rate": 0.00026943956056964773,
      "loss": 1.3191,
      "step": 7400
    },
    {
      "epoch": 0.7816455696202531,
      "grad_norm": 0.48114633560180664,
      "learning_rate": 0.0002657257453960364,
      "loss": 1.2981,
      "step": 7410
    },
    {
      "epoch": 0.7827004219409283,
      "grad_norm": 0.49501311779022217,
      "learning_rate": 0.0002620631195248222,
      "loss": 1.3052,
      "step": 7420
    },
    {
      "epoch": 0.7837552742616034,
      "grad_norm": 0.48266351222991943,
      "learning_rate": 0.00025845097738920735,
      "loss": 1.2905,
      "step": 7430
    },
    {
      "epoch": 0.7848101265822784,
      "grad_norm": 0.5152450799942017,
      "learning_rate": 0.0002548886231475606,
      "loss": 1.2963,
      "step": 7440
    },
    {
      "epoch": 0.7858649789029536,
      "grad_norm": 0.4615631401538849,
      "learning_rate": 0.0002513753705493713,
      "loss": 1.2994,
      "step": 7450
    },
    {
      "epoch": 0.7869198312236287,
      "grad_norm": 0.47058191895484924,
      "learning_rate": 0.0002479105428030497,
      "loss": 1.3083,
      "step": 7460
    },
    {
      "epoch": 0.7879746835443038,
      "grad_norm": 0.4936390221118927,
      "learning_rate": 0.00024449347244555043,
      "loss": 1.3,
      "step": 7470
    },
    {
      "epoch": 0.7890295358649789,
      "grad_norm": 0.4663834869861603,
      "learning_rate": 0.00024112350121379254,
      "loss": 1.3077,
      "step": 7480
    },
    {
      "epoch": 0.790084388185654,
      "grad_norm": 0.464754581451416,
      "learning_rate": 0.000237799979917852,
      "loss": 1.3138,
      "step": 7490
    },
    {
      "epoch": 0.7911392405063291,
      "grad_norm": 0.49306800961494446,
      "learning_rate": 0.00023452226831590227,
      "loss": 1.3078,
      "step": 7500
    },
    {
      "epoch": 0.7921940928270043,
      "grad_norm": 0.5315954685211182,
      "learning_rate": 0.00023128973499087779,
      "loss": 1.3156,
      "step": 7510
    },
    {
      "epoch": 0.7932489451476793,
      "grad_norm": 0.45483651757240295,
      "learning_rate": 0.00022810175722883858,
      "loss": 1.2992,
      "step": 7520
    },
    {
      "epoch": 0.7943037974683544,
      "grad_norm": 0.5130757093429565,
      "learning_rate": 0.0002249577208990106,
      "loss": 1.3006,
      "step": 7530
    },
    {
      "epoch": 0.7953586497890295,
      "grad_norm": 0.5637947916984558,
      "learning_rate": 0.00022185702033547996,
      "loss": 1.2995,
      "step": 7540
    },
    {
      "epoch": 0.7964135021097046,
      "grad_norm": 0.5045897960662842,
      "learning_rate": 0.00021879905822051756,
      "loss": 1.2965,
      "step": 7550
    },
    {
      "epoch": 0.7974683544303798,
      "grad_norm": 0.5311060547828674,
      "learning_rate": 0.00021578324546951222,
      "loss": 1.2967,
      "step": 7560
    },
    {
      "epoch": 0.7985232067510548,
      "grad_norm": 0.5004249811172485,
      "learning_rate": 0.00021280900111748948,
      "loss": 1.3082,
      "step": 7570
    },
    {
      "epoch": 0.79957805907173,
      "grad_norm": 0.456435889005661,
      "learning_rate": 0.00020987575220719483,
      "loss": 1.303,
      "step": 7580
    },
    {
      "epoch": 0.8006329113924051,
      "grad_norm": 0.46554267406463623,
      "learning_rate": 0.00020698293367871933,
      "loss": 1.2948,
      "step": 7590
    },
    {
      "epoch": 0.8016877637130801,
      "grad_norm": 0.47276362776756287,
      "learning_rate": 0.00020412998826064692,
      "loss": 1.2991,
      "step": 7600
    },
    {
      "epoch": 0.8027426160337553,
      "grad_norm": 0.4799243211746216,
      "learning_rate": 0.00020131636636270178,
      "loss": 1.3118,
      "step": 7610
    },
    {
      "epoch": 0.8037974683544303,
      "grad_norm": 0.44948357343673706,
      "learning_rate": 0.00019854152596987523,
      "loss": 1.2852,
      "step": 7620
    },
    {
      "epoch": 0.8048523206751055,
      "grad_norm": 0.5160753130912781,
      "learning_rate": 0.00019580493253801255,
      "loss": 1.3087,
      "step": 7630
    },
    {
      "epoch": 0.8059071729957806,
      "grad_norm": 0.5136882066726685,
      "learning_rate": 0.00019310605889083838,
      "loss": 1.2994,
      "step": 7640
    },
    {
      "epoch": 0.8069620253164557,
      "grad_norm": 0.4668951630592346,
      "learning_rate": 0.0001904443851184018,
      "loss": 1.315,
      "step": 7650
    },
    {
      "epoch": 0.8080168776371308,
      "grad_norm": 0.4726894795894623,
      "learning_rate": 0.00018781939847692096,
      "loss": 1.2922,
      "step": 7660
    },
    {
      "epoch": 0.8090717299578059,
      "grad_norm": 0.4939499795436859,
      "learning_rate": 0.00018523059329000844,
      "loss": 1.2987,
      "step": 7670
    },
    {
      "epoch": 0.810126582278481,
      "grad_norm": 0.5483400225639343,
      "learning_rate": 0.0001826774708512579,
      "loss": 1.3123,
      "step": 7680
    },
    {
      "epoch": 0.8111814345991561,
      "grad_norm": 0.5137965679168701,
      "learning_rate": 0.00018015953932817348,
      "loss": 1.2875,
      "step": 7690
    },
    {
      "epoch": 0.8122362869198312,
      "grad_norm": 0.4608552157878876,
      "learning_rate": 0.00017767631366742332,
      "loss": 1.2954,
      "step": 7700
    },
    {
      "epoch": 0.8132911392405063,
      "grad_norm": 0.5277746915817261,
      "learning_rate": 0.00017522731550139922,
      "loss": 1.2946,
      "step": 7710
    },
    {
      "epoch": 0.8143459915611815,
      "grad_norm": 0.5126060843467712,
      "learning_rate": 0.00017281207305606407,
      "loss": 1.2962,
      "step": 7720
    },
    {
      "epoch": 0.8154008438818565,
      "grad_norm": 0.5055632591247559,
      "learning_rate": 0.00017043012106006926,
      "loss": 1.3032,
      "step": 7730
    },
    {
      "epoch": 0.8164556962025317,
      "grad_norm": 0.4589047431945801,
      "learning_rate": 0.00016808100065512528,
      "loss": 1.3038,
      "step": 7740
    },
    {
      "epoch": 0.8175105485232067,
      "grad_norm": 0.5196783542633057,
      "learning_rate": 0.00016576425930760734,
      "loss": 1.2913,
      "step": 7750
    },
    {
      "epoch": 0.8185654008438819,
      "grad_norm": 0.5244337320327759,
      "learning_rate": 0.00016347945072137934,
      "loss": 1.2892,
      "step": 7760
    },
    {
      "epoch": 0.819620253164557,
      "grad_norm": 0.5240455865859985,
      "learning_rate": 0.00016122613475181977,
      "loss": 1.2988,
      "step": 7770
    },
    {
      "epoch": 0.820675105485232,
      "grad_norm": 0.5089477896690369,
      "learning_rate": 0.00015900387732103232,
      "loss": 1.291,
      "step": 7780
    },
    {
      "epoch": 0.8217299578059072,
      "grad_norm": 0.5250300765037537,
      "learning_rate": 0.00015681225033422526,
      "loss": 1.3059,
      "step": 7790
    },
    {
      "epoch": 0.8227848101265823,
      "grad_norm": 0.44551143050193787,
      "learning_rate": 0.00015465083159724345,
      "loss": 1.2953,
      "step": 7800
    },
    {
      "epoch": 0.8238396624472574,
      "grad_norm": 0.5375047922134399,
      "learning_rate": 0.0001525192047352371,
      "loss": 1.2942,
      "step": 7810
    },
    {
      "epoch": 0.8248945147679325,
      "grad_norm": 0.4749961793422699,
      "learning_rate": 0.00015041695911245136,
      "loss": 1.3014,
      "step": 7820
    },
    {
      "epoch": 0.8259493670886076,
      "grad_norm": 0.4921550154685974,
      "learning_rate": 0.00014834368975312172,
      "loss": 1.2897,
      "step": 7830
    },
    {
      "epoch": 0.8270042194092827,
      "grad_norm": 0.4852724075317383,
      "learning_rate": 0.00014629899726345958,
      "loss": 1.2765,
      "step": 7840
    },
    {
      "epoch": 0.8280590717299579,
      "grad_norm": 0.4684356153011322,
      "learning_rate": 0.00014428248775471316,
      "loss": 1.2851,
      "step": 7850
    },
    {
      "epoch": 0.8291139240506329,
      "grad_norm": 0.4646863341331482,
      "learning_rate": 0.000142293772767289,
      "loss": 1.274,
      "step": 7860
    },
    {
      "epoch": 0.830168776371308,
      "grad_norm": 0.5266587138175964,
      "learning_rate": 0.00014033246919591922,
      "loss": 1.2888,
      "step": 7870
    },
    {
      "epoch": 0.8312236286919831,
      "grad_norm": 0.4876491129398346,
      "learning_rate": 0.00013839819921586025,
      "loss": 1.3102,
      "step": 7880
    },
    {
      "epoch": 0.8322784810126582,
      "grad_norm": 0.49130967259407043,
      "learning_rate": 0.00013649059021010894,
      "loss": 1.305,
      "step": 7890
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 0.49542373418807983,
      "learning_rate": 0.00013460927469762155,
      "loss": 1.2852,
      "step": 7900
    },
    {
      "epoch": 0.8343881856540084,
      "grad_norm": 0.4820687174797058,
      "learning_rate": 0.00013275389026252255,
      "loss": 1.2938,
      "step": 7910
    },
    {
      "epoch": 0.8354430379746836,
      "grad_norm": 0.4949972629547119,
      "learning_rate": 0.0001309240794842889,
      "loss": 1.2911,
      "step": 7920
    },
    {
      "epoch": 0.8364978902953587,
      "grad_norm": 0.5055283308029175,
      "learning_rate": 0.00012911948986889664,
      "loss": 1.3017,
      "step": 7930
    },
    {
      "epoch": 0.8375527426160337,
      "grad_norm": 0.4902411997318268,
      "learning_rate": 0.00012733977378091664,
      "loss": 1.2943,
      "step": 7940
    },
    {
      "epoch": 0.8386075949367089,
      "grad_norm": 0.5116524696350098,
      "learning_rate": 0.00012558458837654633,
      "loss": 1.3001,
      "step": 7950
    },
    {
      "epoch": 0.8396624472573839,
      "grad_norm": 0.46216875314712524,
      "learning_rate": 0.00012385359553756422,
      "loss": 1.283,
      "step": 7960
    },
    {
      "epoch": 0.8407172995780591,
      "grad_norm": 0.4756139814853668,
      "learning_rate": 0.0001221464618061951,
      "loss": 1.2841,
      "step": 7970
    },
    {
      "epoch": 0.8417721518987342,
      "grad_norm": 0.4843553602695465,
      "learning_rate": 0.0001204628583208727,
      "loss": 1.2809,
      "step": 7980
    },
    {
      "epoch": 0.8428270042194093,
      "grad_norm": 0.5512832999229431,
      "learning_rate": 0.00011880246075288824,
      "loss": 1.2936,
      "step": 7990
    },
    {
      "epoch": 0.8438818565400844,
      "grad_norm": 0.5163074135780334,
      "learning_rate": 0.00011716494924391148,
      "loss": 1.2894,
      "step": 8000
    },
    {
      "epoch": 0.8449367088607594,
      "grad_norm": 0.47564107179641724,
      "learning_rate": 0.00011555000834437363,
      "loss": 1.2964,
      "step": 8010
    },
    {
      "epoch": 0.8459915611814346,
      "grad_norm": 0.5144618153572083,
      "learning_rate": 0.00011395732695269907,
      "loss": 1.2881,
      "step": 8020
    },
    {
      "epoch": 0.8470464135021097,
      "grad_norm": 0.5242222547531128,
      "learning_rate": 0.00011238659825537507,
      "loss": 1.2879,
      "step": 8030
    },
    {
      "epoch": 0.8481012658227848,
      "grad_norm": 0.46482113003730774,
      "learning_rate": 0.00011083751966784716,
      "loss": 1.3019,
      "step": 8040
    },
    {
      "epoch": 0.8491561181434599,
      "grad_norm": 0.46565577387809753,
      "learning_rate": 0.00010930979277622952,
      "loss": 1.2812,
      "step": 8050
    },
    {
      "epoch": 0.8502109704641351,
      "grad_norm": 0.4554700553417206,
      "learning_rate": 0.00010780312327981853,
      "loss": 1.2898,
      "step": 8060
    },
    {
      "epoch": 0.8512658227848101,
      "grad_norm": 0.46447324752807617,
      "learning_rate": 0.0001063172209343989,
      "loss": 1.2946,
      "step": 8070
    },
    {
      "epoch": 0.8523206751054853,
      "grad_norm": 0.4921494126319885,
      "learning_rate": 0.000104851799496331,
      "loss": 1.2951,
      "step": 8080
    },
    {
      "epoch": 0.8533755274261603,
      "grad_norm": 0.48273053765296936,
      "learning_rate": 0.00010340657666740917,
      "loss": 1.283,
      "step": 8090
    },
    {
      "epoch": 0.8544303797468354,
      "grad_norm": 0.4731471538543701,
      "learning_rate": 0.00010198127404047976,
      "loss": 1.2775,
      "step": 8100
    },
    {
      "epoch": 0.8554852320675106,
      "grad_norm": 0.5514680743217468,
      "learning_rate": 0.00010057561704580898,
      "loss": 1.2962,
      "step": 8110
    },
    {
      "epoch": 0.8565400843881856,
      "grad_norm": 0.47624343633651733,
      "learning_rate": 9.918933489818986e-05,
      "loss": 1.2869,
      "step": 8120
    },
    {
      "epoch": 0.8575949367088608,
      "grad_norm": 0.5039873123168945,
      "learning_rate": 9.782216054477828e-05,
      "loss": 1.2844,
      "step": 8130
    },
    {
      "epoch": 0.8586497890295358,
      "grad_norm": 0.4922039806842804,
      "learning_rate": 9.647383061364803e-05,
      "loss": 1.2843,
      "step": 8140
    },
    {
      "epoch": 0.859704641350211,
      "grad_norm": 0.4668273627758026,
      "learning_rate": 9.514408536305497e-05,
      "loss": 1.2804,
      "step": 8150
    },
    {
      "epoch": 0.8607594936708861,
      "grad_norm": 0.4708327054977417,
      "learning_rate": 9.383266863140043e-05,
      "loss": 1.2913,
      "step": 8160
    },
    {
      "epoch": 0.8618143459915611,
      "grad_norm": 0.47959965467453003,
      "learning_rate": 9.25393277878844e-05,
      "loss": 1.282,
      "step": 8170
    },
    {
      "epoch": 0.8628691983122363,
      "grad_norm": 0.46921077370643616,
      "learning_rate": 9.126381368383881e-05,
      "loss": 1.288,
      "step": 8180
    },
    {
      "epoch": 0.8639240506329114,
      "grad_norm": 0.5404256582260132,
      "learning_rate": 9.000588060473158e-05,
      "loss": 1.2832,
      "step": 8190
    },
    {
      "epoch": 0.8649789029535865,
      "grad_norm": 0.47403275966644287,
      "learning_rate": 8.876528622283232e-05,
      "loss": 1.2952,
      "step": 8200
    },
    {
      "epoch": 0.8660337552742616,
      "grad_norm": 0.4553932249546051,
      "learning_rate": 8.754179155053052e-05,
      "loss": 1.2874,
      "step": 8210
    },
    {
      "epoch": 0.8670886075949367,
      "grad_norm": 0.47285547852516174,
      "learning_rate": 8.63351608942968e-05,
      "loss": 1.2934,
      "step": 8220
    },
    {
      "epoch": 0.8681434599156118,
      "grad_norm": 0.460549920797348,
      "learning_rate": 8.514516180927926e-05,
      "loss": 1.2891,
      "step": 8230
    },
    {
      "epoch": 0.869198312236287,
      "grad_norm": 0.46874484419822693,
      "learning_rate": 8.397156505452524e-05,
      "loss": 1.2865,
      "step": 8240
    },
    {
      "epoch": 0.870253164556962,
      "grad_norm": 0.4736628830432892,
      "learning_rate": 8.28141445488205e-05,
      "loss": 1.3032,
      "step": 8250
    },
    {
      "epoch": 0.8713080168776371,
      "grad_norm": 0.46797215938568115,
      "learning_rate": 8.167267732713705e-05,
      "loss": 1.2918,
      "step": 8260
    },
    {
      "epoch": 0.8723628691983122,
      "grad_norm": 0.46798843145370483,
      "learning_rate": 8.054694349768114e-05,
      "loss": 1.2877,
      "step": 8270
    },
    {
      "epoch": 0.8734177215189873,
      "grad_norm": 0.4596850872039795,
      "learning_rate": 7.943672619953359e-05,
      "loss": 1.2736,
      "step": 8280
    },
    {
      "epoch": 0.8744725738396625,
      "grad_norm": 0.4654959440231323,
      "learning_rate": 7.834181156087357e-05,
      "loss": 1.2728,
      "step": 8290
    },
    {
      "epoch": 0.8755274261603375,
      "grad_norm": 0.4837932884693146,
      "learning_rate": 7.726198865777852e-05,
      "loss": 1.298,
      "step": 8300
    },
    {
      "epoch": 0.8765822784810127,
      "grad_norm": 0.48890531063079834,
      "learning_rate": 7.61970494735919e-05,
      "loss": 1.2768,
      "step": 8310
    },
    {
      "epoch": 0.8776371308016878,
      "grad_norm": 0.4484415650367737,
      "learning_rate": 7.514678885885086e-05,
      "loss": 1.2956,
      "step": 8320
    },
    {
      "epoch": 0.8786919831223629,
      "grad_norm": 0.45066314935684204,
      "learning_rate": 7.411100449176634e-05,
      "loss": 1.288,
      "step": 8330
    },
    {
      "epoch": 0.879746835443038,
      "grad_norm": 0.470375120639801,
      "learning_rate": 7.308949683924792e-05,
      "loss": 1.2746,
      "step": 8340
    },
    {
      "epoch": 0.880801687763713,
      "grad_norm": 0.48986098170280457,
      "learning_rate": 7.208206911846581e-05,
      "loss": 1.2796,
      "step": 8350
    },
    {
      "epoch": 0.8818565400843882,
      "grad_norm": 0.4666622579097748,
      "learning_rate": 7.10885272589427e-05,
      "loss": 1.2766,
      "step": 8360
    },
    {
      "epoch": 0.8829113924050633,
      "grad_norm": 0.4520988464355469,
      "learning_rate": 7.010867986516811e-05,
      "loss": 1.2841,
      "step": 8370
    },
    {
      "epoch": 0.8839662447257384,
      "grad_norm": 0.48233267664909363,
      "learning_rate": 6.914233817972799e-05,
      "loss": 1.2908,
      "step": 8380
    },
    {
      "epoch": 0.8850210970464135,
      "grad_norm": 0.4731663763523102,
      "learning_rate": 6.818931604694264e-05,
      "loss": 1.2797,
      "step": 8390
    },
    {
      "epoch": 0.8860759493670886,
      "grad_norm": 0.5251386165618896,
      "learning_rate": 6.724942987700563e-05,
      "loss": 1.2814,
      "step": 8400
    },
    {
      "epoch": 0.8871308016877637,
      "grad_norm": 0.5002477765083313,
      "learning_rate": 6.632249861061733e-05,
      "loss": 1.3002,
      "step": 8410
    },
    {
      "epoch": 0.8881856540084389,
      "grad_norm": 0.4532804787158966,
      "learning_rate": 6.540834368410549e-05,
      "loss": 1.2869,
      "step": 8420
    },
    {
      "epoch": 0.8892405063291139,
      "grad_norm": 0.46717965602874756,
      "learning_rate": 6.4506788995027e-05,
      "loss": 1.2841,
      "step": 8430
    },
    {
      "epoch": 0.890295358649789,
      "grad_norm": 0.4740357995033264,
      "learning_rate": 6.361766086824344e-05,
      "loss": 1.2787,
      "step": 8440
    },
    {
      "epoch": 0.8913502109704642,
      "grad_norm": 0.48714375495910645,
      "learning_rate": 6.274078802246449e-05,
      "loss": 1.288,
      "step": 8450
    },
    {
      "epoch": 0.8924050632911392,
      "grad_norm": 0.48944953083992004,
      "learning_rate": 6.187600153725223e-05,
      "loss": 1.2818,
      "step": 8460
    },
    {
      "epoch": 0.8934599156118144,
      "grad_norm": 0.4496278464794159,
      "learning_rate": 6.1023134820480546e-05,
      "loss": 1.2844,
      "step": 8470
    },
    {
      "epoch": 0.8945147679324894,
      "grad_norm": 0.45108094811439514,
      "learning_rate": 6.0182023576242725e-05,
      "loss": 1.2788,
      "step": 8480
    },
    {
      "epoch": 0.8955696202531646,
      "grad_norm": 0.4812398850917816,
      "learning_rate": 5.9352505773201664e-05,
      "loss": 1.2836,
      "step": 8490
    },
    {
      "epoch": 0.8966244725738397,
      "grad_norm": 0.4708564579486847,
      "learning_rate": 5.8534421613376175e-05,
      "loss": 1.2729,
      "step": 8500
    },
    {
      "epoch": 0.8976793248945147,
      "grad_norm": 0.47171875834465027,
      "learning_rate": 5.772761350135759e-05,
      "loss": 1.2842,
      "step": 8510
    },
    {
      "epoch": 0.8987341772151899,
      "grad_norm": 0.47209882736206055,
      "learning_rate": 5.6931926013950586e-05,
      "loss": 1.2815,
      "step": 8520
    },
    {
      "epoch": 0.8997890295358649,
      "grad_norm": 0.4671887159347534,
      "learning_rate": 5.61472058702326e-05,
      "loss": 1.2951,
      "step": 8530
    },
    {
      "epoch": 0.9008438818565401,
      "grad_norm": 0.46785426139831543,
      "learning_rate": 5.53733019020258e-05,
      "loss": 1.2906,
      "step": 8540
    },
    {
      "epoch": 0.9018987341772152,
      "grad_norm": 0.49801570177078247,
      "learning_rate": 5.4610065024776125e-05,
      "loss": 1.2817,
      "step": 8550
    },
    {
      "epoch": 0.9029535864978903,
      "grad_norm": 0.5166123509407043,
      "learning_rate": 5.38573482088337e-05,
      "loss": 1.2739,
      "step": 8560
    },
    {
      "epoch": 0.9040084388185654,
      "grad_norm": 0.5468111038208008,
      "learning_rate": 5.3115006451129075e-05,
      "loss": 1.2913,
      "step": 8570
    },
    {
      "epoch": 0.9050632911392406,
      "grad_norm": 0.4588988423347473,
      "learning_rate": 5.2382896747239935e-05,
      "loss": 1.2836,
      "step": 8580
    },
    {
      "epoch": 0.9061181434599156,
      "grad_norm": 0.5207972526550293,
      "learning_rate": 5.166087806384275e-05,
      "loss": 1.2896,
      "step": 8590
    },
    {
      "epoch": 0.9071729957805907,
      "grad_norm": 0.4933626353740692,
      "learning_rate": 5.0948811311544186e-05,
      "loss": 1.278,
      "step": 8600
    },
    {
      "epoch": 0.9082278481012658,
      "grad_norm": 0.4750603437423706,
      "learning_rate": 5.024655931808697e-05,
      "loss": 1.2799,
      "step": 8610
    },
    {
      "epoch": 0.9092827004219409,
      "grad_norm": 0.480411171913147,
      "learning_rate": 4.955398680192509e-05,
      "loss": 1.2792,
      "step": 8620
    },
    {
      "epoch": 0.9103375527426161,
      "grad_norm": 0.4989491403102875,
      "learning_rate": 4.887096034616319e-05,
      "loss": 1.2883,
      "step": 8630
    },
    {
      "epoch": 0.9113924050632911,
      "grad_norm": 0.5354823470115662,
      "learning_rate": 4.819734837285529e-05,
      "loss": 1.2703,
      "step": 8640
    },
    {
      "epoch": 0.9124472573839663,
      "grad_norm": 0.4586617350578308,
      "learning_rate": 4.7533021117657475e-05,
      "loss": 1.2742,
      "step": 8650
    },
    {
      "epoch": 0.9135021097046413,
      "grad_norm": 0.49164167046546936,
      "learning_rate": 4.687785060483031e-05,
      "loss": 1.277,
      "step": 8660
    },
    {
      "epoch": 0.9145569620253164,
      "grad_norm": 0.4514996409416199,
      "learning_rate": 4.623171062258557e-05,
      "loss": 1.2834,
      "step": 8670
    },
    {
      "epoch": 0.9156118143459916,
      "grad_norm": 0.4464404284954071,
      "learning_rate": 4.559447669877288e-05,
      "loss": 1.2895,
      "step": 8680
    },
    {
      "epoch": 0.9166666666666666,
      "grad_norm": 0.4650578200817108,
      "learning_rate": 4.496602607690141e-05,
      "loss": 1.2868,
      "step": 8690
    },
    {
      "epoch": 0.9177215189873418,
      "grad_norm": 0.48441407084465027,
      "learning_rate": 4.434623769249217e-05,
      "loss": 1.282,
      "step": 8700
    },
    {
      "epoch": 0.9187763713080169,
      "grad_norm": 0.46642568707466125,
      "learning_rate": 4.373499214975615e-05,
      "loss": 1.2803,
      "step": 8710
    },
    {
      "epoch": 0.919831223628692,
      "grad_norm": 0.4851605296134949,
      "learning_rate": 4.313217169859397e-05,
      "loss": 1.2756,
      "step": 8720
    },
    {
      "epoch": 0.9208860759493671,
      "grad_norm": 0.469745934009552,
      "learning_rate": 4.253766021191256e-05,
      "loss": 1.2882,
      "step": 8730
    },
    {
      "epoch": 0.9219409282700421,
      "grad_norm": 0.47525298595428467,
      "learning_rate": 4.19513431632545e-05,
      "loss": 1.2776,
      "step": 8740
    },
    {
      "epoch": 0.9229957805907173,
      "grad_norm": 0.5118164420127869,
      "learning_rate": 4.1373107604735626e-05,
      "loss": 1.2749,
      "step": 8750
    },
    {
      "epoch": 0.9240506329113924,
      "grad_norm": 0.5095266103744507,
      "learning_rate": 4.0802842145286876e-05,
      "loss": 1.2656,
      "step": 8760
    },
    {
      "epoch": 0.9251054852320675,
      "grad_norm": 0.4578050673007965,
      "learning_rate": 4.024043692919589e-05,
      "loss": 1.276,
      "step": 8770
    },
    {
      "epoch": 0.9261603375527426,
      "grad_norm": 0.46929845213890076,
      "learning_rate": 3.968578361494449e-05,
      "loss": 1.2813,
      "step": 8780
    },
    {
      "epoch": 0.9272151898734177,
      "grad_norm": 0.4484109580516815,
      "learning_rate": 3.91387753543378e-05,
      "loss": 1.2757,
      "step": 8790
    },
    {
      "epoch": 0.9282700421940928,
      "grad_norm": 0.46053996682167053,
      "learning_rate": 3.859930677192103e-05,
      "loss": 1.2739,
      "step": 8800
    },
    {
      "epoch": 0.929324894514768,
      "grad_norm": 0.4593728184700012,
      "learning_rate": 3.806727394468005e-05,
      "loss": 1.2912,
      "step": 8810
    },
    {
      "epoch": 0.930379746835443,
      "grad_norm": 0.4655931890010834,
      "learning_rate": 3.7542574382021635e-05,
      "loss": 1.2773,
      "step": 8820
    },
    {
      "epoch": 0.9314345991561181,
      "grad_norm": 0.4637735188007355,
      "learning_rate": 3.702510700602975e-05,
      "loss": 1.2937,
      "step": 8830
    },
    {
      "epoch": 0.9324894514767933,
      "grad_norm": 0.47105222940444946,
      "learning_rate": 3.651477213199394e-05,
      "loss": 1.2797,
      "step": 8840
    },
    {
      "epoch": 0.9335443037974683,
      "grad_norm": 0.5045534372329712,
      "learning_rate": 3.601147144920609e-05,
      "loss": 1.2798,
      "step": 8850
    },
    {
      "epoch": 0.9345991561181435,
      "grad_norm": 0.517986536026001,
      "learning_rate": 3.5515108002021946e-05,
      "loss": 1.279,
      "step": 8860
    },
    {
      "epoch": 0.9356540084388185,
      "grad_norm": 0.46878963708877563,
      "learning_rate": 3.502558617118352e-05,
      "loss": 1.2809,
      "step": 8870
    },
    {
      "epoch": 0.9367088607594937,
      "grad_norm": 0.4535967707633972,
      "learning_rate": 3.454281165539913e-05,
      "loss": 1.2826,
      "step": 8880
    },
    {
      "epoch": 0.9377637130801688,
      "grad_norm": 0.4552432596683502,
      "learning_rate": 3.406669145317717e-05,
      "loss": 1.2841,
      "step": 8890
    },
    {
      "epoch": 0.9388185654008439,
      "grad_norm": 0.46975669264793396,
      "learning_rate": 3.359713384491036e-05,
      "loss": 1.2745,
      "step": 8900
    },
    {
      "epoch": 0.939873417721519,
      "grad_norm": 0.4926530122756958,
      "learning_rate": 3.313404837520694e-05,
      "loss": 1.2733,
      "step": 8910
    },
    {
      "epoch": 0.9409282700421941,
      "grad_norm": 0.43800973892211914,
      "learning_rate": 3.267734583546536e-05,
      "loss": 1.2807,
      "step": 8920
    },
    {
      "epoch": 0.9419831223628692,
      "grad_norm": 0.4557863473892212,
      "learning_rate": 3.222693824668916e-05,
      "loss": 1.2766,
      "step": 8930
    },
    {
      "epoch": 0.9430379746835443,
      "grad_norm": 0.468304306268692,
      "learning_rate": 3.178273884253874e-05,
      "loss": 1.2771,
      "step": 8940
    },
    {
      "epoch": 0.9440928270042194,
      "grad_norm": 0.4833139479160309,
      "learning_rate": 3.134466205261674e-05,
      "loss": 1.2791,
      "step": 8950
    },
    {
      "epoch": 0.9451476793248945,
      "grad_norm": 0.45856285095214844,
      "learning_rate": 3.0912623485983774e-05,
      "loss": 1.2837,
      "step": 8960
    },
    {
      "epoch": 0.9462025316455697,
      "grad_norm": 0.5141430497169495,
      "learning_rate": 3.048653991490141e-05,
      "loss": 1.2798,
      "step": 8970
    },
    {
      "epoch": 0.9472573839662447,
      "grad_norm": 0.5297994613647461,
      "learning_rate": 3.0066329258799184e-05,
      "loss": 1.2774,
      "step": 8980
    },
    {
      "epoch": 0.9483122362869199,
      "grad_norm": 0.4530332684516907,
      "learning_rate": 2.965191056846266e-05,
      "loss": 1.2842,
      "step": 8990
    },
    {
      "epoch": 0.9493670886075949,
      "grad_norm": 0.4460042715072632,
      "learning_rate": 2.9243204010439396e-05,
      "loss": 1.2763,
      "step": 9000
    },
    {
      "epoch": 0.95042194092827,
      "grad_norm": 0.4769385755062103,
      "learning_rate": 2.8840130851659852e-05,
      "loss": 1.2765,
      "step": 9010
    },
    {
      "epoch": 0.9514767932489452,
      "grad_norm": 0.47912001609802246,
      "learning_rate": 2.844261344427029e-05,
      "loss": 1.2743,
      "step": 9020
    },
    {
      "epoch": 0.9525316455696202,
      "grad_norm": 0.5202059745788574,
      "learning_rate": 2.805057521067472e-05,
      "loss": 1.2942,
      "step": 9030
    },
    {
      "epoch": 0.9535864978902954,
      "grad_norm": 0.46697160601615906,
      "learning_rate": 2.766394062878302e-05,
      "loss": 1.2704,
      "step": 9040
    },
    {
      "epoch": 0.9546413502109705,
      "grad_norm": 0.4940732419490814,
      "learning_rate": 2.7282635217462405e-05,
      "loss": 1.2817,
      "step": 9050
    },
    {
      "epoch": 0.9556962025316456,
      "grad_norm": 0.4639584422111511,
      "learning_rate": 2.6906585522189378e-05,
      "loss": 1.2827,
      "step": 9060
    },
    {
      "epoch": 0.9567510548523207,
      "grad_norm": 0.4650475084781647,
      "learning_rate": 2.653571910089951e-05,
      "loss": 1.2828,
      "step": 9070
    },
    {
      "epoch": 0.9578059071729957,
      "grad_norm": 0.4645904302597046,
      "learning_rate": 2.6169964510032243e-05,
      "loss": 1.2831,
      "step": 9080
    },
    {
      "epoch": 0.9588607594936709,
      "grad_norm": 0.47728443145751953,
      "learning_rate": 2.580925129076798e-05,
      "loss": 1.2845,
      "step": 9090
    },
    {
      "epoch": 0.959915611814346,
      "grad_norm": 0.4670153856277466,
      "learning_rate": 2.5453509955454954e-05,
      "loss": 1.2631,
      "step": 9100
    },
    {
      "epoch": 0.9609704641350211,
      "grad_norm": 0.4669053554534912,
      "learning_rate": 2.510267197422317e-05,
      "loss": 1.2765,
      "step": 9110
    },
    {
      "epoch": 0.9620253164556962,
      "grad_norm": 0.4864523410797119,
      "learning_rate": 2.4756669761782806e-05,
      "loss": 1.2864,
      "step": 9120
    },
    {
      "epoch": 0.9630801687763713,
      "grad_norm": 0.46119776368141174,
      "learning_rate": 2.4415436664404643e-05,
      "loss": 1.2686,
      "step": 9130
    },
    {
      "epoch": 0.9641350210970464,
      "grad_norm": 0.5017858743667603,
      "learning_rate": 2.4078906947079882e-05,
      "loss": 1.2902,
      "step": 9140
    },
    {
      "epoch": 0.9651898734177216,
      "grad_norm": 0.49536940455436707,
      "learning_rate": 2.3747015780857007e-05,
      "loss": 1.2777,
      "step": 9150
    },
    {
      "epoch": 0.9662447257383966,
      "grad_norm": 0.46517708897590637,
      "learning_rate": 2.3419699230353144e-05,
      "loss": 1.2829,
      "step": 9160
    },
    {
      "epoch": 0.9672995780590717,
      "grad_norm": 0.47231581807136536,
      "learning_rate": 2.3096894241437583e-05,
      "loss": 1.2837,
      "step": 9170
    },
    {
      "epoch": 0.9683544303797469,
      "grad_norm": 0.45664721727371216,
      "learning_rate": 2.2778538629085057e-05,
      "loss": 1.2796,
      "step": 9180
    },
    {
      "epoch": 0.9694092827004219,
      "grad_norm": 0.48494887351989746,
      "learning_rate": 2.2464571065396428e-05,
      "loss": 1.2743,
      "step": 9190
    },
    {
      "epoch": 0.9704641350210971,
      "grad_norm": 0.4595504403114319,
      "learning_rate": 2.2154931067784525e-05,
      "loss": 1.2776,
      "step": 9200
    },
    {
      "epoch": 0.9715189873417721,
      "grad_norm": 0.4692482650279999,
      "learning_rate": 2.1849558987322783e-05,
      "loss": 1.2775,
      "step": 9210
    },
    {
      "epoch": 0.9725738396624473,
      "grad_norm": 0.4805602431297302,
      "learning_rate": 2.1548395997254516e-05,
      "loss": 1.2744,
      "step": 9220
    },
    {
      "epoch": 0.9736286919831224,
      "grad_norm": 0.44999340176582336,
      "learning_rate": 2.1251384081660546e-05,
      "loss": 1.2674,
      "step": 9230
    },
    {
      "epoch": 0.9746835443037974,
      "grad_norm": 0.45670419931411743,
      "learning_rate": 2.0958466024283035e-05,
      "loss": 1.261,
      "step": 9240
    },
    {
      "epoch": 0.9757383966244726,
      "grad_norm": 0.4626820981502533,
      "learning_rate": 2.0669585397503362e-05,
      "loss": 1.2735,
      "step": 9250
    },
    {
      "epoch": 0.9767932489451476,
      "grad_norm": 0.4641728699207306,
      "learning_rate": 2.0384686551471954e-05,
      "loss": 1.2683,
      "step": 9260
    },
    {
      "epoch": 0.9778481012658228,
      "grad_norm": 0.45692700147628784,
      "learning_rate": 2.0103714603387898e-05,
      "loss": 1.2813,
      "step": 9270
    },
    {
      "epoch": 0.9789029535864979,
      "grad_norm": 0.44312015175819397,
      "learning_rate": 1.9826615426926342e-05,
      "loss": 1.2699,
      "step": 9280
    },
    {
      "epoch": 0.979957805907173,
      "grad_norm": 0.46310457587242126,
      "learning_rate": 1.9553335641811623e-05,
      "loss": 1.276,
      "step": 9290
    },
    {
      "epoch": 0.9810126582278481,
      "grad_norm": 0.47141996026039124,
      "learning_rate": 1.9283822603534143e-05,
      "loss": 1.2746,
      "step": 9300
    },
    {
      "epoch": 0.9820675105485233,
      "grad_norm": 0.48251011967658997,
      "learning_rate": 1.90180243932089e-05,
      "loss": 1.2757,
      "step": 9310
    },
    {
      "epoch": 0.9831223628691983,
      "grad_norm": 0.4870384633541107,
      "learning_rate": 1.8755889807573868e-05,
      "loss": 1.2703,
      "step": 9320
    },
    {
      "epoch": 0.9841772151898734,
      "grad_norm": 0.4840216338634491,
      "learning_rate": 1.8497368349126255e-05,
      "loss": 1.2829,
      "step": 9330
    },
    {
      "epoch": 0.9852320675105485,
      "grad_norm": 0.46854716539382935,
      "learning_rate": 1.824241021639465e-05,
      "loss": 1.282,
      "step": 9340
    },
    {
      "epoch": 0.9862869198312236,
      "grad_norm": 0.46142417192459106,
      "learning_rate": 1.799096629434529e-05,
      "loss": 1.2914,
      "step": 9350
    },
    {
      "epoch": 0.9873417721518988,
      "grad_norm": 0.4705776274204254,
      "learning_rate": 1.7742988144920578e-05,
      "loss": 1.284,
      "step": 9360
    },
    {
      "epoch": 0.9883966244725738,
      "grad_norm": 0.4654499292373657,
      "learning_rate": 1.7498427997707978e-05,
      "loss": 1.282,
      "step": 9370
    },
    {
      "epoch": 0.989451476793249,
      "grad_norm": 0.46457529067993164,
      "learning_rate": 1.7257238740737548e-05,
      "loss": 1.2679,
      "step": 9380
    },
    {
      "epoch": 0.990506329113924,
      "grad_norm": 0.49462538957595825,
      "learning_rate": 1.7019373911406307e-05,
      "loss": 1.2722,
      "step": 9390
    },
    {
      "epoch": 0.9915611814345991,
      "grad_norm": 0.4786498546600342,
      "learning_rate": 1.67847876875277e-05,
      "loss": 1.2831,
      "step": 9400
    },
    {
      "epoch": 0.9926160337552743,
      "grad_norm": 0.4924539625644684,
      "learning_rate": 1.655343487850443e-05,
      "loss": 1.2585,
      "step": 9410
    },
    {
      "epoch": 0.9936708860759493,
      "grad_norm": 0.4809725284576416,
      "learning_rate": 1.6325270916622947e-05,
      "loss": 1.2792,
      "step": 9420
    },
    {
      "epoch": 0.9947257383966245,
      "grad_norm": 0.4645226299762726,
      "learning_rate": 1.610025184846797e-05,
      "loss": 1.2787,
      "step": 9430
    },
    {
      "epoch": 0.9957805907172996,
      "grad_norm": 0.4642578661441803,
      "learning_rate": 1.587833432645528e-05,
      "loss": 1.2749,
      "step": 9440
    },
    {
      "epoch": 0.9968354430379747,
      "grad_norm": 0.4650630056858063,
      "learning_rate": 1.5659475600481297e-05,
      "loss": 1.2838,
      "step": 9450
    },
    {
      "epoch": 0.9978902953586498,
      "grad_norm": 0.4606262743473053,
      "learning_rate": 1.544363350968769e-05,
      "loss": 1.2703,
      "step": 9460
    },
    {
      "epoch": 0.9989451476793249,
      "grad_norm": 0.4388413727283478,
      "learning_rate": 1.523076647433954e-05,
      "loss": 1.2801,
      "step": 9470
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.3256233930587769,
      "learning_rate": 1.5020833487815421e-05,
      "loss": 1.2745,
      "step": 9480
    }
  ],
  "logging_steps": 10,
  "max_steps": 9480,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.832308198648013e+16,
  "train_batch_size": 1024,
  "trial_name": null,
  "trial_params": null
}