{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 9480,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0010548523206751054, "grad_norm": 1.1911535263061523, "learning_rate": 0.00015822784810126583, "loss": 7.514, "step": 10 },
    { "epoch": 0.002109704641350211, "grad_norm": 1.1261857748031616, "learning_rate": 0.00031645569620253165, "loss": 6.971, "step": 20 },
    { "epoch": 0.0031645569620253164, "grad_norm": 0.8440089821815491, "learning_rate": 0.00047468354430379745, "loss": 6.3116, "step": 30 },
    { "epoch": 0.004219409282700422, "grad_norm": 1.437301754951477, "learning_rate": 0.0006329113924050633, "loss": 5.8497, "step": 40 },
    { "epoch": 0.005274261603375527, "grad_norm": 1.4225043058395386, "learning_rate": 0.0007911392405063291, "loss": 5.5173, "step": 50 },
    { "epoch": 0.006329113924050633, "grad_norm": 0.9975273013114929, "learning_rate": 0.0009493670886075949, "loss": 5.1521, "step": 60 },
    { "epoch": 0.007383966244725738, "grad_norm": 1.0305777788162231, "learning_rate": 0.0011075949367088608, "loss": 4.7579, "step": 70 },
    { "epoch": 0.008438818565400843, "grad_norm": 1.3440488576889038, "learning_rate": 0.0012658227848101266, "loss": 4.4427, "step": 80 },
    { "epoch": 0.00949367088607595, "grad_norm": 1.0169594287872314, "learning_rate": 0.0014240506329113926, "loss": 4.1901, "step": 90 },
    { "epoch": 0.010548523206751054, "grad_norm": 0.8410079479217529, "learning_rate": 0.0015, "loss": 4.0012, "step": 100 },
    { "epoch": 0.011603375527426161, "grad_norm": 0.6870416402816772, "learning_rate": 0.0015, "loss": 3.8088, "step": 110 },
    { "epoch": 0.012658227848101266, "grad_norm": 0.6572973728179932, "learning_rate": 0.0015, "loss": 3.6725, "step": 120 },
    { "epoch": 0.013713080168776372, "grad_norm": 0.535599410533905, "learning_rate": 0.0015, "loss": 3.5343, "step": 130 },
    { "epoch": 0.014767932489451477, "grad_norm": 0.8405603766441345, "learning_rate": 0.0015, "loss": 3.4165, "step": 140 },
    { "epoch": 0.015822784810126583, "grad_norm": 0.5135706663131714, "learning_rate": 0.0015, "loss": 3.3249, "step": 150 },
    { "epoch": 0.016877637130801686, "grad_norm": 0.6049426794052124, "learning_rate": 0.0015, "loss": 3.2309, "step": 160 },
    { "epoch": 0.017932489451476793, "grad_norm": 0.6043658256530762, "learning_rate": 0.0015, "loss": 3.1535, "step": 170 },
    { "epoch": 0.0189873417721519, "grad_norm": 0.7351441383361816, "learning_rate": 0.0015, "loss": 3.1078, "step": 180 },
    { "epoch": 0.020042194092827006, "grad_norm": 0.5547571182250977, "learning_rate": 0.0015, "loss": 3.0352, "step": 190 },
    { "epoch": 0.02109704641350211, "grad_norm": 0.7150177955627441, "learning_rate": 0.0015, "loss": 2.9785, "step": 200 },
    { "epoch": 0.022151898734177215, "grad_norm": 0.5605180263519287, "learning_rate": 0.0015, "loss": 2.9324, "step": 210 },
    { "epoch": 0.023206751054852322, "grad_norm": 0.5501560568809509, "learning_rate": 0.0015, "loss": 2.892, "step": 220 },
    { "epoch": 0.024261603375527425, "grad_norm": 0.5405913591384888, "learning_rate": 0.0015, "loss": 2.8553, "step": 230 },
    { "epoch": 0.02531645569620253, "grad_norm": 0.44567427039146423, "learning_rate": 0.0015, "loss": 2.8098, "step": 240 },
    { "epoch": 0.026371308016877638, "grad_norm": 0.5209677815437317, "learning_rate": 0.0015, "loss": 2.7686, "step": 250 },
    { "epoch": 0.027426160337552744, "grad_norm": 0.5224133133888245, "learning_rate": 0.0015, "loss": 2.7305, "step": 260 },
    { "epoch": 0.028481012658227847, "grad_norm": 0.561279296875, "learning_rate": 0.0015, "loss": 2.7045, "step": 270 },
    { "epoch": 0.029535864978902954, "grad_norm": 0.48504865169525146, "learning_rate": 0.0015, "loss": 2.6616, "step": 280 },
    { "epoch": 0.03059071729957806, "grad_norm": 0.5276456475257874, "learning_rate": 0.0015, "loss": 2.6373, "step": 290 },
    { "epoch": 0.03164556962025317, "grad_norm": 0.7674102783203125, "learning_rate": 0.0015, "loss": 2.6083, "step": 300 },
    { "epoch": 0.03270042194092827, "grad_norm": 0.5037146806716919, "learning_rate": 0.0015, "loss": 2.583, "step": 310 },
    { "epoch": 0.03375527426160337, "grad_norm": 0.4920331835746765, "learning_rate": 0.0015, "loss": 2.5358, "step": 320 },
    { "epoch": 0.03481012658227848, "grad_norm": 0.6064138412475586, "learning_rate": 0.0015, "loss": 2.5224, "step": 330 },
    { "epoch": 0.035864978902953586, "grad_norm": 0.5508156418800354, "learning_rate": 0.0015, "loss": 2.4869, "step": 340 },
    { "epoch": 0.03691983122362869, "grad_norm": 0.46913155913352966, "learning_rate": 0.0015, "loss": 2.4401, "step": 350 },
    { "epoch": 0.0379746835443038, "grad_norm": 0.6472739577293396, "learning_rate": 0.0015, "loss": 2.4113, "step": 360 },
    { "epoch": 0.039029535864978905, "grad_norm": 0.5063979029655457, "learning_rate": 0.0015, "loss": 2.3883, "step": 370 },
    { "epoch": 0.04008438818565401, "grad_norm": 0.5765858888626099, "learning_rate": 0.0015, "loss": 2.3691, "step": 380 },
    { "epoch": 0.04113924050632911, "grad_norm": 0.5593698620796204, "learning_rate": 0.0015, "loss": 2.3406, "step": 390 },
    { "epoch": 0.04219409282700422, "grad_norm": 0.7147346138954163, "learning_rate": 0.0015, "loss": 2.3005, "step": 400 },
    { "epoch": 0.043248945147679324, "grad_norm": 0.5318769812583923, "learning_rate": 0.0015, "loss": 2.2975, "step": 410 },
    { "epoch": 0.04430379746835443, "grad_norm": 0.5068933963775635, "learning_rate": 0.0015, "loss": 2.2618, "step": 420 },
    { "epoch": 0.04535864978902954, "grad_norm": 0.5029956102371216, "learning_rate": 0.0015, "loss": 2.2284, "step": 430 },
    { "epoch": 0.046413502109704644, "grad_norm": 0.5769665241241455, "learning_rate": 0.0015, "loss": 2.2023, "step": 440 },
    { "epoch": 0.04746835443037975, "grad_norm": 0.6128251552581787, "learning_rate": 0.0015, "loss": 2.2, "step": 450 },
    { "epoch": 0.04852320675105485, "grad_norm": 0.530529260635376, "learning_rate": 0.0015, "loss": 2.1738, "step": 460 },
    { "epoch": 0.049578059071729956, "grad_norm": 0.5000449419021606, "learning_rate": 0.0015, "loss": 2.1554, "step": 470 },
    { "epoch": 0.05063291139240506, "grad_norm": 0.7125113606452942, "learning_rate": 0.0015, "loss": 2.1395, "step": 480 },
    { "epoch": 0.05168776371308017, "grad_norm": 0.4939398467540741, "learning_rate": 0.0015, "loss": 2.105, "step": 490 },
    { "epoch": 0.052742616033755275, "grad_norm": 0.5103901028633118, "learning_rate": 0.0015, "loss": 2.0902, "step": 500 },
    { "epoch": 0.05379746835443038, "grad_norm": 0.4903666377067566, "learning_rate": 0.0015, "loss": 2.0743, "step": 510 },
    { "epoch": 0.05485232067510549, "grad_norm": 0.6215901374816895, "learning_rate": 0.0015, "loss": 2.0572, "step": 520 },
    { "epoch": 0.05590717299578059, "grad_norm": 0.5370311141014099, "learning_rate": 0.0015, "loss": 2.0422, "step": 530 },
    { "epoch": 0.056962025316455694, "grad_norm": 0.583985447883606, "learning_rate": 0.0015, "loss": 2.0237, "step": 540 },
    { "epoch": 0.0580168776371308, "grad_norm": 0.48627397418022156, "learning_rate": 0.0015, "loss": 2.0133, "step": 550 },
    { "epoch": 0.05907172995780591, "grad_norm": 0.46253713965415955, "learning_rate": 0.0015, "loss": 1.9705, "step": 560 },
    { "epoch": 0.060126582278481014, "grad_norm": 0.5268188118934631, "learning_rate": 0.0015, "loss": 1.9772, "step": 570 },
    { "epoch": 0.06118143459915612, "grad_norm": 0.526195764541626, "learning_rate": 0.0015, "loss": 1.9743, "step": 580 },
    { "epoch": 0.06223628691983123, "grad_norm": 0.501920759677887, "learning_rate": 0.0015, "loss": 1.9446, "step": 590 },
    { "epoch": 0.06329113924050633, "grad_norm": 0.5201966166496277, "learning_rate": 0.0015, "loss": 1.9259, "step": 600 },
    { "epoch": 0.06434599156118144, "grad_norm": 0.49704989790916443, "learning_rate": 0.0015, "loss": 1.9233, "step": 610 },
    { "epoch": 0.06540084388185655, "grad_norm": 0.46477070450782776, "learning_rate": 0.0015, "loss": 1.9054, "step": 620 },
    { "epoch": 0.06645569620253164, "grad_norm": 0.5311816334724426, "learning_rate": 0.0015, "loss": 1.892, "step": 630 },
    { "epoch": 0.06751054852320675, "grad_norm": 0.6093870401382446, "learning_rate": 0.0015, "loss": 1.907, "step": 640 },
    { "epoch": 0.06856540084388185, "grad_norm": 0.5302767157554626, "learning_rate": 0.0015, "loss": 1.8872, "step": 650 },
    { "epoch": 0.06962025316455696, "grad_norm": 0.5349369049072266, "learning_rate": 0.0015, "loss": 1.8741, "step": 660 },
    { "epoch": 0.07067510548523206, "grad_norm": 0.5523291230201721, "learning_rate": 0.0015, "loss": 1.8571, "step": 670 },
    { "epoch": 0.07172995780590717, "grad_norm": 0.45728522539138794, "learning_rate": 0.0015, "loss": 1.8468, "step": 680 },
    { "epoch": 0.07278481012658228, "grad_norm": 0.49975475668907166, "learning_rate": 0.0015, "loss": 1.8546, "step": 690 },
    { "epoch": 0.07383966244725738, "grad_norm": 0.5351147651672363, "learning_rate": 0.0015, "loss": 1.8344, "step": 700 },
    { "epoch": 0.07489451476793249, "grad_norm": 0.4953480362892151, "learning_rate": 0.0015, "loss": 1.8199, "step": 710 },
    { "epoch": 0.0759493670886076, "grad_norm": 0.5598252415657043, "learning_rate": 0.0015, "loss": 1.806, "step": 720 },
    { "epoch": 0.0770042194092827, "grad_norm": 0.636795699596405, "learning_rate": 0.0015, "loss": 1.795, "step": 730 },
    { "epoch": 0.07805907172995781, "grad_norm": 0.5506213903427124, "learning_rate": 0.0015, "loss": 1.8043, "step": 740 },
    { "epoch": 0.07911392405063292, "grad_norm": 0.5925042033195496, "learning_rate": 0.0015, "loss": 1.7829, "step": 750 },
    { "epoch": 0.08016877637130802, "grad_norm": 0.7445921301841736, "learning_rate": 0.0015, "loss": 1.7803, "step": 760 },
    { "epoch": 0.08122362869198312, "grad_norm": 0.48227658867836, "learning_rate": 0.0015, "loss": 1.7703, "step": 770 },
    { "epoch": 0.08227848101265822, "grad_norm": 0.5093647837638855, "learning_rate": 0.0015, "loss": 1.768, "step": 780 },
    { "epoch": 0.08333333333333333, "grad_norm": 0.4523041844367981, "learning_rate": 0.0015, "loss": 1.7647, "step": 790 },
    { "epoch": 0.08438818565400844, "grad_norm": 0.48087066411972046, "learning_rate": 0.0015, "loss": 1.743, "step": 800 },
    { "epoch": 0.08544303797468354, "grad_norm": 0.5096999406814575, "learning_rate": 0.0015, "loss": 1.7307, "step": 810 },
    { "epoch": 0.08649789029535865, "grad_norm": 0.557677149772644, "learning_rate": 0.0015, "loss": 1.7368, "step": 820 },
    { "epoch": 0.08755274261603375, "grad_norm": 0.5189062356948853, "learning_rate": 0.0015, "loss": 1.7393, "step": 830 },
    { "epoch": 0.08860759493670886, "grad_norm": 0.48466941714286804, "learning_rate": 0.0015, "loss": 1.7153, "step": 840 },
    { "epoch": 0.08966244725738397, "grad_norm": 0.508627712726593, "learning_rate": 0.0015, "loss": 1.713, "step": 850 },
    { "epoch": 0.09071729957805907, "grad_norm": 0.44129225611686707, "learning_rate": 0.0015, "loss": 1.7096, "step": 860 },
    { "epoch": 0.09177215189873418, "grad_norm": 0.5102670788764954, "learning_rate": 0.0015, "loss": 1.7063, "step": 870 },
    { "epoch": 0.09282700421940929, "grad_norm": 0.5179425477981567, "learning_rate": 0.0015, "loss": 1.7001, "step": 880 },
    { "epoch": 0.0938818565400844, "grad_norm": 0.46534618735313416, "learning_rate": 0.0015, "loss": 1.6878, "step": 890 },
    { "epoch": 0.0949367088607595, "grad_norm": 0.4997912049293518, "learning_rate": 0.0015, "loss": 1.6861, "step": 900 },
    { "epoch": 0.09599156118143459, "grad_norm": 0.508382260799408, "learning_rate": 0.0015, "loss": 1.6949, "step": 910 },
    { "epoch": 0.0970464135021097, "grad_norm": 0.4784116744995117, "learning_rate": 0.0015, "loss": 1.6795, "step": 920 },
    { "epoch": 0.0981012658227848, "grad_norm": 0.4688781797885895, "learning_rate": 0.0015, "loss": 1.6661, "step": 930 },
    { "epoch": 0.09915611814345991, "grad_norm": 0.5456479787826538, "learning_rate": 0.0015, "loss": 1.6739, "step": 940 },
    { "epoch": 0.10021097046413502, "grad_norm": 0.5274268984794617, "learning_rate": 0.0015, "loss": 1.6537, "step": 950 },
    { "epoch": 0.10126582278481013, "grad_norm": 0.4864138960838318, "learning_rate": 0.0015, "loss": 1.6592, "step": 960 },
    { "epoch": 0.10232067510548523, "grad_norm": 0.5758424997329712, "learning_rate": 0.0015, "loss": 1.6613, "step": 970 },
    { "epoch": 0.10337552742616034, "grad_norm": 0.4512311816215515, "learning_rate": 0.0015, "loss": 1.6414, "step": 980 },
    { "epoch": 0.10443037974683544, "grad_norm": 0.45962268114089966, "learning_rate": 0.0015, "loss": 1.6391, "step": 990 },
    { "epoch": 0.10548523206751055, "grad_norm": 0.5761123895645142, "learning_rate": 0.0015, "loss": 1.6495, "step": 1000 },
    { "epoch": 0.10654008438818566, "grad_norm": 0.4733625650405884, "learning_rate": 0.0015, "loss": 1.6366, "step": 1010 },
    { "epoch": 0.10759493670886076, "grad_norm": 0.5327010154724121, "learning_rate": 0.0015, "loss": 1.6276, "step": 1020 },
    { "epoch": 0.10864978902953587, "grad_norm": 0.49683907628059387, "learning_rate": 0.0015, "loss": 1.6303, "step": 1030 },
    { "epoch": 0.10970464135021098, "grad_norm": 0.5400471687316895, "learning_rate": 0.0015, "loss": 1.6251, "step": 1040 },
    { "epoch": 0.11075949367088607, "grad_norm": 0.48789605498313904, "learning_rate": 0.0015, "loss": 1.6224, "step": 1050 },
    { "epoch": 0.11181434599156118, "grad_norm": 0.45513924956321716, "learning_rate": 0.0015, "loss": 1.6116, "step": 1060 },
    { "epoch": 0.11286919831223628, "grad_norm": 0.46881577372550964, "learning_rate": 0.0015, "loss": 1.602, "step": 1070 },
    { "epoch": 0.11392405063291139, "grad_norm": 0.4709598124027252, "learning_rate": 0.0015, "loss": 1.5996, "step": 1080 },
    { "epoch": 0.1149789029535865, "grad_norm": 0.43881508708000183, "learning_rate": 0.0015, "loss": 1.6087, "step": 1090 },
    { "epoch": 0.1160337552742616, "grad_norm": 0.4861796796321869, "learning_rate": 0.0015, "loss": 1.5978, "step": 1100 },
    { "epoch": 0.11708860759493671, "grad_norm": 0.4598830044269562, "learning_rate": 0.0015, "loss": 1.6007, "step": 1110 },
    { "epoch": 0.11814345991561181, "grad_norm": 0.46872007846832275, "learning_rate": 0.0015, "loss": 1.5916, "step": 1120 },
    { "epoch": 0.11919831223628692, "grad_norm": 0.5027231574058533, "learning_rate": 0.0015, "loss": 1.5836, "step": 1130 },
    { "epoch": 0.12025316455696203, "grad_norm": 0.4448629319667816, "learning_rate": 0.0015, "loss": 1.5928, "step": 1140 },
    { "epoch": 0.12130801687763713, "grad_norm": 0.5146149396896362, "learning_rate": 0.0015, "loss": 1.5788, "step": 1150 },
    { "epoch": 0.12236286919831224, "grad_norm": 0.4398585557937622, "learning_rate": 0.0015, "loss": 1.5838, "step": 1160 },
    { "epoch": 0.12341772151898735, "grad_norm": 0.43229490518569946, "learning_rate": 0.0015, "loss": 1.5731, "step": 1170 },
    { "epoch": 0.12447257383966245, "grad_norm": 0.44356974959373474, "learning_rate": 0.0015, "loss": 1.5594, "step": 1180 },
    { "epoch": 0.12552742616033755, "grad_norm": 0.4866366684436798, "learning_rate": 0.0015, "loss": 1.5628, "step": 1190 },
    { "epoch": 0.12658227848101267, "grad_norm": 0.4466060698032379, "learning_rate": 0.0015, "loss": 1.5753, "step": 1200 },
    { "epoch": 0.12763713080168776, "grad_norm": 0.5837816596031189, "learning_rate": 0.0015, "loss": 1.5668, "step": 1210 },
    { "epoch": 0.12869198312236288, "grad_norm": 0.4354594647884369, "learning_rate": 0.0015, "loss": 1.5527, "step": 1220 },
    { "epoch": 0.12974683544303797, "grad_norm": 0.43021681904792786, "learning_rate": 0.0015, "loss": 1.5443, "step": 1230 },
    { "epoch": 0.1308016877637131, "grad_norm": 0.5192949771881104, "learning_rate": 0.0015, "loss": 1.5602, "step": 1240 },
    { "epoch": 0.13185654008438819, "grad_norm": 0.41557225584983826, "learning_rate": 0.0015, "loss": 1.5582, "step": 1250 },
    { "epoch": 0.13291139240506328, "grad_norm": 0.4372217655181885, "learning_rate": 0.0015, "loss": 1.5435, "step": 1260 },
    { "epoch": 0.1339662447257384, "grad_norm": 0.4619849920272827, "learning_rate": 0.0015, "loss": 1.5426, "step": 1270 },
    { "epoch": 0.1350210970464135, "grad_norm": 0.5369654297828674, "learning_rate": 0.0015, "loss": 1.5554, "step": 1280 },
    { "epoch": 0.1360759493670886, "grad_norm": 0.43552911281585693, "learning_rate": 0.0015, "loss": 1.5421, "step": 1290 },
    { "epoch": 0.1371308016877637, "grad_norm": 0.4249727427959442, "learning_rate": 0.0015, "loss": 1.5318, "step": 1300 },
    { "epoch": 0.13818565400843882, "grad_norm": 0.4574583172798157, "learning_rate": 0.0015, "loss": 1.5332, "step": 1310 },
    { "epoch": 0.13924050632911392, "grad_norm": 0.4392021596431732, "learning_rate": 0.0015, "loss": 1.5371, "step": 1320 },
    { "epoch": 0.14029535864978904, "grad_norm": 0.6404235363006592, "learning_rate": 0.0015, "loss": 1.5272, "step": 1330 },
    { "epoch": 0.14135021097046413, "grad_norm": 0.4825304448604584, "learning_rate": 0.0015, "loss": 1.5276, "step": 1340 },
    { "epoch": 0.14240506329113925, "grad_norm": 0.4290938079357147, "learning_rate": 0.0015, "loss": 1.5289, "step": 1350 },
    { "epoch": 0.14345991561181434, "grad_norm": 0.42625662684440613, "learning_rate": 0.0015, "loss": 1.5239, "step": 1360 },
    { "epoch": 0.14451476793248946, "grad_norm": 0.5395568013191223, "learning_rate": 0.0015, "loss": 1.5224, "step": 1370 },
    { "epoch": 0.14556962025316456, "grad_norm": 0.4960879385471344, "learning_rate": 0.0015, "loss": 1.5122, "step": 1380 },
    { "epoch": 0.14662447257383968, "grad_norm": 0.4987901747226715, "learning_rate": 0.0015, "loss": 1.5219, "step": 1390 },
    { "epoch": 0.14767932489451477, "grad_norm": 0.5155091881752014, "learning_rate": 0.0015, "loss": 1.5035, "step": 1400 },
    { "epoch": 0.14873417721518986, "grad_norm": 0.4964143931865692, "learning_rate": 0.0015, "loss": 1.5091, "step": 1410 },
    { "epoch": 0.14978902953586498, "grad_norm": 0.5313318967819214, "learning_rate": 0.0015, "loss": 1.5126, "step": 1420 },
    { "epoch": 0.15084388185654007, "grad_norm": 0.5072162747383118, "learning_rate": 0.0015, "loss": 1.5043, "step": 1430 },
    { "epoch": 0.1518987341772152, "grad_norm": 0.4389128088951111, "learning_rate": 0.0015, "loss": 1.5022, "step": 1440 },
    { "epoch": 0.1529535864978903, "grad_norm": 0.44489845633506775, "learning_rate": 0.0015, "loss": 1.5024, "step": 1450 },
    { "epoch": 0.1540084388185654, "grad_norm": 0.4190062880516052, "learning_rate": 0.0015, "loss": 1.4957, "step": 1460 },
    { "epoch": 0.1550632911392405, "grad_norm": 0.5346754789352417, "learning_rate": 0.0015, "loss": 1.4955, "step": 1470 },
    { "epoch": 0.15611814345991562, "grad_norm": 0.472139447927475, "learning_rate": 0.0015, "loss": 1.5004, "step": 1480 },
    { "epoch": 0.1571729957805907, "grad_norm": 0.4273566007614136, "learning_rate": 0.0015, "loss": 1.4983, "step": 1490 },
    { "epoch": 0.15822784810126583, "grad_norm": 0.5061330199241638, "learning_rate": 0.0015, "loss": 1.4967, "step": 1500 },
    { "epoch": 0.15928270042194093, "grad_norm": 0.4574885964393616, "learning_rate": 0.0015, "loss": 1.4999, "step": 1510 },
    { "epoch": 0.16033755274261605, "grad_norm": 0.5211601257324219, "learning_rate": 0.0015, "loss": 1.4977, "step": 1520 },
    { "epoch": 0.16139240506329114, "grad_norm": 0.44409823417663574, "learning_rate": 0.0015, "loss": 1.4798, "step": 1530 },
    { "epoch": 0.16244725738396623, "grad_norm": 0.4143390357494354, "learning_rate": 0.0015, "loss": 1.4666, "step": 1540 },
    { "epoch": 0.16350210970464135, "grad_norm": 0.45285868644714355, "learning_rate": 0.0015, "loss": 1.4724, "step": 1550 },
    { "epoch": 0.16455696202531644, "grad_norm": 0.44844868779182434, "learning_rate": 0.0015, "loss": 1.4721, "step": 1560 },
    { "epoch": 0.16561181434599156, "grad_norm": 0.43528443574905396, "learning_rate": 0.0015, "loss": 1.4662, "step": 1570 },
    { "epoch": 0.16666666666666666, "grad_norm": 0.4710999131202698, "learning_rate": 0.0015, "loss": 1.4786, "step": 1580 },
    { "epoch": 0.16772151898734178, "grad_norm": 0.4777051508426666, "learning_rate": 0.0015, "loss": 1.4755, "step": 1590 },
    { "epoch": 0.16877637130801687, "grad_norm": 0.43375569581985474, "learning_rate": 0.0015, "loss": 1.4753, "step": 1600 },
    { "epoch": 0.169831223628692, "grad_norm": 0.44435209035873413, "learning_rate": 0.0015, "loss": 1.4761, "step": 1610 },
    { "epoch": 0.17088607594936708, "grad_norm": 0.4641205370426178, "learning_rate": 0.0015, "loss": 1.4454, "step": 1620 },
    { "epoch": 0.1719409282700422, "grad_norm": 0.43654873967170715, "learning_rate": 0.0015, "loss": 1.4578, "step": 1630 },
    { "epoch": 0.1729957805907173, "grad_norm": 0.43061891198158264, "learning_rate": 0.0015, "loss": 1.4605, "step": 1640 },
    { "epoch": 0.17405063291139242, "grad_norm": 0.4999637007713318, "learning_rate": 0.0015, "loss": 1.4506, "step": 1650 },
    { "epoch": 0.1751054852320675, "grad_norm": 0.4092038571834564, "learning_rate": 0.0015, "loss": 1.4538, "step": 1660 },
    { "epoch": 0.17616033755274263, "grad_norm": 0.4975314438343048, "learning_rate": 0.0015, "loss": 1.464, "step": 1670 },
    { "epoch": 0.17721518987341772, "grad_norm": 0.5646780133247375, "learning_rate": 0.0015, "loss": 1.4501, "step": 1680 },
    { "epoch": 0.17827004219409281, "grad_norm": 0.436535120010376, "learning_rate": 0.0015, "loss": 1.4464, "step": 1690 },
    { "epoch": 0.17932489451476794, "grad_norm": 0.43750905990600586, "learning_rate": 0.0015, "loss": 1.4623, "step": 1700 },
    { "epoch": 0.18037974683544303, "grad_norm": 0.4112912118434906, "learning_rate": 0.0015, "loss": 1.4389, "step": 1710 },
    { "epoch": 0.18143459915611815, "grad_norm": 0.39078304171562195, "learning_rate": 0.0015, "loss": 1.4371, "step": 1720 },
    { "epoch": 0.18248945147679324, "grad_norm": 0.4432763159275055, "learning_rate": 0.0015, "loss": 1.4397, "step": 1730 },
    { "epoch": 0.18354430379746836, "grad_norm": 0.42006704211235046, "learning_rate": 0.0015, "loss": 1.443, "step": 1740 },
    { "epoch": 0.18459915611814345, "grad_norm": 0.44937148690223694, "learning_rate": 0.0015, "loss": 1.4642, "step": 1750 },
    { "epoch": 0.18565400843881857, "grad_norm": 0.4456152617931366, "learning_rate": 0.0015, "loss": 1.4428, "step": 1760 },
    { "epoch": 0.18670886075949367, "grad_norm": 0.4034252464771271, "learning_rate": 0.0015, "loss": 1.4444, "step": 1770 },
    { "epoch": 0.1877637130801688, "grad_norm": 0.41375723481178284, "learning_rate": 0.0015, "loss": 1.4463, "step": 1780 },
    { "epoch": 0.18881856540084388, "grad_norm": 0.44860073924064636, "learning_rate": 0.0015, "loss": 1.4482, "step": 1790 },
    { "epoch": 0.189873417721519, "grad_norm": 0.4023696482181549, "learning_rate": 0.0015, "loss": 1.4437, "step": 1800 },
    { "epoch": 0.1909282700421941, "grad_norm": 0.42736566066741943, "learning_rate": 0.0015, "loss": 1.4281, "step": 1810 },
    { "epoch": 0.19198312236286919, "grad_norm": 0.48598966002464294, "learning_rate": 0.0015, "loss": 1.4277, "step": 1820 },
    { "epoch": 0.1930379746835443, "grad_norm": 0.45729580521583557, "learning_rate": 0.0015, "loss": 1.4284, "step": 1830 },
    { "epoch": 0.1940928270042194, "grad_norm": 0.41488245129585266, "learning_rate": 0.0015, "loss": 1.4288, "step": 1840 },
    { "epoch": 0.19514767932489452, "grad_norm": 0.4101181626319885, "learning_rate": 0.0015, "loss": 1.4296, "step": 1850 },
    { "epoch": 0.1962025316455696, "grad_norm": 0.3972269296646118, "learning_rate": 0.0015, "loss": 1.4295, "step": 1860 },
    { "epoch": 0.19725738396624473, "grad_norm": 0.4474700391292572, "learning_rate": 0.0015, "loss": 1.4327, "step": 1870 },
    { "epoch": 0.19831223628691982, "grad_norm": 0.4976608157157898, "learning_rate": 0.0015, "loss": 1.421, "step": 1880 },
    { "epoch": 0.19936708860759494, "grad_norm": 0.47018617391586304, "learning_rate": 0.0015, "loss": 1.4293, "step": 1890 },
    { "epoch": 0.20042194092827004, "grad_norm": 0.43353503942489624, "learning_rate": 0.0015, "loss": 1.4208, "step": 1900 },
    { "epoch": 0.20147679324894516, "grad_norm": 0.4655958116054535, "learning_rate": 0.0015, "loss": 1.4235, "step": 1910 },
    { "epoch": 0.20253164556962025, "grad_norm": 0.40534839034080505, "learning_rate": 0.0015, "loss": 1.4226, "step": 1920 },
    { "epoch": 0.20358649789029537, "grad_norm": 0.46445077657699585, "learning_rate": 0.0015, "loss": 1.4226, "step": 1930 },
    { "epoch": 0.20464135021097046, "grad_norm": 0.3819921314716339, "learning_rate": 0.0015, "loss": 1.4135, "step": 1940 },
    { "epoch": 0.20569620253164558, "grad_norm": 0.3935282528400421, "learning_rate": 0.0015, "loss": 1.4088, "step": 1950 },
    { "epoch": 0.20675105485232068, "grad_norm": 0.42892390489578247, "learning_rate": 0.0015, "loss": 1.4121, "step": 1960 },
    { "epoch": 0.20780590717299577, "grad_norm": 0.40014711022377014, "learning_rate": 0.0015, "loss": 1.4235, "step": 1970 },
    { "epoch": 0.2088607594936709, "grad_norm": 0.48264893889427185, "learning_rate": 0.0015, "loss": 1.405, "step": 1980 },
    { "epoch": 0.20991561181434598, "grad_norm": 0.4335213303565979, "learning_rate": 0.0015, "loss": 1.4061, "step": 1990 },
    { "epoch": 0.2109704641350211, "grad_norm": 0.4485473036766052, "learning_rate": 0.0015, "loss": 1.4045, "step": 2000 },
    { "epoch": 0.2120253164556962, "grad_norm": 0.3801015317440033, "learning_rate": 0.0015, "loss": 1.4107, "step": 2010 },
    { "epoch": 0.21308016877637131, "grad_norm": 0.5835695862770081, "learning_rate": 0.0015, "loss": 1.4223, "step": 2020 },
    { "epoch": 0.2141350210970464, "grad_norm": 0.4383508861064911, "learning_rate": 0.0015, "loss": 1.4068, "step": 2030 },
    { "epoch": 0.21518987341772153, "grad_norm": 0.3987447917461395, "learning_rate": 0.0015, "loss": 1.4073, "step": 2040 },
    { "epoch": 0.21624472573839662, "grad_norm": 0.4258471727371216, "learning_rate": 0.0015, "loss": 1.4054, "step": 2050 },
    { "epoch": 0.21729957805907174, "grad_norm": 0.4014463424682617, "learning_rate": 0.0015, "loss": 1.3986, "step": 2060 },
    { "epoch": 0.21835443037974683, "grad_norm": 0.4350442588329315, "learning_rate": 0.0015, "loss": 1.4018, "step": 2070 },
    { "epoch": 0.21940928270042195, "grad_norm": 0.44266244769096375, "learning_rate": 0.0015, "loss": 1.4064, "step": 2080 },
    { "epoch": 0.22046413502109705, "grad_norm": 0.4371784031391144, "learning_rate": 0.0015, "loss": 1.3977, "step": 2090 },
    { "epoch": 0.22151898734177214, "grad_norm": 0.5309934020042419, "learning_rate": 0.0015, "loss": 1.4059, "step": 2100 },
    { "epoch": 0.22257383966244726, "grad_norm": 0.39169225096702576, "learning_rate": 0.0015, "loss": 1.3954, "step": 2110 },
    { "epoch": 0.22362869198312235, "grad_norm": 0.4109102189540863, "learning_rate": 0.0015, "loss": 1.3929, "step": 2120 },
    { "epoch": 0.22468354430379747, "grad_norm": 0.41619110107421875, "learning_rate": 0.0015, "loss": 1.395, "step": 2130 },
    { "epoch": 0.22573839662447256, "grad_norm": 0.38766422867774963, "learning_rate": 0.0015, "loss": 1.3884, "step": 2140 },
    { "epoch": 0.22679324894514769, "grad_norm": 0.4916270971298218, "learning_rate": 0.0015, "loss": 1.3973, "step": 2150 },
    { "epoch": 0.22784810126582278, "grad_norm": 0.5352882146835327, "learning_rate": 0.0015, "loss": 1.4078, "step": 2160 },
    { "epoch": 0.2289029535864979, "grad_norm": 0.4140731394290924, "learning_rate": 0.0015, "loss": 1.3921, "step": 2170 },
    { "epoch": 0.229957805907173, "grad_norm": 0.41106846928596497, "learning_rate": 0.0015, "loss": 1.3766, "step": 2180 },
    { "epoch": 0.2310126582278481, "grad_norm": 0.5240281224250793, "learning_rate": 0.0015, "loss": 1.3942, "step": 2190 },
    { "epoch": 0.2320675105485232, "grad_norm": 0.48931968212127686, "learning_rate": 0.0015, "loss": 1.3958, "step": 2200 },
    { "epoch": 0.23312236286919832, "grad_norm": 0.40602126717567444, "learning_rate": 0.0015, "loss": 1.3791, "step": 2210 },
    { "epoch": 0.23417721518987342, "grad_norm": 0.36141782999038696, "learning_rate": 0.0015, "loss": 1.3915, "step": 2220 },
    { "epoch": 0.23523206751054854, "grad_norm": 0.550554096698761, "learning_rate": 0.0015, "loss": 1.3864, "step": 2230 },
    { "epoch": 0.23628691983122363, "grad_norm": 0.4383642077445984, "learning_rate": 0.0015, "loss": 1.3855, "step": 2240 },
    { "epoch": 0.23734177215189872, "grad_norm": 0.4155581295490265, "learning_rate": 0.0015, "loss": 1.3821, "step": 2250 },
    { "epoch": 0.23839662447257384, "grad_norm": 0.5224567651748657, "learning_rate": 0.0015, "loss": 1.3723, "step": 2260 },
    { "epoch": 0.23945147679324894, "grad_norm": 0.40350115299224854, "learning_rate": 0.0015, "loss": 1.4061, "step": 2270 },
    { "epoch": 0.24050632911392406, "grad_norm": 0.4014328420162201, "learning_rate": 0.0015, "loss": 1.3902, "step": 2280 },
    { "epoch": 0.24156118143459915, "grad_norm": 0.45540615916252136, "learning_rate": 0.0015, "loss": 1.3805, "step": 2290 },
    { "epoch": 0.24261603375527427, "grad_norm": 0.39433974027633667, "learning_rate": 0.0015, "loss": 1.3777, "step": 2300 },
    { "epoch": 0.24367088607594936, "grad_norm": 0.3825417459011078, "learning_rate": 0.0015, "loss": 1.3742, "step": 2310 },
    { "epoch": 0.24472573839662448, "grad_norm": 0.4290764331817627, "learning_rate": 0.0015, "loss": 1.3864, "step": 2320 },
    { "epoch": 0.24578059071729957, "grad_norm": 0.3883923590183258, "learning_rate": 0.0015, "loss": 1.3689, "step": 2330 },
    { "epoch": 0.2468354430379747, "grad_norm": 0.43312767148017883, "learning_rate": 0.0015, "loss": 1.3621, "step": 2340 },
    { "epoch": 0.2478902953586498, "grad_norm": 0.4548530876636505, "learning_rate": 0.0015, "loss": 1.3653, "step": 2350 },
    { "epoch": 0.2489451476793249, "grad_norm": 0.5046688914299011, "learning_rate": 0.0015, "loss": 1.3775, "step": 2360 },
    { "epoch": 0.25, "grad_norm": 0.4298846125602722, "learning_rate": 0.0015, "loss": 1.3738, "step": 2370 },
    { "epoch": 0.2510548523206751, "grad_norm": 0.4356420636177063, "learning_rate": 0.0015, "loss": 1.3683, "step": 2380 },
    { "epoch": 0.2521097046413502, "grad_norm": 0.4656219184398651, "learning_rate": 0.0015, "loss": 1.3709, "step": 2390 },
    { "epoch": 0.25316455696202533, "grad_norm": 0.3807707130908966, "learning_rate": 0.0015, "loss": 1.369, "step": 2400 },
    { "epoch": 0.2542194092827004, "grad_norm": 0.4016205370426178, "learning_rate": 0.0015, "loss": 1.3717, "step": 2410 },
    { "epoch": 0.2552742616033755, "grad_norm": 0.3906775116920471, "learning_rate": 0.0015, "loss": 1.3662, "step": 2420 },
    { "epoch": 0.2563291139240506, "grad_norm": 0.4661619961261749, "learning_rate": 0.0015, "loss": 1.3614, "step": 2430 },
    { "epoch": 0.25738396624472576, "grad_norm": 0.42744624614715576, "learning_rate": 0.0015, "loss": 1.3732, "step": 2440 },
    { "epoch": 0.25843881856540085, "grad_norm": 0.3593474328517914, "learning_rate": 0.0015, "loss": 1.3596, "step": 2450 },
    { "epoch": 0.25949367088607594, "grad_norm": 0.36683934926986694, "learning_rate": 0.0015, "loss": 1.37, "step": 2460 },
    { "epoch": 0.26054852320675104, "grad_norm": 0.47187283635139465, "learning_rate": 0.0015, "loss": 1.3627, "step": 2470 },
    { "epoch": 0.2616033755274262, "grad_norm": 0.36955729126930237, "learning_rate": 0.0015, "loss": 1.367, "step": 2480 },
    { "epoch": 0.2626582278481013, "grad_norm": 0.4172874689102173, "learning_rate": 0.0015, "loss": 1.3564, "step": 2490 },
    { "epoch": 0.26371308016877637, "grad_norm": 0.42587655782699585, "learning_rate": 0.0015, "loss": 1.3608, "step": 2500 },
    { "epoch": 0.26476793248945146, "grad_norm": 0.3867819905281067, "learning_rate": 0.0015, "loss": 1.3616, "step": 2510 },
    { "epoch": 0.26582278481012656, "grad_norm": 0.4664280414581299, "learning_rate": 0.0015, "loss": 1.3648, "step": 2520 },
    { "epoch": 0.2668776371308017, "grad_norm": 0.43253934383392334, "learning_rate": 0.0015, "loss": 1.3612, "step": 2530 },
    { "epoch": 0.2679324894514768, "grad_norm": 0.5740087628364563, "learning_rate": 0.0015, "loss": 1.3582, "step": 2540 },
    { "epoch": 0.2689873417721519, "grad_norm": 0.4363726079463959, "learning_rate": 0.0015, "loss": 1.3555, "step": 2550 },
    { "epoch": 0.270042194092827, "grad_norm": 0.38443291187286377, "learning_rate": 0.0015, "loss": 1.3625, "step": 2560 },
    { "epoch": 0.27109704641350213, "grad_norm": 0.45384156703948975, "learning_rate": 0.0015, "loss": 1.3583, "step": 2570 },
    { "epoch": 0.2721518987341772, "grad_norm": 0.36326706409454346, "learning_rate": 0.0015, "loss": 1.3557, "step": 2580 },
    { "epoch": 0.2732067510548523, "grad_norm": 0.37362417578697205, "learning_rate": 0.0015, "loss": 1.3612, "step": 2590 },
    { "epoch": 0.2742616033755274, "grad_norm": 0.36792299151420593, "learning_rate": 0.0015, "loss": 1.3639, "step": 2600 },
    { "epoch": 0.27531645569620256, "grad_norm": 0.39010557532310486, "learning_rate": 0.0015, "loss": 1.3571, "step": 2610 },
    { "epoch": 0.27637130801687765, "grad_norm": 0.4335007071495056, "learning_rate": 0.0015, "loss": 1.3581, "step": 2620 },
    { "epoch": 0.27742616033755274, "grad_norm": 0.4066050350666046, "learning_rate": 0.0015, "loss": 1.344, "step": 2630 },
    { "epoch": 0.27848101265822783, "grad_norm": 0.39231425523757935, "learning_rate": 0.0015, "loss": 1.3571, "step": 2640 },
    { "epoch": 0.2795358649789029, "grad_norm": 0.47960364818573, "learning_rate": 0.0015, "loss": 1.3528, "step": 2650 },
    { "epoch": 0.2805907172995781, "grad_norm": 0.4317481219768524, "learning_rate": 0.0015, "loss": 1.3449, "step": 2660 },
    { "epoch": 0.28164556962025317, "grad_norm": 0.3781212568283081, "learning_rate": 0.0015, "loss": 1.3504, "step": 2670 },
    { "epoch": 0.28270042194092826, "grad_norm": 0.38963305950164795, "learning_rate": 0.0015, "loss": 1.3496, "step": 2680 },
    { "epoch": 0.28375527426160335, "grad_norm": 0.4155701696872711, "learning_rate": 0.0015, "loss": 1.3472, "step": 2690 },
    { "epoch": 0.2848101265822785, "grad_norm": 0.38998645544052124, "learning_rate": 0.0015, "loss": 1.342, "step": 2700 },
    { "epoch": 0.2858649789029536, "grad_norm": 0.40188562870025635, "learning_rate": 0.0015, "loss": 1.3501, "step": 2710 },
    { "epoch": 0.2869198312236287, "grad_norm": 0.38737237453460693, "learning_rate": 0.0015, "loss": 1.3491, "step": 2720 },
    { "epoch": 0.2879746835443038, "grad_norm": 0.40319275856018066, "learning_rate": 0.0015, "loss": 1.3301, "step": 2730 },
    { "epoch": 0.2890295358649789, "grad_norm": 0.39979711174964905, "learning_rate": 0.0015, "loss": 1.3415, "step": 2740 },
    { "epoch": 0.290084388185654, "grad_norm": 0.4038510322570801, "learning_rate": 0.0015, "loss": 1.3488, "step": 2750 },
    { "epoch": 0.2911392405063291, "grad_norm": 0.5032958984375, "learning_rate": 0.0015, "loss": 1.3563, "step": 2760 },
    { "epoch": 0.2921940928270042, "grad_norm": 0.432725727558136, "learning_rate": 0.0015, "loss": 1.3524, "step": 2770 },
    { "epoch": 0.29324894514767935, "grad_norm": 0.4258490204811096, "learning_rate": 0.0015, "loss": 1.3465, "step": 2780 },
    { "epoch": 0.29430379746835444, "grad_norm": 0.370568186044693, "learning_rate": 0.0015, "loss": 1.3436, "step": 2790 },
    { "epoch": 0.29535864978902954, "grad_norm": 0.3846338093280792, "learning_rate": 0.0015, "loss": 1.3406, "step": 2800 },
    { "epoch": 0.29641350210970463, "grad_norm": 0.38603997230529785, "learning_rate": 0.0015, "loss": 1.3419, "step": 2810 },
    { "epoch": 0.2974683544303797, "grad_norm": 0.4352133870124817, "learning_rate": 0.0015, "loss": 1.3495, "step": 2820 },
    { "epoch": 0.29852320675105487, "grad_norm": 0.45890146493911743, "learning_rate": 0.0015, "loss": 1.3247, "step": 2830 },
    { "epoch": 0.29957805907172996, "grad_norm": 0.45596638321876526, "learning_rate": 0.0015, "loss": 1.3293, "step": 2840 },
    { "epoch": 0.30063291139240506, "grad_norm": 0.4347798526287079, "learning_rate": 0.0015, "loss": 1.3335, "step": 2850 },
    { "epoch": 0.30168776371308015, "grad_norm": 0.39894723892211914, "learning_rate": 0.0015, "loss": 1.3373, "step": 2860 },
    { "epoch": 0.3027426160337553, "grad_norm": 0.4256788194179535, "learning_rate": 0.0015, "loss": 1.3404, "step": 2870 },
    { "epoch": 0.3037974683544304, "grad_norm": 0.40725162625312805, "learning_rate": 0.0015, "loss": 1.326, "step": 2880 },
    { "epoch": 0.3048523206751055, "grad_norm": 0.3742321729660034, "learning_rate": 0.0015, "loss": 1.3286, "step": 2890 },
    { "epoch": 0.3059071729957806, "grad_norm": 0.4294082224369049, "learning_rate": 0.0015, "loss": 1.3371, "step": 2900 },
    { "epoch": 0.3069620253164557, "grad_norm": 0.38926073908805847, "learning_rate": 0.0015, "loss": 1.3369, "step": 2910 },
    { "epoch": 0.3080168776371308, "grad_norm": 0.3792055547237396, "learning_rate": 0.0015, "loss": 1.3318, "step": 2920 },
    { "epoch": 0.3090717299578059, "grad_norm": 0.3738500475883484, "learning_rate": 0.0015, "loss": 1.3283, "step": 2930 },
    { "epoch": 0.310126582278481, "grad_norm": 0.3772472143173218, "learning_rate": 0.0015, "loss": 1.33, "step": 2940 },
    { "epoch": 0.3111814345991561, "grad_norm": 0.35470521450042725, "learning_rate": 0.0015, "loss": 1.3417, "step": 2950 },
    { "epoch": 0.31223628691983124, "grad_norm": 0.4564476013183594, "learning_rate": 0.0015, "loss": 1.3269, "step": 2960 },
    { "epoch": 0.31329113924050633, "grad_norm": 0.42535507678985596, "learning_rate": 0.0015, "loss": 1.3218, "step": 2970 },
    { "epoch": 0.3143459915611814, "grad_norm": 0.4032561779022217, "learning_rate": 0.0015, "loss": 1.3237, "step": 2980 },
    { "epoch": 0.3154008438818565, "grad_norm": 0.39573147892951965, "learning_rate": 0.0015, "loss": 1.3367, "step": 2990 },
    { "epoch": 0.31645569620253167, "grad_norm": 0.369488924741745, "learning_rate": 0.0015, "loss": 1.3198, "step": 3000 },
    { "epoch": 0.31751054852320676, "grad_norm": 0.3686826527118683, "learning_rate": 0.0015, "loss": 1.3266, "step": 3010 },
    { "epoch": 0.31856540084388185, "grad_norm": 0.4304807484149933, "learning_rate": 0.0015, "loss": 1.3289, "step": 3020 },
    { "epoch": 0.31962025316455694, "grad_norm": 0.4005890190601349, "learning_rate": 0.0015, "loss": 1.3159, "step": 3030 },
    { "epoch": 0.3206751054852321, "grad_norm": 0.4065329134464264, "learning_rate": 0.0015, "loss": 1.323, "step": 3040 },
    { "epoch": 0.3217299578059072, "grad_norm": 0.37223920226097107, "learning_rate": 0.0015, "loss": 1.3209, "step": 3050 },
    { "epoch": 0.3227848101265823, "grad_norm": 0.357913076877594, "learning_rate": 0.0015, "loss": 1.3307, "step": 3060 },
    { "epoch": 0.32383966244725737, "grad_norm": 0.4006459712982178, "learning_rate": 0.0015, "loss": 1.3211, "step": 3070 },
    { "epoch": 0.32489451476793246, "grad_norm": 0.37744468450546265, "learning_rate": 0.0015, "loss": 1.3126, "step": 3080 },
    { "epoch": 0.3259493670886076, "grad_norm": 0.3618320822715759, "learning_rate": 0.0015, "loss": 1.3311, "step": 3090 },
    { "epoch": 0.3270042194092827, "grad_norm": 0.38559070229530334, "learning_rate": 0.0015, "loss": 1.3167, "step": 3100 },
    { "epoch": 0.3280590717299578, "grad_norm": 0.4073399007320404, "learning_rate": 0.0015, "loss": 1.3192, "step": 3110 },
    { "epoch": 0.3291139240506329, "grad_norm": 0.3995159864425659, "learning_rate": 0.0015, "loss": 1.3211, "step": 3120 },
    { "epoch": 0.33016877637130804, "grad_norm": 0.40881553292274475, "learning_rate": 0.0015, "loss": 1.3339, "step": 3130 },
    { "epoch": 0.33122362869198313, "grad_norm": 0.4172428548336029, "learning_rate": 0.0015, "loss": 1.3081, "step": 3140 },
    { "epoch": 0.3322784810126582, "grad_norm": 0.38847237825393677, "learning_rate": 0.0015, "loss": 1.3214, "step": 3150 },
    { "epoch": 0.3333333333333333, "grad_norm": 0.3866778314113617, "learning_rate": 0.0015, "loss": 1.3199, "step": 3160 },
    { "epoch": 0.33438818565400846, "grad_norm": 0.3850679397583008, "learning_rate": 0.0015, "loss": 1.3249, "step": 3170 },
    { "epoch": 0.33544303797468356, "grad_norm": 0.415362149477005, "learning_rate": 0.0015, "loss": 1.318, "step": 3180 },
    { "epoch": 0.33649789029535865, "grad_norm": 0.41383853554725647, "learning_rate": 0.0015, "loss": 1.3173, "step": 3190 },
    { "epoch": 0.33755274261603374, "grad_norm": 0.39325055480003357, "learning_rate": 0.0015, "loss": 1.3133, "step": 3200 },
    { "epoch": 0.33860759493670883, "grad_norm": 0.4082551598548889, "learning_rate": 0.0015, "loss": 1.3242, "step": 3210 },
    { "epoch": 0.339662447257384, "grad_norm": 0.3817073106765747, "learning_rate": 0.0015, "loss": 1.3168, "step": 3220 },
    { "epoch": 0.3407172995780591, "grad_norm": 0.3971477746963501, "learning_rate": 0.0015, "loss": 1.3096, "step": 3230 },
    { "epoch": 0.34177215189873417, "grad_norm": 0.3851396441459656, "learning_rate": 0.0015, "loss": 1.2907, "step": 3240 },
    { "epoch": 0.34282700421940926, "grad_norm": 0.36164918541908264, "learning_rate": 0.0015, "loss": 1.3179, "step": 3250 },
    { "epoch": 0.3438818565400844, "grad_norm": 0.3581262230873108, "learning_rate": 0.0015, "loss": 1.3071, "step": 3260 },
    { "epoch": 0.3449367088607595, "grad_norm": 0.4046621322631836, "learning_rate": 0.0015, "loss": 1.3231, "step": 3270 },
    { "epoch": 0.3459915611814346, "grad_norm": 0.45322754979133606, "learning_rate": 0.0015, "loss": 1.3222, "step": 3280 },
    { "epoch": 0.3470464135021097, "grad_norm": 0.47892409563064575, "learning_rate": 0.0015, "loss": 1.3055, "step": 3290 },
    { "epoch": 0.34810126582278483, "grad_norm": 0.3846244812011719, "learning_rate": 0.0015, "loss": 1.3126, "step": 3300 },
    { "epoch": 0.3491561181434599, "grad_norm": 0.36716458201408386, "learning_rate": 0.0015, "loss": 1.3135, "step": 3310 },
    { "epoch": 0.350210970464135, "grad_norm": 0.37936967611312866, "learning_rate": 0.0015, "loss": 1.3194, "step": 3320 },
    { "epoch": 0.3512658227848101, "grad_norm": 0.36913326382637024, "learning_rate": 0.0015, "loss": 1.3094, "step": 3330 },
    { "epoch": 0.35232067510548526, "grad_norm": 0.40605461597442627, "learning_rate": 0.0015, "loss": 1.3109, "step": 3340 },
    { "epoch": 0.35337552742616035, "grad_norm": 0.3609521985054016, "learning_rate": 0.0015, "loss": 1.304, "step": 3350 },
    { "epoch": 0.35443037974683544, "grad_norm": 0.3889778256416321, "learning_rate": 0.0015, "loss": 1.3039, "step": 3360 },
    { "epoch": 0.35548523206751054, "grad_norm": 0.4098104238510132, "learning_rate": 0.0015, "loss": 1.3144, "step": 3370 },
    { "epoch": 0.35654008438818563, "grad_norm": 0.37357568740844727, "learning_rate": 0.0015, "loss": 1.3078, "step": 3380 },
    { "epoch": 0.3575949367088608, "grad_norm": 0.39619654417037964, "learning_rate": 0.0015, "loss": 1.3039, "step": 3390 },
    { "epoch": 0.35864978902953587, "grad_norm": 0.4084376394748688, "learning_rate": 0.0015, "loss": 1.3099, "step": 3400 },
    { "epoch": 0.35970464135021096, "grad_norm": 0.4111672043800354, "learning_rate": 0.0015, "loss": 1.3097, "step": 3410 },
    { "epoch": 0.36075949367088606, "grad_norm": 0.4097122550010681, "learning_rate": 0.0015, "loss": 1.2917, "step": 3420 },
    { "epoch": 0.3618143459915612, "grad_norm": 0.4278287887573242, "learning_rate": 0.0015, "loss": 1.3061, "step": 3430 },
    { "epoch": 0.3628691983122363, "grad_norm": 0.3902808725833893, "learning_rate": 0.0015, "loss": 1.3039, "step": 3440 },
    { "epoch": 0.3639240506329114, "grad_norm": 0.4041774868965149, "learning_rate": 0.0015, "loss": 1.3011, "step": 3450 },
    { "epoch": 0.3649789029535865, "grad_norm": 0.43103742599487305, "learning_rate": 0.0015, "loss": 1.2968, "step": 3460 },
    { "epoch": 0.36603375527426163, "grad_norm": 0.39118364453315735, "learning_rate": 0.0015, "loss": 1.3109, "step": 3470 },
    { "epoch": 0.3670886075949367, "grad_norm": 0.3834078013896942, "learning_rate": 0.0015, "loss": 1.3154, "step": 3480 },
    { "epoch": 0.3681434599156118, "grad_norm": 0.37766724824905396, "learning_rate": 0.0015, "loss": 1.3072, "step": 3490 },
    { "epoch": 0.3691983122362869, "grad_norm": 0.37089774012565613, "learning_rate": 0.0015, "loss": 1.2862, "step": 3500 },
    { "epoch": 0.370253164556962, "grad_norm": 0.39285215735435486, "learning_rate": 0.0015, "loss": 1.3097, "step": 3510 },
    { "epoch": 0.37130801687763715, "grad_norm": 0.4295338988304138, "learning_rate": 0.0015, "loss": 1.3058, "step": 3520 },
    { "epoch": 0.37236286919831224, "grad_norm": 0.38859865069389343, "learning_rate": 0.0015, "loss": 1.3126, "step": 3530 },
    { "epoch": 0.37341772151898733, "grad_norm": 0.40371015667915344, "learning_rate": 0.0015, "loss": 1.3021, "step": 3540 },
    { "epoch": 0.3744725738396624, "grad_norm": 0.3651468753814697, "learning_rate": 0.0015, "loss": 1.2968, "step": 3550 },
    { "epoch": 0.3755274261603376, "grad_norm": 0.36767125129699707, "learning_rate": 0.0015, "loss": 1.2971, "step": 3560 },
    { "epoch": 0.37658227848101267, "grad_norm": 0.4188666045665741, "learning_rate": 0.0015, "loss": 1.2942, "step": 3570 },
    { "epoch": 0.37763713080168776, "grad_norm": 0.481037437915802, "learning_rate": 0.0015, "loss": 1.2977, "step": 3580 },
    { "epoch": 0.37869198312236285, "grad_norm": 0.4443666636943817, "learning_rate": 0.0015, "loss": 1.3077, "step": 3590 },
    { "epoch": 0.379746835443038, "grad_norm": 0.36201488971710205, "learning_rate": 0.0015, "loss": 1.2909, "step": 3600 },
    { "epoch": 0.3808016877637131, "grad_norm": 0.3435400426387787, "learning_rate": 0.0015, "loss": 1.2924, "step": 3610 },
    { "epoch": 0.3818565400843882, "grad_norm": 0.38419613242149353, "learning_rate": 0.0015, "loss": 1.2967, "step": 3620 },
    { "epoch": 0.3829113924050633, "grad_norm": 0.4411642253398895, "learning_rate": 0.0015, "loss": 1.2929, "step": 3630 },
    { "epoch": 0.38396624472573837, "grad_norm": 0.3975316882133484, "learning_rate": 0.0015, "loss": 1.2966, "step": 3640 },
    { "epoch": 0.3850210970464135, "grad_norm": 0.3638233244419098, "learning_rate": 0.0015, "loss": 1.3035, "step": 3650 },
    { "epoch": 0.3860759493670886, "grad_norm": 0.3786154091358185, "learning_rate": 0.0015, "loss": 1.2938, "step": 3660 },
    { "epoch": 0.3871308016877637, "grad_norm": 0.3656991422176361, "learning_rate": 0.0015, "loss": 1.2899, "step": 3670 },
    { "epoch": 0.3881856540084388, "grad_norm": 0.5419597029685974, "learning_rate": 0.0015, "loss": 1.2922, "step": 3680 },
    { "epoch": 0.38924050632911394, "grad_norm": 0.4152900278568268, "learning_rate": 0.0015, "loss": 1.3027, "step": 3690 },
    { "epoch": 0.39029535864978904, "grad_norm": 0.3799019455909729, "learning_rate": 0.0015, "loss": 1.3026, "step": 3700 },
    { "epoch": 0.39135021097046413, "grad_norm": 0.3744471073150635, "learning_rate": 0.0015, "loss": 1.2933, "step": 3710 },
    { "epoch": 0.3924050632911392, "grad_norm": 0.3584083020687103, "learning_rate": 0.0015, "loss": 1.2744, "step": 3720 },
    { "epoch": 0.39345991561181437, "grad_norm": 0.375611811876297, "learning_rate": 0.0015, "loss": 1.2936, "step": 3730 },
    { "epoch": 0.39451476793248946, "grad_norm": 0.3821758031845093, "learning_rate": 0.0015, "loss": 1.2904, "step": 3740 },
    { "epoch": 0.39556962025316456, "grad_norm": 0.3981541097164154, "learning_rate": 0.0015, "loss": 1.2987, "step": 3750 },
    { "epoch": 0.39662447257383965, "grad_norm": 0.3971460461616516, "learning_rate": 0.0015, "loss": 1.2851, "step": 3760 },
    { "epoch": 0.39767932489451474, "grad_norm": 0.3641221225261688, "learning_rate": 0.0015, "loss": 1.3026, "step": 3770 },
    { "epoch": 0.3987341772151899, "grad_norm": 0.3688564896583557, "learning_rate": 0.0015, "loss": 1.2854, "step": 3780 },
    { "epoch": 0.399789029535865, "grad_norm": 0.4649963080883026, "learning_rate": 0.0015, "loss": 1.293, "step": 3790 },
    { "epoch": 0.4008438818565401, "grad_norm": 0.43506956100463867, "learning_rate": 0.0015, "loss": 1.2983, "step": 3800 },
    { "epoch": 0.40189873417721517, "grad_norm": 0.3600989282131195, "learning_rate": 0.0015, "loss": 1.2861, "step": 3810 },
    { "epoch": 0.4029535864978903, "grad_norm": 0.35615450143814087, "learning_rate": 0.0015, "loss": 1.2795, "step": 3820 },
    { "epoch": 0.4040084388185654, "grad_norm": 0.3604353368282318, "learning_rate": 0.0015, "loss": 1.284, "step": 3830 },
    { "epoch": 0.4050632911392405, "grad_norm": 0.37614142894744873, "learning_rate": 0.0015, "loss": 1.2985, "step": 3840 },
    { "epoch": 0.4061181434599156, "grad_norm": 0.3707175850868225, "learning_rate": 0.0015, "loss": 1.2803, "step": 3850 },
    { "epoch": 0.40717299578059074, "grad_norm": 0.40577250719070435, "learning_rate": 0.0015, "loss": 1.2921, "step": 3860 },
    { "epoch": 0.40822784810126583, "grad_norm": 0.38110923767089844, "learning_rate": 0.0015, "loss": 1.2858, "step": 3870 },
    { "epoch": 0.4092827004219409, "grad_norm": 0.3657989203929901, "learning_rate": 0.0015, "loss": 1.2752, "step": 3880 },
    { "epoch": 0.410337552742616, "grad_norm": 0.37295961380004883, "learning_rate": 0.0015, "loss": 1.2743, "step": 3890 },
    { "epoch": 0.41139240506329117, "grad_norm": 0.5426948070526123, "learning_rate": 0.0015, "loss": 1.2808, "step": 3900 },
    { "epoch": 0.41244725738396626, "grad_norm": 0.398045152425766, "learning_rate": 0.0015, "loss": 1.2877, "step": 3910 },
    { "epoch": 0.41350210970464135, "grad_norm": 0.37474942207336426, "learning_rate": 0.0015, "loss": 1.2795, "step": 3920 },
    { "epoch": 0.41455696202531644, "grad_norm": 0.38445577025413513, "learning_rate": 0.0015, "loss": 1.2844, "step": 3930 },
    { "epoch": 0.41561181434599154, "grad_norm": 0.4166354835033417, "learning_rate": 0.0015, "loss": 1.2782, "step": 3940 },
    { "epoch": 0.4166666666666667, "grad_norm": 0.3484591543674469, "learning_rate": 0.0015, "loss": 1.2835, "step": 3950 },
    { "epoch": 0.4177215189873418, "grad_norm": 0.3584078252315521, "learning_rate": 0.0015, "loss": 1.2837, "step": 3960 },
    { "epoch": 0.41877637130801687, "grad_norm": 0.4460738003253937, "learning_rate": 0.0015, "loss": 1.2736, "step": 3970 },
    { "epoch": 0.41983122362869196, "grad_norm": 0.37689462304115295, "learning_rate": 0.0015, "loss": 1.2851, "step": 3980 },
    { "epoch": 0.4208860759493671, "grad_norm": 0.3761577904224396, "learning_rate": 0.0015, "loss": 1.276, "step": 3990 },
    { "epoch": 0.4219409282700422, "grad_norm": 0.35418757796287537, "learning_rate": 0.0015, "loss": 1.2882, "step": 4000 },
    { "epoch": 0.4229957805907173, "grad_norm": 0.4295203387737274, "learning_rate": 0.0015, "loss": 1.2772, "step": 4010 },
    { "epoch": 0.4240506329113924, "grad_norm": 0.37253305315971375,
|
"learning_rate": 0.0015, |
|
"loss": 1.2799, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 0.42510548523206754, |
|
"grad_norm": 0.36368170380592346, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2734, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.42616033755274263, |
|
"grad_norm": 0.37631523609161377, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2793, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 0.4272151898734177, |
|
"grad_norm": 0.36368027329444885, |
|
"learning_rate": 0.0015, |
|
"loss": 1.282, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.4282700421940928, |
|
"grad_norm": 0.4057527184486389, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2864, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 0.4293248945147679, |
|
"grad_norm": 0.34539783000946045, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2814, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 0.43037974683544306, |
|
"grad_norm": 0.5057688355445862, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2873, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.43143459915611815, |
|
"grad_norm": 0.430195689201355, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2817, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 0.43248945147679324, |
|
"grad_norm": 0.38058942556381226, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2782, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.43354430379746833, |
|
"grad_norm": 0.4111412763595581, |
|
"learning_rate": 0.0015, |
|
"loss": 1.27, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 0.4345991561181435, |
|
"grad_norm": 0.38134413957595825, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2816, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 0.4356540084388186, |
|
"grad_norm": 0.37636449933052063, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2669, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 0.43670886075949367, |
|
"grad_norm": 0.3791978657245636, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2718, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 0.43776371308016876, |
|
"grad_norm": 0.37329211831092834, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2758, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.4388185654008439, |
|
"grad_norm": 0.37318155169487, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2715, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 0.439873417721519, |
|
"grad_norm": 0.3652787506580353, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2693, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 0.4409282700421941, |
|
"grad_norm": 0.3359285295009613, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2671, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 0.4419831223628692, |
|
"grad_norm": 0.3559111952781677, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2723, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 0.4430379746835443, |
|
"grad_norm": 0.3659558892250061, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2766, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.4440928270042194, |
|
"grad_norm": 0.3589211404323578, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2669, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 0.4451476793248945, |
|
"grad_norm": 0.39184415340423584, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2767, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 0.4462025316455696, |
|
"grad_norm": 0.47252312302589417, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2736, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 0.4472573839662447, |
|
"grad_norm": 0.3448552191257477, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2692, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 0.44831223628691985, |
|
"grad_norm": 0.39452487230300903, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2731, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.44936708860759494, |
|
"grad_norm": 0.4135473072528839, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2796, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 0.45042194092827004, |
|
"grad_norm": 0.358565092086792, |
|
"learning_rate": 0.0015, |
|
"loss": 1.268, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 0.45147679324894513, |
|
"grad_norm": 0.39048680663108826, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2626, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 0.4525316455696203, |
|
"grad_norm": 0.3444206416606903, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2733, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 0.45358649789029537, |
|
"grad_norm": 0.3811909556388855, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2723, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.45464135021097046, |
|
"grad_norm": 0.35479670763015747, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2697, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 0.45569620253164556, |
|
"grad_norm": 0.38825368881225586, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2671, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 0.45675105485232065, |
|
"grad_norm": 0.3594970405101776, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2745, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 0.4578059071729958, |
|
"grad_norm": 0.35297471284866333, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2685, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.4588607594936709, |
|
"grad_norm": 0.43033990263938904, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2673, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.459915611814346, |
|
"grad_norm": 0.33862191438674927, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2839, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 0.4609704641350211, |
|
"grad_norm": 0.3441391587257385, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2593, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 0.4620253164556962, |
|
"grad_norm": 0.3779299855232239, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2654, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 0.4630801687763713, |
|
"grad_norm": 0.3549061119556427, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2625, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 0.4641350210970464, |
|
"grad_norm": 0.3566148281097412, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2677, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 0.4651898734177215, |
|
"grad_norm": 0.3833557367324829, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2799, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 0.46624472573839665, |
|
"grad_norm": 0.36007896065711975, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2666, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 0.46729957805907174, |
|
"grad_norm": 0.39347776770591736, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2693, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 0.46835443037974683, |
|
"grad_norm": 0.40295612812042236, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2634, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 0.4694092827004219, |
|
"grad_norm": 0.3890196979045868, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2742, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 0.4704641350210971, |
|
"grad_norm": 0.35265377163887024, |
|
"learning_rate": 0.0015, |
|
"loss": 1.258, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 0.47151898734177217, |
|
"grad_norm": 0.373200923204422, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2676, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 0.47257383966244726, |
|
"grad_norm": 0.3729031980037689, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2732, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 0.47362869198312235, |
|
"grad_norm": 0.35276147723197937, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2612, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 0.47468354430379744, |
|
"grad_norm": 0.4667707085609436, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2615, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.4757383966244726, |
|
"grad_norm": 0.3366076648235321, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2664, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 0.4767932489451477, |
|
"grad_norm": 0.40432390570640564, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2498, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 0.4778481012658228, |
|
"grad_norm": 0.3696410655975342, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2675, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 0.47890295358649787, |
|
"grad_norm": 0.36451849341392517, |
|
"learning_rate": 0.0015, |
|
"loss": 1.268, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 0.479957805907173, |
|
"grad_norm": 0.4523102343082428, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2688, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 0.4810126582278481, |
|
"grad_norm": 0.3984118103981018, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2572, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 0.4820675105485232, |
|
"grad_norm": 0.3475066125392914, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2389, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 0.4831223628691983, |
|
"grad_norm": 0.4492590129375458, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2604, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 0.48417721518987344, |
|
"grad_norm": 0.398572713136673, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2507, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 0.48523206751054854, |
|
"grad_norm": 0.3685084581375122, |
|
"learning_rate": 0.0015, |
|
"loss": 1.269, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 0.48628691983122363, |
|
"grad_norm": 0.4924905598163605, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2708, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 0.4873417721518987, |
|
"grad_norm": 0.38425061106681824, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2492, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 0.4883966244725738, |
|
"grad_norm": 0.3547668755054474, |
|
"learning_rate": 0.0015, |
|
"loss": 1.264, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 0.48945147679324896, |
|
"grad_norm": 0.3492022156715393, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2669, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 0.49050632911392406, |
|
"grad_norm": 0.35016942024230957, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2664, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.49156118143459915, |
|
"grad_norm": 0.3903471827507019, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2573, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 0.49261603375527424, |
|
"grad_norm": 0.37766721844673157, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2571, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 0.4936708860759494, |
|
"grad_norm": 0.35377028584480286, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2515, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 0.4947257383966245, |
|
"grad_norm": 0.47878357768058777, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2506, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 0.4957805907172996, |
|
"grad_norm": 0.3595276176929474, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2587, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 0.49683544303797467, |
|
"grad_norm": 0.37340301275253296, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2588, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 0.4978902953586498, |
|
"grad_norm": 0.4304148554801941, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2574, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 0.4989451476793249, |
|
"grad_norm": 0.3435005843639374, |
|
"learning_rate": 0.0015, |
|
"loss": 1.253, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.36592572927474976, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2501, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 0.5010548523206751, |
|
"grad_norm": 0.371332585811615, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2721, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 0.5021097046413502, |
|
"grad_norm": 0.3589191734790802, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2573, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 0.5031645569620253, |
|
"grad_norm": 0.3282475769519806, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2555, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 0.5042194092827004, |
|
"grad_norm": 0.34423360228538513, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2645, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 0.5052742616033755, |
|
"grad_norm": 0.42229223251342773, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2485, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 0.5063291139240507, |
|
"grad_norm": 0.36169490218162537, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2495, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 0.5073839662447257, |
|
"grad_norm": 0.3373130261898041, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2513, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 0.5084388185654009, |
|
"grad_norm": 0.35851961374282837, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2716, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 0.509493670886076, |
|
"grad_norm": 0.38497915863990784, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2553, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 0.510548523206751, |
|
"grad_norm": 0.3687344491481781, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2483, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 0.5116033755274262, |
|
"grad_norm": 0.3644031286239624, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2622, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 0.5126582278481012, |
|
"grad_norm": 0.38985204696655273, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2502, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 0.5137130801687764, |
|
"grad_norm": 0.4006396234035492, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2447, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 0.5147679324894515, |
|
"grad_norm": 0.36055195331573486, |
|
"learning_rate": 0.0015, |
|
"loss": 1.263, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 0.5158227848101266, |
|
"grad_norm": 0.3749941289424896, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2526, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 0.5168776371308017, |
|
"grad_norm": 0.3642890751361847, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2617, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 0.5179324894514767, |
|
"grad_norm": 0.37513530254364014, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2548, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 0.5189873417721519, |
|
"grad_norm": 0.41637057065963745, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2468, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 0.520042194092827, |
|
"grad_norm": 0.4796326160430908, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2583, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 0.5210970464135021, |
|
"grad_norm": 0.35974326729774475, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2535, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 0.5221518987341772, |
|
"grad_norm": 0.37178534269332886, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2599, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 0.5232067510548524, |
|
"grad_norm": 0.349430650472641, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2439, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 0.5242616033755274, |
|
"grad_norm": 0.395673543214798, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2508, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 0.5253164556962026, |
|
"grad_norm": 0.40750932693481445, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2449, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 0.5263713080168776, |
|
"grad_norm": 0.3674182891845703, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2476, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 0.5274261603375527, |
|
"grad_norm": 0.3393429219722748, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2561, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.5284810126582279, |
|
"grad_norm": 0.33547094464302063, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2501, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 0.5295358649789029, |
|
"grad_norm": 0.36183106899261475, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2452, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 0.5305907172995781, |
|
"grad_norm": 0.3869287371635437, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2315, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 0.5316455696202531, |
|
"grad_norm": 0.36629679799079895, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2587, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 0.5327004219409283, |
|
"grad_norm": 0.349203884601593, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2467, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 0.5337552742616034, |
|
"grad_norm": 0.3906978666782379, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2572, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 0.5348101265822784, |
|
"grad_norm": 0.4080146253108978, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2486, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 0.5358649789029536, |
|
"grad_norm": 0.45458248257637024, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2591, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 0.5369198312236287, |
|
"grad_norm": 0.39543619751930237, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2524, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 0.5379746835443038, |
|
"grad_norm": 0.4424404799938202, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2367, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 0.5390295358649789, |
|
"grad_norm": 0.3873385488986969, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2489, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 0.540084388185654, |
|
"grad_norm": 0.33514106273651123, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2461, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 0.5411392405063291, |
|
"grad_norm": 0.38259291648864746, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2454, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 0.5421940928270043, |
|
"grad_norm": 0.3605073094367981, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2585, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 0.5432489451476793, |
|
"grad_norm": 0.40182405710220337, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2523, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 0.5443037974683544, |
|
"grad_norm": 0.4059754014015198, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2488, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 0.5453586497890295, |
|
"grad_norm": 0.3528928756713867, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2469, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 0.5464135021097046, |
|
"grad_norm": 0.4284350574016571, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2564, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 0.5474683544303798, |
|
"grad_norm": 0.3745574951171875, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2419, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 0.5485232067510548, |
|
"grad_norm": 0.3614684045314789, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2527, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 0.54957805907173, |
|
"grad_norm": 0.4084944427013397, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2394, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 0.5506329113924051, |
|
"grad_norm": 0.3417372405529022, |
|
"learning_rate": 0.0015, |
|
"loss": 1.248, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 0.5516877637130801, |
|
"grad_norm": 0.35009732842445374, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2431, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 0.5527426160337553, |
|
"grad_norm": 0.37051039934158325, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2475, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 0.5537974683544303, |
|
"grad_norm": 0.44351595640182495, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2384, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 0.5548523206751055, |
|
"grad_norm": 0.3997798264026642, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2459, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 0.5559071729957806, |
|
"grad_norm": 0.4093949794769287, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2505, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 0.5569620253164557, |
|
"grad_norm": 0.457400381565094, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2443, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 0.5580168776371308, |
|
"grad_norm": 0.35332190990448, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2468, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 0.5590717299578059, |
|
"grad_norm": 0.3771302103996277, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2499, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 0.560126582278481, |
|
"grad_norm": 0.47953101992607117, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2372, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 0.5611814345991561, |
|
"grad_norm": 0.3351249098777771, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2373, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 0.5622362869198312, |
|
"grad_norm": 0.35240134596824646, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2459, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 0.5632911392405063, |
|
"grad_norm": 0.35376760363578796, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2396, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 0.5643459915611815, |
|
"grad_norm": 0.36500561237335205, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2361, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 0.5654008438818565, |
|
"grad_norm": 0.45256397128105164, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2449, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 0.5664556962025317, |
|
"grad_norm": 0.3955960273742676, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2435, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 0.5675105485232067, |
|
"grad_norm": 0.40095025300979614, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2355, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 0.5685654008438819, |
|
"grad_norm": 0.3567625880241394, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2386, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 0.569620253164557, |
|
"grad_norm": 0.36974769830703735, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2374, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 0.570675105485232, |
|
"grad_norm": 0.3549600839614868, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2426, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 0.5717299578059072, |
|
"grad_norm": 0.34809744358062744, |
|
"learning_rate": 0.0015, |
|
"loss": 1.236, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 0.5727848101265823, |
|
"grad_norm": 0.3890848755836487, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2304, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 0.5738396624472574, |
|
"grad_norm": 0.3576195538043976, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2281, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 0.5748945147679325, |
|
"grad_norm": 0.4001421630382538, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2365, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 0.5759493670886076, |
|
"grad_norm": 0.36096686124801636, |
|
"learning_rate": 0.0015, |
|
"loss": 1.24, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 0.5770042194092827, |
|
"grad_norm": 0.42172369360923767, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2386, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 0.5780590717299579, |
|
"grad_norm": 0.3547919690608978, |
|
"learning_rate": 0.0015, |
|
"loss": 1.241, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 0.5791139240506329, |
|
"grad_norm": 0.3332633674144745, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2467, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 0.580168776371308, |
|
"grad_norm": 0.37135013937950134, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2368, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.5812236286919831, |
|
"grad_norm": 0.4235054850578308, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2446, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 0.5822784810126582, |
|
"grad_norm": 0.36324357986450195, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2353, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 0.5833333333333334, |
|
"grad_norm": 0.5144487023353577, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2329, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 0.5843881856540084, |
|
"grad_norm": 0.3375086784362793, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2295, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 0.5854430379746836, |
|
"grad_norm": 0.3670271337032318, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2448, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 0.5864978902953587, |
|
"grad_norm": 0.3790842890739441, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2506, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 0.5875527426160337, |
|
"grad_norm": 0.36140161752700806, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2344, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 0.5886075949367089, |
|
"grad_norm": 0.40632885694503784, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2327, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 0.5896624472573839, |
|
"grad_norm": 0.40224945545196533, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2375, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 0.5907172995780591, |
|
"grad_norm": 0.3521060347557068, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2327, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 0.5917721518987342, |
|
"grad_norm": 0.3723829686641693, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2371, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 0.5928270042194093, |
|
"grad_norm": 0.3640119731426239, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2431, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 0.5938818565400844, |
|
"grad_norm": 0.41383010149002075, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2391, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 0.5949367088607594, |
|
"grad_norm": 0.3435527980327606, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2479, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 0.5959915611814346, |
|
"grad_norm": 0.32942721247673035, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2407, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 0.5970464135021097, |
|
"grad_norm": 0.47213542461395264, |
|
"learning_rate": 0.0015, |
|
"loss": 1.239, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 0.5981012658227848, |
|
"grad_norm": 0.34918421506881714, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2427, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 0.5991561181434599, |
|
"grad_norm": 0.34842443466186523, |
|
"learning_rate": 0.0015, |
|
"loss": 1.244, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 0.6002109704641351, |
|
"grad_norm": 0.3522525727748871, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2239, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 0.6012658227848101, |
|
"grad_norm": 0.3676183223724365, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2302, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 0.6023206751054853, |
|
"grad_norm": 0.3557111322879791, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2345, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 0.6033755274261603, |
|
"grad_norm": 0.37497225403785706, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2355, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 0.6044303797468354, |
|
"grad_norm": 0.35429346561431885, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2347, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 0.6054852320675106, |
|
"grad_norm": 0.3666691780090332, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2334, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 0.6065400843881856, |
|
"grad_norm": 0.34536102414131165, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2097, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 0.6075949367088608, |
|
"grad_norm": 0.3289570212364197, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2217, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 0.6086497890295358, |
|
"grad_norm": 0.39452025294303894, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2228, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 0.609704641350211, |
|
"grad_norm": 0.3545337915420532, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2295, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 0.6107594936708861, |
|
"grad_norm": 0.3574298024177551, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2271, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 0.6118143459915611, |
|
"grad_norm": 0.3354833126068115, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2293, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 0.6128691983122363, |
|
"grad_norm": 0.35068947076797485, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2348, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 0.6139240506329114, |
|
"grad_norm": 0.38195034861564636, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2494, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 0.6149789029535865, |
|
"grad_norm": 0.3787460923194885, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2297, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 0.6160337552742616, |
|
"grad_norm": 0.38402631878852844, |
|
"learning_rate": 0.0015, |
|
"loss": 1.243, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 0.6170886075949367, |
|
"grad_norm": 0.40364325046539307, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2472, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 0.6181434599156118, |
|
"grad_norm": 0.36070314049720764, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2321, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 0.619198312236287, |
|
"grad_norm": 0.4334326684474945, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2288, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 0.620253164556962, |
|
"grad_norm": 0.3435908257961273, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2362, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 0.6213080168776371, |
|
"grad_norm": 0.3393274247646332, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2285, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 0.6223628691983122, |
|
"grad_norm": 0.3442111015319824, |
|
"learning_rate": 0.0015, |
|
"loss": 1.23, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 0.6234177215189873, |
|
"grad_norm": 0.3644740581512451, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2248, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 0.6244725738396625, |
|
"grad_norm": 0.3478515148162842, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2182, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 0.6255274261603375, |
|
"grad_norm": 0.47216373682022095, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2252, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 0.6265822784810127, |
|
"grad_norm": 0.3889217674732208, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2331, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 0.6276371308016878, |
|
"grad_norm": 0.3746896982192993, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2218, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 0.6286919831223629, |
|
"grad_norm": 0.337329238653183, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2327, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 0.629746835443038, |
|
"grad_norm": 0.45176035165786743, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2321, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 0.630801687763713, |
|
"grad_norm": 0.34400928020477295, |
|
"learning_rate": 0.0015, |
|
"loss": 1.216, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 0.6318565400843882, |
|
"grad_norm": 0.4225354790687561, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2325, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 0.6329113924050633, |
|
"grad_norm": 0.3400496244430542, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2411, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.6339662447257384, |
|
"grad_norm": 0.3938896358013153, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2299, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 0.6350210970464135, |
|
"grad_norm": 0.3652991056442261, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2299, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 0.6360759493670886, |
|
"grad_norm": 0.38011661171913147, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2129, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 0.6371308016877637, |
|
"grad_norm": 0.34210604429244995, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2329, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 0.6381856540084389, |
|
"grad_norm": 0.352566123008728, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2176, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 0.6392405063291139, |
|
"grad_norm": 0.34896865487098694, |
|
"learning_rate": 0.0015, |
|
"loss": 1.233, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 0.640295358649789, |
|
"grad_norm": 0.4462093710899353, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2256, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 0.6413502109704642, |
|
"grad_norm": 0.35512223839759827, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2257, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 0.6424050632911392, |
|
"grad_norm": 0.3445747196674347, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2241, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 0.6434599156118144, |
|
"grad_norm": 0.3473874628543854, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2186, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 0.6445147679324894, |
|
"grad_norm": 0.35191580653190613, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2311, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 0.6455696202531646, |
|
"grad_norm": 0.33353883028030396, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2265, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 0.6466244725738397, |
|
"grad_norm": 0.44887515902519226, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2262, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 0.6476793248945147, |
|
"grad_norm": 0.39968711137771606, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2253, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 0.6487341772151899, |
|
"grad_norm": 0.3577297627925873, |
|
"learning_rate": 0.0015, |
|
"loss": 1.2358, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 0.6497890295358649, |
|
"grad_norm": 0.4742908179759979, |
|
"learning_rate": 0.0015, |
|
"loss": 1.23, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 0.6508438818565401, |
|
"grad_norm": 0.4291151463985443, |
|
"learning_rate": 0.0014834368975312174, |
|
"loss": 1.2079, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 0.6518987341772152, |
|
"grad_norm": 0.38173770904541016, |
|
"learning_rate": 0.0014629899726345957, |
|
"loss": 1.2299, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 0.6529535864978903, |
|
"grad_norm": 0.37623050808906555, |
|
"learning_rate": 0.0014428248775471316, |
|
"loss": 1.2305, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 0.6540084388185654, |
|
"grad_norm": 0.38231006264686584, |
|
"learning_rate": 0.00142293772767289, |
|
"loss": 1.2213, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 0.6550632911392406, |
|
"grad_norm": 0.3671601116657257, |
|
"learning_rate": 0.001403324691959192, |
|
"loss": 1.2156, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 0.6561181434599156, |
|
"grad_norm": 0.3565635085105896, |
|
"learning_rate": 0.0013839819921586025, |
|
"loss": 1.2229, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 0.6571729957805907, |
|
"grad_norm": 0.3567785918712616, |
|
"learning_rate": 0.0013649059021010894, |
|
"loss": 1.2105, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 0.6582278481012658, |
|
"grad_norm": 0.4190845489501953, |
|
"learning_rate": 0.0013460927469762154, |
|
"loss": 1.2122, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 0.6592827004219409, |
|
"grad_norm": 0.3603142201900482, |
|
"learning_rate": 0.0013275389026252255, |
|
"loss": 1.2185, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 0.6603375527426161, |
|
"grad_norm": 0.3586927652359009, |
|
"learning_rate": 0.0013092407948428887, |
|
"loss": 1.2087, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 0.6613924050632911, |
|
"grad_norm": 0.3322678804397583, |
|
"learning_rate": 0.001291194898688966, |
|
"loss": 1.2146, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 0.6624472573839663, |
|
"grad_norm": 0.3324128985404968, |
|
"learning_rate": 0.001273397737809166, |
|
"loss": 1.2128, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 0.6635021097046413, |
|
"grad_norm": 0.34206292033195496, |
|
"learning_rate": 0.001255845883765463, |
|
"loss": 1.21, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 0.6645569620253164, |
|
"grad_norm": 0.33811911940574646, |
|
"learning_rate": 0.001238535955375642, |
|
"loss": 1.2004, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 0.6656118143459916, |
|
"grad_norm": 0.357466459274292, |
|
"learning_rate": 0.0012214646180619506, |
|
"loss": 1.2015, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 0.34239062666893005, |
|
"learning_rate": 0.001204628583208727, |
|
"loss": 1.1962, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 0.6677215189873418, |
|
"grad_norm": 0.34912315011024475, |
|
"learning_rate": 0.0011880246075288827, |
|
"loss": 1.2038, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 0.6687763713080169, |
|
"grad_norm": 0.3250739872455597, |
|
"learning_rate": 0.001171649492439115, |
|
"loss": 1.1965, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 0.669831223628692, |
|
"grad_norm": 0.335197776556015, |
|
"learning_rate": 0.0011555000834437364, |
|
"loss": 1.1989, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 0.6708860759493671, |
|
"grad_norm": 0.3212112486362457, |
|
"learning_rate": 0.0011395732695269908, |
|
"loss": 1.1966, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 0.6719409282700421, |
|
"grad_norm": 0.31406673789024353, |
|
"learning_rate": 0.0011238659825537505, |
|
"loss": 1.1773, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 0.6729957805907173, |
|
"grad_norm": 0.37119054794311523, |
|
"learning_rate": 0.0011083751966784717, |
|
"loss": 1.18, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 0.6740506329113924, |
|
"grad_norm": 0.4086954593658447, |
|
"learning_rate": 0.0010930979277622953, |
|
"loss": 1.1948, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 0.6751054852320675, |
|
"grad_norm": 0.40815266966819763, |
|
"learning_rate": 0.0010780312327981854, |
|
"loss": 1.1939, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 0.6761603375527426, |
|
"grad_norm": 0.35664618015289307, |
|
"learning_rate": 0.0010631722093439888, |
|
"loss": 1.1888, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 0.6772151898734177, |
|
"grad_norm": 0.3347793519496918, |
|
"learning_rate": 0.00104851799496331, |
|
"loss": 1.1786, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 0.6782700421940928, |
|
"grad_norm": 0.33713704347610474, |
|
"learning_rate": 0.0010340657666740914, |
|
"loss": 1.1852, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 0.679324894514768, |
|
"grad_norm": 0.40812811255455017, |
|
"learning_rate": 0.0010198127404047975, |
|
"loss": 1.1685, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 0.680379746835443, |
|
"grad_norm": 0.32857853174209595, |
|
"learning_rate": 0.0010057561704580897, |
|
"loss": 1.1727, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 0.6814345991561181, |
|
"grad_norm": 0.36213618516921997, |
|
"learning_rate": 0.0009918933489818985, |
|
"loss": 1.1856, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 0.6824894514767933, |
|
"grad_norm": 0.3340655267238617, |
|
"learning_rate": 0.0009782216054477827, |
|
"loss": 1.178, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 0.6835443037974683, |
|
"grad_norm": 0.36028996109962463, |
|
"learning_rate": 0.0009647383061364801, |
|
"loss": 1.1836, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 0.6845991561181435, |
|
"grad_norm": 0.32589006423950195, |
|
"learning_rate": 0.0009514408536305495, |
|
"loss": 1.1763, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 0.6856540084388185, |
|
"grad_norm": 0.36935076117515564, |
|
"learning_rate": 0.0009383266863140042, |
|
"loss": 1.1913, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.6867088607594937, |
|
"grad_norm": 0.35536494851112366, |
|
"learning_rate": 0.000925393277878844, |
|
"loss": 1.1876, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 0.6877637130801688, |
|
"grad_norm": 0.3353438079357147, |
|
"learning_rate": 0.0009126381368383879, |
|
"loss": 1.1738, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 0.6888185654008439, |
|
"grad_norm": 0.3449837267398834, |
|
"learning_rate": 0.0009000588060473156, |
|
"loss": 1.1637, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 0.689873417721519, |
|
"grad_norm": 0.32303208112716675, |
|
"learning_rate": 0.0008876528622283235, |
|
"loss": 1.1715, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 0.6909282700421941, |
|
"grad_norm": 0.3335954248905182, |
|
"learning_rate": 0.0008754179155053053, |
|
"loss": 1.1685, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 0.6919831223628692, |
|
"grad_norm": 0.3782043755054474, |
|
"learning_rate": 0.0008633516089429683, |
|
"loss": 1.1681, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 0.6930379746835443, |
|
"grad_norm": 0.35206338763237, |
|
"learning_rate": 0.0008514516180927928, |
|
"loss": 1.1643, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 0.6940928270042194, |
|
"grad_norm": 0.3493340313434601, |
|
"learning_rate": 0.0008397156505452524, |
|
"loss": 1.1596, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 0.6951476793248945, |
|
"grad_norm": 0.34636741876602173, |
|
"learning_rate": 0.0008281414454882051, |
|
"loss": 1.1601, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 0.6962025316455697, |
|
"grad_norm": 0.3398618996143341, |
|
"learning_rate": 0.0008167267732713704, |
|
"loss": 1.1666, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 0.6972573839662447, |
|
"grad_norm": 0.36243852972984314, |
|
"learning_rate": 0.0008054694349768117, |
|
"loss": 1.1518, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 0.6983122362869199, |
|
"grad_norm": 0.34618276357650757, |
|
"learning_rate": 0.0007943672619953359, |
|
"loss": 1.163, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 0.6993670886075949, |
|
"grad_norm": 0.3229001462459564, |
|
"learning_rate": 0.0007834181156087356, |
|
"loss": 1.1538, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 0.70042194092827, |
|
"grad_norm": 0.3970085084438324, |
|
"learning_rate": 0.0007726198865777852, |
|
"loss": 1.1541, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 0.7014767932489452, |
|
"grad_norm": 0.34208858013153076, |
|
"learning_rate": 0.0007619704947359191, |
|
"loss": 1.1508, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 0.7025316455696202, |
|
"grad_norm": 0.31160768866539, |
|
"learning_rate": 0.0007514678885885087, |
|
"loss": 1.1521, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 0.7035864978902954, |
|
"grad_norm": 0.3307741582393646, |
|
"learning_rate": 0.0007411100449176633, |
|
"loss": 1.1528, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 0.7046413502109705, |
|
"grad_norm": 0.32905980944633484, |
|
"learning_rate": 0.0007308949683924791, |
|
"loss": 1.1547, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 0.7056962025316456, |
|
"grad_norm": 0.3278239667415619, |
|
"learning_rate": 0.000720820691184658, |
|
"loss": 1.147, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 0.7067510548523207, |
|
"grad_norm": 0.32193559408187866, |
|
"learning_rate": 0.0007108852725894269, |
|
"loss": 1.1469, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 0.7078059071729957, |
|
"grad_norm": 0.34123092889785767, |
|
"learning_rate": 0.000701086798651681, |
|
"loss": 1.1485, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 0.7088607594936709, |
|
"grad_norm": 0.3501998484134674, |
|
"learning_rate": 0.0006914233817972798, |
|
"loss": 1.1398, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 0.709915611814346, |
|
"grad_norm": 0.3438262939453125, |
|
"learning_rate": 0.0006818931604694261, |
|
"loss": 1.1436, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 0.7109704641350211, |
|
"grad_norm": 0.3316311836242676, |
|
"learning_rate": 0.0006724942987700563, |
|
"loss": 1.1487, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 0.7120253164556962, |
|
"grad_norm": 0.3469674289226532, |
|
"learning_rate": 0.0006632249861061732, |
|
"loss": 1.151, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 0.7130801687763713, |
|
"grad_norm": 0.34641310572624207, |
|
"learning_rate": 0.0006540834368410549, |
|
"loss": 1.1452, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 0.7141350210970464, |
|
"grad_norm": 0.33597779273986816, |
|
"learning_rate": 0.0006450678899502701, |
|
"loss": 1.1438, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 0.7151898734177216, |
|
"grad_norm": 0.3230700194835663, |
|
"learning_rate": 0.0006361766086824345, |
|
"loss": 1.1433, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 0.7162447257383966, |
|
"grad_norm": 0.32162338495254517, |
|
"learning_rate": 0.000627407880224645, |
|
"loss": 1.1462, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 0.7172995780590717, |
|
"grad_norm": 0.32030555605888367, |
|
"learning_rate": 0.0006187600153725225, |
|
"loss": 1.1339, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 0.7183544303797469, |
|
"grad_norm": 0.3984506130218506, |
|
"learning_rate": 0.0006102313482048055, |
|
"loss": 1.1385, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 0.7194092827004219, |
|
"grad_norm": 0.34766748547554016, |
|
"learning_rate": 0.0006018202357624274, |
|
"loss": 1.1386, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 0.7204641350210971, |
|
"grad_norm": 0.3280739486217499, |
|
"learning_rate": 0.0005935250577320168, |
|
"loss": 1.1369, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 0.7215189873417721, |
|
"grad_norm": 0.35947754979133606, |
|
"learning_rate": 0.0005853442161337618, |
|
"loss": 1.1279, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 0.7225738396624473, |
|
"grad_norm": 0.3208995759487152, |
|
"learning_rate": 0.0005772761350135759, |
|
"loss": 1.1348, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 0.7236286919831224, |
|
"grad_norm": 0.34295305609703064, |
|
"learning_rate": 0.0005693192601395058, |
|
"loss": 1.1285, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 0.7246835443037974, |
|
"grad_norm": 0.3107556402683258, |
|
"learning_rate": 0.000561472058702326, |
|
"loss": 1.1269, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 0.7257383966244726, |
|
"grad_norm": 0.31452247500419617, |
|
"learning_rate": 0.000553733019020258, |
|
"loss": 1.1349, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 0.7267932489451476, |
|
"grad_norm": 0.3466354012489319, |
|
"learning_rate": 0.0005461006502477612, |
|
"loss": 1.1244, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 0.7278481012658228, |
|
"grad_norm": 0.3526834547519684, |
|
"learning_rate": 0.0005385734820883369, |
|
"loss": 1.1238, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 0.7289029535864979, |
|
"grad_norm": 0.33901646733283997, |
|
"learning_rate": 0.0005311500645112907, |
|
"loss": 1.1459, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 0.729957805907173, |
|
"grad_norm": 0.3271549642086029, |
|
"learning_rate": 0.0005238289674723993, |
|
"loss": 1.1249, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 0.7310126582278481, |
|
"grad_norm": 0.330404132604599, |
|
"learning_rate": 0.0005166087806384274, |
|
"loss": 1.1319, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 0.7320675105485233, |
|
"grad_norm": 0.32270577549934387, |
|
"learning_rate": 0.0005094881131154418, |
|
"loss": 1.1339, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 0.7331223628691983, |
|
"grad_norm": 0.34502968192100525, |
|
"learning_rate": 0.0005024655931808696, |
|
"loss": 1.1294, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 0.7341772151898734, |
|
"grad_norm": 0.32282140851020813, |
|
"learning_rate": 0.0004955398680192508, |
|
"loss": 1.124, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 0.7352320675105485, |
|
"grad_norm": 0.33826205134391785, |
|
"learning_rate": 0.000488709603461632, |
|
"loss": 1.1189, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 0.7362869198312236, |
|
"grad_norm": 0.31764915585517883, |
|
"learning_rate": 0.000481973483728553, |
|
"loss": 1.1212, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 0.7373417721518988, |
|
"grad_norm": 0.3235630989074707, |
|
"learning_rate": 0.0004753302111765748, |
|
"loss": 1.1206, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 0.7383966244725738, |
|
"grad_norm": 0.3392946422100067, |
|
"learning_rate": 0.0004687785060483032, |
|
"loss": 1.1296, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.739451476793249, |
|
"grad_norm": 0.3291850984096527, |
|
"learning_rate": 0.0004623171062258558, |
|
"loss": 1.103, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 0.740506329113924, |
|
"grad_norm": 0.34219229221343994, |
|
"learning_rate": 0.0004559447669877288, |
|
"loss": 1.1152, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 0.7415611814345991, |
|
"grad_norm": 0.32017433643341064, |
|
"learning_rate": 0.00044966026076901413, |
|
"loss": 1.1238, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 0.7426160337552743, |
|
"grad_norm": 0.41779911518096924, |
|
"learning_rate": 0.00044346237692492177, |
|
"loss": 1.1272, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 0.7436708860759493, |
|
"grad_norm": 0.37291568517684937, |
|
"learning_rate": 0.0004373499214975615, |
|
"loss": 1.111, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 0.7447257383966245, |
|
"grad_norm": 0.4117240309715271, |
|
"learning_rate": 0.0004313217169859396, |
|
"loss": 1.1179, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 0.7457805907172996, |
|
"grad_norm": 0.3707019090652466, |
|
"learning_rate": 0.0004253766021191256, |
|
"loss": 1.1227, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 0.7468354430379747, |
|
"grad_norm": 0.3181743621826172, |
|
"learning_rate": 0.00041951343163254497, |
|
"loss": 1.1184, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 0.7478902953586498, |
|
"grad_norm": 0.33088555932044983, |
|
"learning_rate": 0.00041373107604735626, |
|
"loss": 1.1144, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 0.7489451476793249, |
|
"grad_norm": 0.3327450752258301, |
|
"learning_rate": 0.0004080284214528687, |
|
"loss": 1.1118, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.3445628881454468, |
|
"learning_rate": 0.0004024043692919589, |
|
"loss": 1.124, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 0.7510548523206751, |
|
"grad_norm": 0.36757397651672363, |
|
"learning_rate": 0.0003968578361494449, |
|
"loss": 1.1146, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 0.7521097046413502, |
|
"grad_norm": 0.3530139923095703, |
|
"learning_rate": 0.000391387753543378, |
|
"loss": 1.1237, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 0.7531645569620253, |
|
"grad_norm": 0.33693066239356995, |
|
"learning_rate": 0.00038599306771921023, |
|
"loss": 1.109, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 0.7542194092827004, |
|
"grad_norm": 0.3698440492153168, |
|
"learning_rate": 0.0003806727394468004, |
|
"loss": 1.1047, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 0.7552742616033755, |
|
"grad_norm": 0.3456558883190155, |
|
"learning_rate": 0.0003754257438202162, |
|
"loss": 1.1114, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 0.7563291139240507, |
|
"grad_norm": 0.3305872082710266, |
|
"learning_rate": 0.0003702510700602974, |
|
"loss": 1.1189, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 0.7573839662447257, |
|
"grad_norm": 0.33361759781837463, |
|
"learning_rate": 0.0003651477213199393, |
|
"loss": 1.0986, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 0.7584388185654009, |
|
"grad_norm": 0.32957959175109863, |
|
"learning_rate": 0.000360114714492061, |
|
"loss": 1.1006, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 0.759493670886076, |
|
"grad_norm": 0.3591114580631256, |
|
"learning_rate": 0.0003551510800202195, |
|
"loss": 1.1035, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 0.760548523206751, |
|
"grad_norm": 0.39091235399246216, |
|
"learning_rate": 0.0003502558617118353, |
|
"loss": 1.1091, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 0.7616033755274262, |
|
"grad_norm": 0.32055342197418213, |
|
"learning_rate": 0.0003454281165539914, |
|
"loss": 1.121, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 0.7626582278481012, |
|
"grad_norm": 0.35132747888565063, |
|
"learning_rate": 0.00034066691453177176, |
|
"loss": 1.1166, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 0.7637130801687764, |
|
"grad_norm": 0.31827104091644287, |
|
"learning_rate": 0.0003359713384491037, |
|
"loss": 1.1138, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 0.7647679324894515, |
|
"grad_norm": 0.3282644748687744, |
|
"learning_rate": 0.00033134048375206944, |
|
"loss": 1.1073, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 0.7658227848101266, |
|
"grad_norm": 0.32072553038597107, |
|
"learning_rate": 0.0003267734583546536, |
|
"loss": 1.1065, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 0.7668776371308017, |
|
"grad_norm": 0.36000901460647583, |
|
"learning_rate": 0.00032226938246689157, |
|
"loss": 1.1056, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 0.7679324894514767, |
|
"grad_norm": 0.33699488639831543, |
|
"learning_rate": 0.0003178273884253874, |
|
"loss": 1.1058, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 0.7689873417721519, |
|
"grad_norm": 0.33975517749786377, |
|
"learning_rate": 0.0003134466205261674, |
|
"loss": 1.1156, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 0.770042194092827, |
|
"grad_norm": 0.3478587567806244, |
|
"learning_rate": 0.0003091262348598378, |
|
"loss": 1.1191, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 0.7710970464135021, |
|
"grad_norm": 0.32292914390563965, |
|
"learning_rate": 0.0003048653991490141, |
|
"loss": 1.0999, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 0.7721518987341772, |
|
"grad_norm": 0.32401716709136963, |
|
"learning_rate": 0.00030066329258799187, |
|
"loss": 1.0977, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 0.7732067510548524, |
|
"grad_norm": 0.34817883372306824, |
|
"learning_rate": 0.0002965191056846266, |
|
"loss": 1.1029, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 0.7742616033755274, |
|
"grad_norm": 0.3278037905693054, |
|
"learning_rate": 0.000292432040104394, |
|
"loss": 1.0939, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 0.7753164556962026, |
|
"grad_norm": 0.3233949542045593, |
|
"learning_rate": 0.00028840130851659853, |
|
"loss": 1.0947, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 0.7763713080168776, |
|
"grad_norm": 0.32060080766677856, |
|
"learning_rate": 0.0002844261344427028, |
|
"loss": 1.1021, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 0.7774261603375527, |
|
"grad_norm": 0.33821195363998413, |
|
"learning_rate": 0.0002805057521067471, |
|
"loss": 1.1042, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 0.7784810126582279, |
|
"grad_norm": 0.33443766832351685, |
|
"learning_rate": 0.00027663940628783017, |
|
"loss": 1.0892, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 0.7795358649789029, |
|
"grad_norm": 0.34301623702049255, |
|
"learning_rate": 0.00027282635217462393, |
|
"loss": 1.1006, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 0.7805907172995781, |
|
"grad_norm": 0.3341234028339386, |
|
"learning_rate": 0.0002690658552218937, |
|
"loss": 1.1055, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 0.7816455696202531, |
|
"grad_norm": 0.31503307819366455, |
|
"learning_rate": 0.00026535719100899516, |
|
"loss": 1.0884, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 0.7827004219409283, |
|
"grad_norm": 0.33604514598846436, |
|
"learning_rate": 0.00026169964510032245, |
|
"loss": 1.0909, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 0.7837552742616034, |
|
"grad_norm": 0.3207562565803528, |
|
"learning_rate": 0.00025809251290767984, |
|
"loss": 1.0814, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 0.7848101265822784, |
|
"grad_norm": 0.3265933394432068, |
|
"learning_rate": 0.00025453509955454957, |
|
"loss": 1.0837, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 0.7858649789029536, |
|
"grad_norm": 0.3195817768573761, |
|
"learning_rate": 0.00025102671974223175, |
|
"loss": 1.0861, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 0.7869198312236287, |
|
"grad_norm": 0.34826865792274475, |
|
"learning_rate": 0.00024756669761782815, |
|
"loss": 1.0951, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 0.7879746835443038, |
|
"grad_norm": 0.3280653655529022, |
|
"learning_rate": 0.0002441543666440464, |
|
"loss": 1.0878, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 0.7890295358649789, |
|
"grad_norm": 0.3230268657207489, |
|
"learning_rate": 0.00024078906947079878, |
|
"loss": 1.094, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 0.790084388185654, |
|
"grad_norm": 0.3275749385356903, |
|
"learning_rate": 0.00023747015780857005, |
|
"loss": 1.0986, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 0.7911392405063291, |
|
"grad_norm": 0.3295617401599884, |
|
"learning_rate": 0.00023419699230353144, |
|
"loss": 1.0968, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.7921940928270043, |
|
"grad_norm": 0.3250296115875244, |
|
"learning_rate": 0.00023096894241437586, |
|
"loss": 1.1005, |
|
"step": 7510 |
|
}, |
|
{ |
|
"epoch": 0.7932489451476793, |
|
"grad_norm": 0.3130958676338196, |
|
"learning_rate": 0.00022778538629085056, |
|
"loss": 1.0857, |
|
"step": 7520 |
|
}, |
|
{ |
|
"epoch": 0.7943037974683544, |
|
"grad_norm": 0.32801106572151184, |
|
"learning_rate": 0.00022464571065396427, |
|
"loss": 1.0861, |
|
"step": 7530 |
|
}, |
|
{ |
|
"epoch": 0.7953586497890295, |
|
"grad_norm": 0.3705176115036011, |
|
"learning_rate": 0.00022154931067784521, |
|
"loss": 1.085, |
|
"step": 7540 |
|
}, |
|
{ |
|
"epoch": 0.7964135021097046, |
|
"grad_norm": 0.34427809715270996, |
|
"learning_rate": 0.00021849558987322782, |
|
"loss": 1.082, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 0.7974683544303798, |
|
"grad_norm": 0.3411599397659302, |
|
"learning_rate": 0.0002154839599725452, |
|
"loss": 1.0857, |
|
"step": 7560 |
|
}, |
|
{ |
|
"epoch": 0.7985232067510548, |
|
"grad_norm": 0.3404884934425354, |
|
"learning_rate": 0.00021251384081660544, |
|
"loss": 1.0935, |
|
"step": 7570 |
|
}, |
|
{ |
|
"epoch": 0.79957805907173, |
|
"grad_norm": 0.31883886456489563, |
|
"learning_rate": 0.0002095846602428303, |
|
"loss": 1.0889, |
|
"step": 7580 |
|
}, |
|
{ |
|
"epoch": 0.8006329113924051, |
|
"grad_norm": 0.3307037949562073, |
|
"learning_rate": 0.00020669585397503358, |
|
"loss": 1.0802, |
|
"step": 7590 |
|
}, |
|
{ |
|
"epoch": 0.8016877637130801, |
|
"grad_norm": 0.3246137201786041, |
|
"learning_rate": 0.0002038468655147195, |
|
"loss": 1.0874, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 0.8027426160337553, |
|
"grad_norm": 0.32273244857788086, |
|
"learning_rate": 0.00020103714603387894, |
|
"loss": 1.0986, |
|
"step": 7610 |
|
}, |
|
{ |
|
"epoch": 0.8037974683544303, |
|
"grad_norm": 0.31461453437805176, |
|
"learning_rate": 0.00019826615426926338, |
|
"loss": 1.0736, |
|
"step": 7620 |
|
}, |
|
{ |
|
"epoch": 0.8048523206751055, |
|
"grad_norm": 0.34340304136276245, |
|
"learning_rate": 0.00019553335641811625, |
|
"loss": 1.0947, |
|
"step": 7630 |
|
}, |
|
{ |
|
"epoch": 0.8059071729957806, |
|
"grad_norm": 0.3382534086704254, |
|
"learning_rate": 0.0001928382260353415, |
|
"loss": 1.0853, |
|
"step": 7640 |
|
}, |
|
{ |
|
"epoch": 0.8069620253164557, |
|
"grad_norm": 0.3437410891056061, |
|
"learning_rate": 0.00019018024393208902, |
|
"loss": 1.102, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 0.8080168776371308, |
|
"grad_norm": 0.32129502296447754, |
|
"learning_rate": 0.00018755889807573872, |
|
"loss": 1.0771, |
|
"step": 7660 |
|
}, |
|
{ |
|
"epoch": 0.8090717299578059, |
|
"grad_norm": 0.32240381836891174, |
|
"learning_rate": 0.00018497368349126262, |
|
"loss": 1.0863, |
|
"step": 7670 |
|
}, |
|
{ |
|
"epoch": 0.810126582278481, |
|
"grad_norm": 0.3478579819202423, |
|
"learning_rate": 0.00018242410216394648, |
|
"loss": 1.0964, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 0.8111814345991561, |
|
"grad_norm": 0.3518509268760681, |
|
"learning_rate": 0.0001799096629434529, |
|
"loss": 1.0721, |
|
"step": 7690 |
|
}, |
|
{ |
|
"epoch": 0.8122362869198312, |
|
"grad_norm": 0.32270342111587524, |
|
"learning_rate": 0.00017742988144920578, |
|
"loss": 1.0828, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 0.8132911392405063, |
|
"grad_norm": 0.323770135641098, |
|
"learning_rate": 0.00017498427997707976, |
|
"loss": 1.0796, |
|
"step": 7710 |
|
}, |
|
{ |
|
"epoch": 0.8143459915611815, |
|
"grad_norm": 0.33255451917648315, |
|
"learning_rate": 0.00017257238740737548, |
|
"loss": 1.0837, |
|
"step": 7720 |
|
}, |
|
{ |
|
"epoch": 0.8154008438818565, |
|
"grad_norm": 0.35608723759651184, |
|
"learning_rate": 0.00017019373911406307, |
|
"loss": 1.0903, |
|
"step": 7730 |
|
}, |
|
{ |
|
"epoch": 0.8164556962025317, |
|
"grad_norm": 0.31979310512542725, |
|
"learning_rate": 0.000167847876875277, |
|
"loss": 1.0904, |
|
"step": 7740 |
|
}, |
|
{ |
|
"epoch": 0.8175105485232067, |
|
"grad_norm": 0.35860174894332886, |
|
"learning_rate": 0.00016553434878504428, |
|
"loss": 1.078, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 0.8185654008438819, |
|
"grad_norm": 0.3339579701423645, |
|
"learning_rate": 0.00016325270916622947, |
|
"loss": 1.074, |
|
"step": 7760 |
|
}, |
|
{ |
|
"epoch": 0.819620253164557, |
|
"grad_norm": 0.33170056343078613, |
|
"learning_rate": 0.00016100251848467966, |
|
"loss": 1.0821, |
|
"step": 7770 |
|
}, |
|
{ |
|
"epoch": 0.820675105485232, |
|
"grad_norm": 0.3272804021835327, |
|
"learning_rate": 0.0001587833432645528, |
|
"loss": 1.0775, |
|
"step": 7780 |
|
}, |
|
{ |
|
"epoch": 0.8217299578059072, |
|
"grad_norm": 0.3446336090564728, |
|
"learning_rate": 0.00015659475600481292, |
|
"loss": 1.0926, |
|
"step": 7790 |
|
}, |
|
{ |
|
"epoch": 0.8227848101265823, |
|
"grad_norm": 0.3152468502521515, |
|
"learning_rate": 0.00015443633509687688, |
|
"loss": 1.0805, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 0.8238396624472574, |
|
"grad_norm": 0.3231417238712311, |
|
"learning_rate": 0.00015230766474339536, |
|
"loss": 1.0813, |
|
"step": 7810 |
|
}, |
|
{ |
|
"epoch": 0.8248945147679325, |
|
"grad_norm": 0.32221952080726624, |
|
"learning_rate": 0.00015020833487815416, |
|
"loss": 1.0871, |
|
"step": 7820 |
|
}, |
|
{ |
|
"epoch": 0.8259493670886076, |
|
"grad_norm": 0.3186984360218048, |
|
"learning_rate": 0.0001481379410870792, |
|
"loss": 1.079, |
|
"step": 7830 |
|
}, |
|
{ |
|
"epoch": 0.8270042194092827, |
|
"grad_norm": 0.3271889090538025, |
|
"learning_rate": 0.00014609608453033013, |
|
"loss": 1.0644, |
|
"step": 7840 |
|
}, |
|
{ |
|
"epoch": 0.8280590717299579, |
|
"grad_norm": 0.34038275480270386, |
|
"learning_rate": 0.00014408237186546807, |
|
"loss": 1.072, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 0.8291139240506329, |
|
"grad_norm": 0.3195663094520569, |
|
"learning_rate": 0.00014209641517168273, |
|
"loss": 1.0609, |
|
"step": 7860 |
|
}, |
|
{ |
|
"epoch": 0.830168776371308, |
|
"grad_norm": 0.3180614709854126, |
|
"learning_rate": 0.00014013783187506265, |
|
"loss": 1.0764, |
|
"step": 7870 |
|
}, |
|
{ |
|
"epoch": 0.8312236286919831, |
|
"grad_norm": 0.33012938499450684, |
|
"learning_rate": 0.00013820624467489697, |
|
"loss": 1.0924, |
|
"step": 7880 |
|
}, |
|
{ |
|
"epoch": 0.8322784810126582, |
|
"grad_norm": 0.3188948631286621, |
|
"learning_rate": 0.00013630128147099213, |
|
"loss": 1.0883, |
|
"step": 7890 |
|
}, |
|
{ |
|
"epoch": 0.8333333333333334, |
|
"grad_norm": 0.3357279896736145, |
|
"learning_rate": 0.00013442257529199068, |
|
"loss": 1.0711, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 0.8343881856540084, |
|
"grad_norm": 0.32860061526298523, |
|
"learning_rate": 0.00013256976422467803, |
|
"loss": 1.0801, |
|
"step": 7910 |
|
}, |
|
{ |
|
"epoch": 0.8354430379746836, |
|
"grad_norm": 0.33959388732910156, |
|
"learning_rate": 0.00013074249134426366, |
|
"loss": 1.0766, |
|
"step": 7920 |
|
}, |
|
{ |
|
"epoch": 0.8364978902953587, |
|
"grad_norm": 0.323219895362854, |
|
"learning_rate": 0.0001289404046456233, |
|
"loss": 1.0863, |
|
"step": 7930 |
|
}, |
|
{ |
|
"epoch": 0.8375527426160337, |
|
"grad_norm": 0.32049325108528137, |
|
"learning_rate": 0.0001271631569754887, |
|
"loss": 1.0798, |
|
"step": 7940 |
|
}, |
|
{ |
|
"epoch": 0.8386075949367089, |
|
"grad_norm": 0.3425719141960144, |
|
"learning_rate": 0.0001254104059655723, |
|
"loss": 1.0843, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 0.8396624472573839, |
|
"grad_norm": 0.31871697306632996, |
|
"learning_rate": 0.00012368181396661337, |
|
"loss": 1.0694, |
|
"step": 7960 |
|
}, |
|
{ |
|
"epoch": 0.8407172995780591, |
|
"grad_norm": 0.3283820152282715, |
|
"learning_rate": 0.00012197704798333364, |
|
"loss": 1.0705, |
|
"step": 7970 |
|
}, |
|
{ |
|
"epoch": 0.8417721518987342, |
|
"grad_norm": 0.31667330861091614, |
|
"learning_rate": 0.00012029577961028894, |
|
"loss": 1.0704, |
|
"step": 7980 |
|
}, |
|
{ |
|
"epoch": 0.8428270042194093, |
|
"grad_norm": 0.36018213629722595, |
|
"learning_rate": 0.00011863768496860542, |
|
"loss": 1.0803, |
|
"step": 7990 |
|
}, |
|
{ |
|
"epoch": 0.8438818565400844, |
|
"grad_norm": 0.3193598985671997, |
|
"learning_rate": 0.00011700244464358777, |
|
"loss": 1.0763, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.8449367088607594, |
|
"grad_norm": 0.33096808195114136, |
|
"learning_rate": 0.00011538974362318715, |
|
"loss": 1.0784, |
|
"step": 8010 |
|
}, |
|
{ |
|
"epoch": 0.8459915611814346, |
|
"grad_norm": 0.3247699737548828, |
|
"learning_rate": 0.00011379927123731737, |
|
"loss": 1.0745, |
|
"step": 8020 |
|
}, |
|
{ |
|
"epoch": 0.8470464135021097, |
|
"grad_norm": 0.33818915486335754, |
|
"learning_rate": 0.0001122307210980077, |
|
"loss": 1.0732, |
|
"step": 8030 |
|
}, |
|
{ |
|
"epoch": 0.8481012658227848, |
|
"grad_norm": 0.3458991050720215, |
|
"learning_rate": 0.00011068379104038026, |
|
"loss": 1.0837, |
|
"step": 8040 |
|
}, |
|
{ |
|
"epoch": 0.8491561181434599, |
|
"grad_norm": 0.35434862971305847, |
|
"learning_rate": 0.00010915818306444116, |
|
"loss": 1.0648, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 0.8502109704641351, |
|
"grad_norm": 0.3178817331790924, |
|
"learning_rate": 0.00010765360327767384, |
|
"loss": 1.0742, |
|
"step": 8060 |
|
}, |
|
{ |
|
"epoch": 0.8512658227848101, |
|
"grad_norm": 0.3386595547199249, |
|
"learning_rate": 0.00010616976183842376, |
|
"loss": 1.078, |
|
"step": 8070 |
|
}, |
|
{ |
|
"epoch": 0.8523206751054853, |
|
"grad_norm": 0.333050400018692, |
|
"learning_rate": 0.00010470637290006365, |
|
"loss": 1.0806, |
|
"step": 8080 |
|
}, |
|
{ |
|
"epoch": 0.8533755274261603, |
|
"grad_norm": 0.3198642432689667, |
|
"learning_rate": 0.00010326315455592764, |
|
"loss": 1.0683, |
|
"step": 8090 |
|
}, |
|
{ |
|
"epoch": 0.8544303797468354, |
|
"grad_norm": 0.3254936933517456, |
|
"learning_rate": 0.0001018398287850053, |
|
"loss": 1.0654, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 0.8554852320675106, |
|
"grad_norm": 0.3687814176082611, |
|
"learning_rate": 0.00010043612139838357, |
|
"loss": 1.0796, |
|
"step": 8110 |
|
}, |
|
{ |
|
"epoch": 0.8565400843881856, |
|
"grad_norm": 0.34864723682403564, |
|
"learning_rate": 9.905176198642719e-05, |
|
"loss": 1.0716, |
|
"step": 8120 |
|
}, |
|
{ |
|
"epoch": 0.8575949367088608, |
|
"grad_norm": 0.3522048890590668, |
|
"learning_rate": 9.76864838666871e-05, |
|
"loss": 1.0719, |
|
"step": 8130 |
|
}, |
|
{ |
|
"epoch": 0.8586497890295358, |
|
"grad_norm": 0.3265816867351532, |
|
"learning_rate": 9.634002403252676e-05, |
|
"loss": 1.0686, |
|
"step": 8140 |
|
}, |
|
{ |
|
"epoch": 0.859704641350211, |
|
"grad_norm": 0.3169366419315338, |
|
"learning_rate": 9.501212310245681e-05, |
|
"loss": 1.0684, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 0.8607594936708861, |
|
"grad_norm": 0.33043038845062256, |
|
"learning_rate": 9.370252527016777e-05, |
|
"loss": 1.0774, |
|
"step": 8160 |
|
}, |
|
{ |
|
"epoch": 0.8618143459915611, |
|
"grad_norm": 0.3303966820240021, |
|
"learning_rate": 9.241097825525163e-05, |
|
"loss": 1.0683, |
|
"step": 8170 |
|
}, |
|
{ |
|
"epoch": 0.8628691983122363, |
|
"grad_norm": 0.33609864115715027, |
|
"learning_rate": 9.113723325460276e-05, |
|
"loss": 1.0735, |
|
"step": 8180 |
|
}, |
|
{ |
|
"epoch": 0.8639240506329114, |
|
"grad_norm": 0.3414512574672699, |
|
"learning_rate": 8.988104489448849e-05, |
|
"loss": 1.0685, |
|
"step": 8190 |
|
}, |
|
{ |
|
"epoch": 0.8649789029535865, |
|
"grad_norm": 0.31755390763282776, |
|
"learning_rate": 8.864217118328042e-05, |
|
"loss": 1.0803, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 0.8660337552742616, |
|
"grad_norm": 0.3173295557498932, |
|
"learning_rate": 8.742037346483729e-05, |
|
"loss": 1.0733, |
|
"step": 8210 |
|
}, |
|
{ |
|
"epoch": 0.8670886075949367, |
|
"grad_norm": 0.3303743302822113, |
|
"learning_rate": 8.62154163725303e-05, |
|
"loss": 1.0794, |
|
"step": 8220 |
|
}, |
|
{ |
|
"epoch": 0.8681434599156118, |
|
"grad_norm": 0.3222305178642273, |
|
"learning_rate": 8.502706778390219e-05, |
|
"loss": 1.0764, |
|
"step": 8230 |
|
}, |
|
{ |
|
"epoch": 0.869198312236287, |
|
"grad_norm": 0.3168421685695648, |
|
"learning_rate": 8.38550987759513e-05, |
|
"loss": 1.0713, |
|
"step": 8240 |
|
}, |
|
{ |
|
"epoch": 0.870253164556962, |
|
"grad_norm": 0.3277452290058136, |
|
"learning_rate": 8.269928358103191e-05, |
|
"loss": 1.0841, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 0.8713080168776371, |
|
"grad_norm": 0.31893619894981384, |
|
"learning_rate": 8.155939954336243e-05, |
|
"loss": 1.0776, |
|
"step": 8260 |
|
}, |
|
{ |
|
"epoch": 0.8723628691983122, |
|
"grad_norm": 0.3293255865573883, |
|
"learning_rate": 8.043522707613312e-05, |
|
"loss": 1.0716, |
|
"step": 8270 |
|
}, |
|
{ |
|
"epoch": 0.8734177215189873, |
|
"grad_norm": 0.3259051740169525, |
|
"learning_rate": 7.932654961920486e-05, |
|
"loss": 1.058, |
|
"step": 8280 |
|
}, |
|
{ |
|
"epoch": 0.8744725738396625, |
|
"grad_norm": 0.34142494201660156, |
|
"learning_rate": 7.823315359739135e-05, |
|
"loss": 1.0615, |
|
"step": 8290 |
|
}, |
|
{ |
|
"epoch": 0.8755274261603375, |
|
"grad_norm": 0.31675177812576294, |
|
"learning_rate": 7.715482837931577e-05, |
|
"loss": 1.0818, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 0.8765822784810127, |
|
"grad_norm": 0.32128244638442993, |
|
"learning_rate": 7.6091366236835e-05, |
|
"loss": 1.0615, |
|
"step": 8310 |
|
}, |
|
{ |
|
"epoch": 0.8776371308016878, |
|
"grad_norm": 0.33591708540916443, |
|
"learning_rate": 7.504256230502289e-05, |
|
"loss": 1.0801, |
|
"step": 8320 |
|
}, |
|
{ |
|
"epoch": 0.8786919831223629, |
|
"grad_norm": 0.3197396397590637, |
|
"learning_rate": 7.400821454270524e-05, |
|
"loss": 1.0713, |
|
"step": 8330 |
|
}, |
|
{ |
|
"epoch": 0.879746835443038, |
|
"grad_norm": 0.32733380794525146, |
|
"learning_rate": 7.29881236935386e-05, |
|
"loss": 1.0613, |
|
"step": 8340 |
|
}, |
|
{ |
|
"epoch": 0.880801687763713, |
|
"grad_norm": 0.3151637017726898, |
|
"learning_rate": 7.198209324762562e-05, |
|
"loss": 1.0643, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 0.8818565400843882, |
|
"grad_norm": 0.3231916129589081, |
|
"learning_rate": 7.098992940365946e-05, |
|
"loss": 1.0613, |
|
"step": 8360 |
|
}, |
|
{ |
|
"epoch": 0.8829113924050633, |
|
"grad_norm": 0.3338322937488556, |
|
"learning_rate": 7.001144103159e-05, |
|
"loss": 1.0692, |
|
"step": 8370 |
|
}, |
|
{ |
|
"epoch": 0.8839662447257384, |
|
"grad_norm": 0.3360345661640167, |
|
"learning_rate": 6.904643963580461e-05, |
|
"loss": 1.0743, |
|
"step": 8380 |
|
}, |
|
{ |
|
"epoch": 0.8850210970464135, |
|
"grad_norm": 0.3391522169113159, |
|
"learning_rate": 6.809473931881644e-05, |
|
"loss": 1.0661, |
|
"step": 8390 |
|
}, |
|
{ |
|
"epoch": 0.8860759493670886, |
|
"grad_norm": 0.32648056745529175, |
|
"learning_rate": 6.71561567454532e-05, |
|
"loss": 1.0661, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 0.8871308016877637, |
|
"grad_norm": 0.3194596767425537, |
|
"learning_rate": 6.623051110753948e-05, |
|
"loss": 1.0827, |
|
"step": 8410 |
|
}, |
|
{ |
|
"epoch": 0.8881856540084389, |
|
"grad_norm": 0.31810277700424194, |
|
"learning_rate": 6.531762408906607e-05, |
|
"loss": 1.0726, |
|
"step": 8420 |
|
}, |
|
{ |
|
"epoch": 0.8892405063291139, |
|
"grad_norm": 0.3305910527706146, |
|
"learning_rate": 6.441731983183912e-05, |
|
"loss": 1.0709, |
|
"step": 8430 |
|
}, |
|
{ |
|
"epoch": 0.890295358649789, |
|
"grad_norm": 0.32806795835494995, |
|
"learning_rate": 6.352942490160292e-05, |
|
"loss": 1.0623, |
|
"step": 8440 |
|
}, |
|
{ |
|
"epoch": 0.8913502109704642, |
|
"grad_norm": 0.333575040102005, |
|
"learning_rate": 6.265376825462966e-05, |
|
"loss": 1.0708, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 0.8924050632911392, |
|
"grad_norm": 0.35335201025009155, |
|
"learning_rate": 6.179018120476945e-05, |
|
"loss": 1.0659, |
|
"step": 8460 |
|
}, |
|
{ |
|
"epoch": 0.8934599156118144, |
|
"grad_norm": 0.3264673948287964, |
|
"learning_rate": 6.0938497390954946e-05, |
|
"loss": 1.0669, |
|
"step": 8470 |
|
}, |
|
{ |
|
"epoch": 0.8945147679324894, |
|
"grad_norm": 0.3392159938812256, |
|
"learning_rate": 6.009855274515339e-05, |
|
"loss": 1.0623, |
|
"step": 8480 |
|
}, |
|
{ |
|
"epoch": 0.8955696202531646, |
|
"grad_norm": 0.3224976360797882, |
|
"learning_rate": 5.9270185460760735e-05, |
|
"loss": 1.0693, |
|
"step": 8490 |
|
}, |
|
{ |
|
"epoch": 0.8966244725738397, |
|
"grad_norm": 0.33346402645111084, |
|
"learning_rate": 5.8453235961431225e-05, |
|
"loss": 1.0586, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.8976793248945147, |
|
"grad_norm": 0.32544925808906555, |
|
"learning_rate": 5.764754687033678e-05, |
|
"loss": 1.0688, |
|
"step": 8510 |
|
}, |
|
{ |
|
"epoch": 0.8987341772151899, |
|
"grad_norm": 0.33190402388572693, |
|
"learning_rate": 5.6852962979849836e-05, |
|
"loss": 1.0658, |
|
"step": 8520 |
|
}, |
|
{ |
|
"epoch": 0.8997890295358649, |
|
"grad_norm": 0.3238281011581421, |
|
"learning_rate": 5.6069331221644284e-05, |
|
"loss": 1.0782, |
|
"step": 8530 |
|
}, |
|
{ |
|
"epoch": 0.9008438818565401, |
|
"grad_norm": 0.32378917932510376, |
|
"learning_rate": 5.529650063720842e-05, |
|
"loss": 1.0722, |
|
"step": 8540 |
|
}, |
|
{ |
|
"epoch": 0.9018987341772152, |
|
"grad_norm": 0.3402920663356781, |
|
"learning_rate": 5.453432234876445e-05, |
|
"loss": 1.066, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 0.9029535864978903, |
|
"grad_norm": 0.32455769181251526, |
|
"learning_rate": 5.37826495305886e-05, |
|
"loss": 1.0591, |
|
"step": 8560 |
|
}, |
|
{ |
|
"epoch": 0.9040084388185654, |
|
"grad_norm": 0.3707723319530487, |
|
"learning_rate": 5.304133738072674e-05, |
|
"loss": 1.0771, |
|
"step": 8570 |
|
}, |
|
{ |
|
"epoch": 0.9050632911392406, |
|
"grad_norm": 0.3264203369617462, |
|
"learning_rate": 5.2310243093099814e-05, |
|
"loss": 1.0685, |
|
"step": 8580 |
|
}, |
|
{ |
|
"epoch": 0.9061181434599156, |
|
"grad_norm": 0.343057245016098, |
|
"learning_rate": 5.158922582999368e-05, |
|
"loss": 1.0724, |
|
"step": 8590 |
|
}, |
|
{ |
|
"epoch": 0.9071729957805907, |
|
"grad_norm": 0.32586669921875, |
|
"learning_rate": 5.087814669492819e-05, |
|
"loss": 1.0629, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 0.9082278481012658, |
|
"grad_norm": 0.33987531065940857, |
|
"learning_rate": 5.017686870590028e-05, |
|
"loss": 1.0668, |
|
"step": 8610 |
|
}, |
|
{ |
|
"epoch": 0.9092827004219409, |
|
"grad_norm": 0.32024064660072327, |
|
"learning_rate": 4.948525676899577e-05, |
|
"loss": 1.0642, |
|
"step": 8620 |
|
}, |
|
{ |
|
"epoch": 0.9103375527426161, |
|
"grad_norm": 0.33926844596862793, |
|
"learning_rate": 4.880317765236493e-05, |
|
"loss": 1.0714, |
|
"step": 8630 |
|
}, |
|
{ |
|
"epoch": 0.9113924050632911, |
|
"grad_norm": 0.36164024472236633, |
|
"learning_rate": 4.8130499960556755e-05, |
|
"loss": 1.0544, |
|
"step": 8640 |
|
}, |
|
{ |
|
"epoch": 0.9124472573839663, |
|
"grad_norm": 0.3245364725589752, |
|
"learning_rate": 4.746709410920699e-05, |
|
"loss": 1.0571, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 0.9135021097046413, |
|
"grad_norm": 0.3439021408557892, |
|
"learning_rate": 4.681283230007507e-05, |
|
"loss": 1.0616, |
|
"step": 8660 |
|
}, |
|
{ |
|
"epoch": 0.9145569620253164, |
|
"grad_norm": 0.3390875458717346, |
|
"learning_rate": 4.616758849642509e-05, |
|
"loss": 1.0684, |
|
"step": 8670 |
|
}, |
|
{ |
|
"epoch": 0.9156118143459916, |
|
"grad_norm": 0.3170855641365051, |
|
"learning_rate": 4.553123839874615e-05, |
|
"loss": 1.076, |
|
"step": 8680 |
|
}, |
|
{ |
|
"epoch": 0.9166666666666666, |
|
"grad_norm": 0.324824720621109, |
|
"learning_rate": 4.490365942080736e-05, |
|
"loss": 1.0691, |
|
"step": 8690 |
|
}, |
|
{ |
|
"epoch": 0.9177215189873418, |
|
"grad_norm": 0.3236071467399597, |
|
"learning_rate": 4.428473066604285e-05, |
|
"loss": 1.0678, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 0.9187763713080169, |
|
"grad_norm": 0.33034569025039673, |
|
"learning_rate": 4.367433290426233e-05, |
|
"loss": 1.0675, |
|
"step": 8710 |
|
}, |
|
{ |
|
"epoch": 0.919831223628692, |
|
"grad_norm": 0.3257054388523102, |
|
"learning_rate": 4.3072348548682595e-05, |
|
"loss": 1.0606, |
|
"step": 8720 |
|
}, |
|
{ |
|
"epoch": 0.9208860759493671, |
|
"grad_norm": 0.31744372844696045, |
|
"learning_rate": 4.247866163327575e-05, |
|
"loss": 1.0726, |
|
"step": 8730 |
|
}, |
|
{ |
|
"epoch": 0.9219409282700421, |
|
"grad_norm": 0.33041128516197205, |
|
"learning_rate": 4.1893157790429404e-05, |
|
"loss": 1.0634, |
|
"step": 8740 |
|
}, |
|
{ |
|
"epoch": 0.9229957805907173, |
|
"grad_norm": 0.3342299461364746, |
|
"learning_rate": 4.1315724228915066e-05, |
|
"loss": 1.0609, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 0.9240506329113924, |
|
"grad_norm": 0.32655617594718933, |
|
"learning_rate": 4.074624971216005e-05, |
|
"loss": 1.0516, |
|
"step": 8760 |
|
}, |
|
{ |
|
"epoch": 0.9251054852320675, |
|
"grad_norm": 0.32039201259613037, |
|
"learning_rate": 4.018462453681889e-05, |
|
"loss": 1.0596, |
|
"step": 8770 |
|
}, |
|
{ |
|
"epoch": 0.9261603375527426, |
|
"grad_norm": 0.33058446645736694, |
|
"learning_rate": 3.963074051164014e-05, |
|
"loss": 1.0648, |
|
"step": 8780 |
|
}, |
|
{ |
|
"epoch": 0.9272151898734177, |
|
"grad_norm": 0.32546499371528625, |
|
"learning_rate": 3.908449093662446e-05, |
|
"loss": 1.0598, |
|
"step": 8790 |
|
}, |
|
{ |
|
"epoch": 0.9282700421940928, |
|
"grad_norm": 0.3228848874568939, |
|
"learning_rate": 3.854577058246998e-05, |
|
"loss": 1.0557, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 0.929324894514768, |
|
"grad_norm": 0.33862391114234924, |
|
"learning_rate": 3.801447567030094e-05, |
|
"loss": 1.076, |
|
"step": 8810 |
|
}, |
|
{ |
|
"epoch": 0.930379746835443, |
|
"grad_norm": 0.3227675259113312, |
|
"learning_rate": 3.7490503851675777e-05, |
|
"loss": 1.0605, |
|
"step": 8820 |
|
}, |
|
{ |
|
"epoch": 0.9314345991561181, |
|
"grad_norm": 0.3217218816280365, |
|
"learning_rate": 3.6973754188870806e-05, |
|
"loss": 1.0786, |
|
"step": 8830 |
|
}, |
|
{ |
|
"epoch": 0.9324894514767933, |
|
"grad_norm": 0.3212469220161438, |
|
"learning_rate": 3.6464127135435536e-05, |
|
"loss": 1.066, |
|
"step": 8840 |
|
}, |
|
{ |
|
"epoch": 0.9335443037974683, |
|
"grad_norm": 0.3378359377384186, |
|
"learning_rate": 3.596152451701616e-05, |
|
"loss": 1.0651, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 0.9345991561181435, |
|
"grad_norm": 0.31813666224479675, |
|
"learning_rate": 3.5465849512443226e-05, |
|
"loss": 1.0633, |
|
"step": 8860 |
|
}, |
|
{ |
|
"epoch": 0.9356540084388185, |
|
"grad_norm": 0.32426491379737854, |
|
"learning_rate": 3.4977006635080086e-05, |
|
"loss": 1.064, |
|
"step": 8870 |
|
}, |
|
{ |
|
"epoch": 0.9367088607594937, |
|
"grad_norm": 0.34550321102142334, |
|
"learning_rate": 3.449490171442838e-05, |
|
"loss": 1.0705, |
|
"step": 8880 |
|
}, |
|
{ |
|
"epoch": 0.9377637130801688, |
|
"grad_norm": 0.3262031674385071, |
|
"learning_rate": 3.401944187798702e-05, |
|
"loss": 1.0669, |
|
"step": 8890 |
|
}, |
|
{ |
|
"epoch": 0.9388185654008439, |
|
"grad_norm": 0.31900161504745483, |
|
"learning_rate": 3.355053553336137e-05, |
|
"loss": 1.058, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 0.939873417721519, |
|
"grad_norm": 0.3401828706264496, |
|
"learning_rate": 3.308809235061882e-05, |
|
"loss": 1.0604, |
|
"step": 8910 |
|
}, |
|
{ |
|
"epoch": 0.9409282700421941, |
|
"grad_norm": 0.33225199580192566, |
|
"learning_rate": 3.263202324488772e-05, |
|
"loss": 1.0653, |
|
"step": 8920 |
|
}, |
|
{ |
|
"epoch": 0.9419831223628692, |
|
"grad_norm": 0.32263875007629395, |
|
"learning_rate": 3.218224035919609e-05, |
|
"loss": 1.0632, |
|
"step": 8930 |
|
}, |
|
{ |
|
"epoch": 0.9430379746835443, |
|
"grad_norm": 0.32119113206863403, |
|
"learning_rate": 3.173865704754688e-05, |
|
"loss": 1.0623, |
|
"step": 8940 |
|
}, |
|
{ |
|
"epoch": 0.9440928270042194, |
|
"grad_norm": 0.3211652338504791, |
|
"learning_rate": 3.130118785822657e-05, |
|
"loss": 1.0641, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 0.9451476793248945, |
|
"grad_norm": 0.3391672372817993, |
|
"learning_rate": 3.08697485173437e-05, |
|
"loss": 1.0631, |
|
"step": 8960 |
|
}, |
|
{ |
|
"epoch": 0.9462025316455697, |
|
"grad_norm": 0.3661402761936188, |
|
"learning_rate": 3.0444255912594442e-05, |
|
"loss": 1.0639, |
|
"step": 8970 |
|
}, |
|
{ |
|
"epoch": 0.9472573839662447, |
|
"grad_norm": 0.35915690660476685, |
|
"learning_rate": 3.002462807725185e-05, |
|
"loss": 1.0599, |
|
"step": 8980 |
|
}, |
|
{ |
|
"epoch": 0.9483122362869199, |
|
"grad_norm": 0.32106488943099976, |
|
"learning_rate": 2.9610784174375868e-05, |
|
"loss": 1.0659, |
|
"step": 8990 |
|
}, |
|
{ |
|
"epoch": 0.9493670886075949, |
|
"grad_norm": 0.32112917304039, |
|
"learning_rate": 2.920264448124087e-05, |
|
"loss": 1.0639, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.95042194092827, |
|
"grad_norm": 0.32968273758888245, |
|
"learning_rate": 2.8800130373977936e-05, |
|
"loss": 1.0543, |
|
"step": 9010 |
|
}, |
|
{ |
|
"epoch": 0.9514767932489452, |
|
"grad_norm": 0.338516503572464, |
|
"learning_rate": 2.84031643124288e-05, |
|
"loss": 1.0592, |
|
"step": 9020 |
|
}, |
|
{ |
|
"epoch": 0.9525316455696202, |
|
"grad_norm": 0.330265074968338, |
|
"learning_rate": 2.8011669825208517e-05, |
|
"loss": 1.0776, |
|
"step": 9030 |
|
}, |
|
{ |
|
"epoch": 0.9535864978902954, |
|
"grad_norm": 0.3306685984134674, |
|
"learning_rate": 2.762557149497405e-05, |
|
"loss": 1.0539, |
|
"step": 9040 |
|
}, |
|
{ |
|
"epoch": 0.9546413502109705, |
|
"grad_norm": 0.3367357850074768, |
|
"learning_rate": 2.724479494389592e-05, |
|
"loss": 1.063, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 0.9556962025316456, |
|
"grad_norm": 0.31615322828292847, |
|
"learning_rate": 2.6869266819330058e-05, |
|
"loss": 1.0667, |
|
"step": 9060 |
|
}, |
|
{ |
|
"epoch": 0.9567510548523207, |
|
"grad_norm": 0.3239525556564331, |
|
"learning_rate": 2.6498914779687228e-05, |
|
"loss": 1.0685, |
|
"step": 9070 |
|
}, |
|
{ |
|
"epoch": 0.9578059071729957, |
|
"grad_norm": 0.33848342299461365, |
|
"learning_rate": 2.6133667480497115e-05, |
|
"loss": 1.0673, |
|
"step": 9080 |
|
}, |
|
{ |
|
"epoch": 0.9588607594936709, |
|
"grad_norm": 0.32886168360710144, |
|
"learning_rate": 2.5773454560664597e-05, |
|
"loss": 1.0668, |
|
"step": 9090 |
|
}, |
|
{ |
|
"epoch": 0.959915611814346, |
|
"grad_norm": 0.3179469704627991, |
|
"learning_rate": 2.541820662891541e-05, |
|
"loss": 1.0475, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 0.9609704641350211, |
|
"grad_norm": 0.3251250982284546, |
|
"learning_rate": 2.5067855250428616e-05, |
|
"loss": 1.0579, |
|
"step": 9110 |
|
}, |
|
{ |
|
"epoch": 0.9620253164556962, |
|
"grad_norm": 0.3251715898513794, |
|
"learning_rate": 2.472233293365335e-05, |
|
"loss": 1.0719, |
|
"step": 9120 |
|
}, |
|
{ |
|
"epoch": 0.9630801687763713, |
|
"grad_norm": 0.33614784479141235, |
|
"learning_rate": 2.4381573117307307e-05, |
|
"loss": 1.0542, |
|
"step": 9130 |
|
}, |
|
{ |
|
"epoch": 0.9641350210970464, |
|
"grad_norm": 0.3413209021091461, |
|
"learning_rate": 2.4045510157554362e-05, |
|
"loss": 1.0709, |
|
"step": 9140 |
|
}, |
|
{ |
|
"epoch": 0.9651898734177216, |
|
"grad_norm": 0.3312832713127136, |
|
"learning_rate": 2.3714079315358985e-05, |
|
"loss": 1.0591, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 0.9662447257383966, |
|
"grad_norm": 0.3249393701553345, |
|
"learning_rate": 2.338721674401494e-05, |
|
"loss": 1.0675, |
|
"step": 9160 |
|
}, |
|
{ |
|
"epoch": 0.9672995780590717, |
|
"grad_norm": 0.32825586199760437, |
|
"learning_rate": 2.30648594768459e-05, |
|
"loss": 1.0653, |
|
"step": 9170 |
|
}, |
|
{ |
|
"epoch": 0.9683544303797469, |
|
"grad_norm": 0.3269410729408264, |
|
"learning_rate": 2.2746945415075523e-05, |
|
"loss": 1.0617, |
|
"step": 9180 |
|
}, |
|
{ |
|
"epoch": 0.9694092827004219, |
|
"grad_norm": 0.3209409713745117, |
|
"learning_rate": 2.2433413315864803e-05, |
|
"loss": 1.0578, |
|
"step": 9190 |
|
}, |
|
{ |
|
"epoch": 0.9704641350210971, |
|
"grad_norm": 0.3257068395614624, |
|
"learning_rate": 2.2124202780514277e-05, |
|
"loss": 1.0625, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 0.9715189873417721, |
|
"grad_norm": 0.3229939043521881, |
|
"learning_rate": 2.1819254242828815e-05, |
|
"loss": 1.0616, |
|
"step": 9210 |
|
}, |
|
{ |
|
"epoch": 0.9725738396624473, |
|
"grad_norm": 0.33057811856269836, |
|
"learning_rate": 2.151850895764285e-05, |
|
"loss": 1.0591, |
|
"step": 9220 |
|
}, |
|
{ |
|
"epoch": 0.9736286919831224, |
|
"grad_norm": 0.305545449256897, |
|
"learning_rate": 2.12219089895037e-05, |
|
"loss": 1.0513, |
|
"step": 9230 |
|
}, |
|
{ |
|
"epoch": 0.9746835443037974, |
|
"grad_norm": 0.32736852765083313, |
|
"learning_rate": 2.092939720151092e-05, |
|
"loss": 1.0479, |
|
"step": 9240 |
|
}, |
|
{ |
|
"epoch": 0.9757383966244726, |
|
"grad_norm": 0.3169935643672943, |
|
"learning_rate": 2.064091724430947e-05, |
|
"loss": 1.0569, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 0.9767932489451476, |
|
"grad_norm": 0.3271348774433136, |
|
"learning_rate": 2.0356413545234603e-05, |
|
"loss": 1.0539, |
|
"step": 9260 |
|
}, |
|
{ |
|
"epoch": 0.9778481012658228, |
|
"grad_norm": 0.31698349118232727, |
|
"learning_rate": 2.0075831297606357e-05, |
|
"loss": 1.0645, |
|
"step": 9270 |
|
}, |
|
{ |
|
"epoch": 0.9789029535864979, |
|
"grad_norm": 0.3189639449119568, |
|
"learning_rate": 1.9799116450171627e-05, |
|
"loss": 1.0515, |
|
"step": 9280 |
|
}, |
|
{ |
|
"epoch": 0.979957805907173, |
|
"grad_norm": 0.3284784257411957, |
|
"learning_rate": 1.952621569669175e-05, |
|
"loss": 1.0612, |
|
"step": 9290 |
|
}, |
|
{ |
|
"epoch": 0.9810126582278481, |
|
"grad_norm": 0.32952404022216797, |
|
"learning_rate": 1.9257076465673605e-05, |
|
"loss": 1.0593, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 0.9820675105485233, |
|
"grad_norm": 0.32908713817596436, |
|
"learning_rate": 1.899164691024229e-05, |
|
"loss": 1.0613, |
|
"step": 9310 |
|
}, |
|
{ |
|
"epoch": 0.9831223628691983, |
|
"grad_norm": 0.3283434808254242, |
|
"learning_rate": 1.872987589815331e-05, |
|
"loss": 1.0548, |
|
"step": 9320 |
|
}, |
|
{ |
|
"epoch": 0.9841772151898734, |
|
"grad_norm": 0.33627134561538696, |
|
"learning_rate": 1.8471713001942538e-05, |
|
"loss": 1.0674, |
|
"step": 9330 |
|
}, |
|
{ |
|
"epoch": 0.9852320675105485, |
|
"grad_norm": 0.3174121677875519, |
|
"learning_rate": 1.8217108489211845e-05, |
|
"loss": 1.065, |
|
"step": 9340 |
|
}, |
|
{ |
|
"epoch": 0.9862869198312236, |
|
"grad_norm": 0.3188031315803528, |
|
"learning_rate": 1.7966013313048696e-05, |
|
"loss": 1.074, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 0.9873417721518988, |
|
"grad_norm": 0.3233587145805359, |
|
"learning_rate": 1.7718379102577752e-05, |
|
"loss": 1.0646, |
|
"step": 9360 |
|
}, |
|
{ |
|
"epoch": 0.9883966244725738, |
|
"grad_norm": 0.32843634486198425, |
|
"learning_rate": 1.7474158153642745e-05, |
|
"loss": 1.0637, |
|
"step": 9370 |
|
}, |
|
{ |
|
"epoch": 0.989451476793249, |
|
"grad_norm": 0.3318040668964386, |
|
"learning_rate": 1.7233303419616745e-05, |
|
"loss": 1.0532, |
|
"step": 9380 |
|
}, |
|
{ |
|
"epoch": 0.990506329113924, |
|
"grad_norm": 0.343174546957016, |
|
"learning_rate": 1.699576850233916e-05, |
|
"loss": 1.0554, |
|
"step": 9390 |
|
}, |
|
{ |
|
"epoch": 0.9915611814345991, |
|
"grad_norm": 0.3253111243247986, |
|
"learning_rate": 1.6761507643177553e-05, |
|
"loss": 1.0696, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 0.9926160337552743, |
|
"grad_norm": 0.3279666602611542, |
|
"learning_rate": 1.6530475714212752e-05, |
|
"loss": 1.046, |
|
"step": 9410 |
|
}, |
|
{ |
|
"epoch": 0.9936708860759493, |
|
"grad_norm": 0.32378527522087097, |
|
"learning_rate": 1.6302628209545423e-05, |
|
"loss": 1.0631, |
|
"step": 9420 |
|
}, |
|
{ |
|
"epoch": 0.9947257383966245, |
|
"grad_norm": 0.31868770718574524, |
|
"learning_rate": 1.6077921236722464e-05, |
|
"loss": 1.063, |
|
"step": 9430 |
|
}, |
|
{ |
|
"epoch": 0.9957805907172996, |
|
"grad_norm": 0.323849618434906, |
|
"learning_rate": 1.5856311508281594e-05, |
|
"loss": 1.0585, |
|
"step": 9440 |
|
}, |
|
{ |
|
"epoch": 0.9968354430379747, |
|
"grad_norm": 0.320055216550827, |
|
"learning_rate": 1.5637756333412454e-05, |
|
"loss": 1.0692, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 0.9978902953586498, |
|
"grad_norm": 0.32591259479522705, |
|
"learning_rate": 1.542221360973268e-05, |
|
"loss": 1.0535, |
|
"step": 9460 |
|
}, |
|
{ |
|
"epoch": 0.9989451476793249, |
|
"grad_norm": 0.3283117413520813, |
|
"learning_rate": 1.5209641815177312e-05, |
|
"loss": 1.0675, |
|
"step": 9470 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.9648054838180542, |
|
"learning_rate": 1.5e-05, |
|
"loss": 1.0556, |
|
"step": 9480 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 9480, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 1000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 5.037432118742016e+16, |
|
"train_batch_size": 1024, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|