|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"eval_steps": 500, |
|
"global_step": 3898, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.7940189116741517, |
|
"learning_rate": 5.128205128205128e-07, |
|
"loss": 1.2901, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.8549670354423652, |
|
"learning_rate": 2.564102564102564e-06, |
|
"loss": 1.1983, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.5576375802504129, |
|
"learning_rate": 5.128205128205128e-06, |
|
"loss": 1.0245, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.659060642425238, |
|
"learning_rate": 7.692307692307694e-06, |
|
"loss": 1.059, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.5974849589264373, |
|
"learning_rate": 1.0256410256410256e-05, |
|
"loss": 1.1426, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.8598589383620435, |
|
"learning_rate": 1.282051282051282e-05, |
|
"loss": 1.0725, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.7508232225220164, |
|
"learning_rate": 1.5384615384615387e-05, |
|
"loss": 1.1266, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.7318286527682681, |
|
"learning_rate": 1.794871794871795e-05, |
|
"loss": 0.8731, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.4400452199237334, |
|
"learning_rate": 2.0512820512820512e-05, |
|
"loss": 0.9001, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.3581907713644131, |
|
"learning_rate": 2.307692307692308e-05, |
|
"loss": 0.8492, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.4752977404545827, |
|
"learning_rate": 2.564102564102564e-05, |
|
"loss": 0.8143, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.47540987111824407, |
|
"learning_rate": 2.8205128205128207e-05, |
|
"loss": 0.7851, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.3486905588773928, |
|
"learning_rate": 3.0769230769230774e-05, |
|
"loss": 0.7796, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.4379263022871022, |
|
"learning_rate": 3.3333333333333335e-05, |
|
"loss": 0.7896, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.46208045268944636, |
|
"learning_rate": 3.58974358974359e-05, |
|
"loss": 0.8126, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.38482216190878704, |
|
"learning_rate": 3.846153846153846e-05, |
|
"loss": 0.7794, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.36177681119574645, |
|
"learning_rate": 4.1025641025641023e-05, |
|
"loss": 0.7081, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.4222837002069195, |
|
"learning_rate": 4.358974358974359e-05, |
|
"loss": 0.7807, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.37505732650970885, |
|
"learning_rate": 4.615384615384616e-05, |
|
"loss": 0.6835, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.5376404476864999, |
|
"learning_rate": 4.871794871794872e-05, |
|
"loss": 0.6775, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.33127308677570055, |
|
"learning_rate": 5.128205128205128e-05, |
|
"loss": 0.7702, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.30805670386128153, |
|
"learning_rate": 5.384615384615385e-05, |
|
"loss": 0.7311, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.3064832637009184, |
|
"learning_rate": 5.6410256410256414e-05, |
|
"loss": 0.7068, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.36300239603665035, |
|
"learning_rate": 5.897435897435898e-05, |
|
"loss": 0.8024, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.546235320721914, |
|
"learning_rate": 6.153846153846155e-05, |
|
"loss": 0.8347, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.4262756501942376, |
|
"learning_rate": 6.410256410256412e-05, |
|
"loss": 0.7733, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.31870027810040585, |
|
"learning_rate": 6.666666666666667e-05, |
|
"loss": 0.7072, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.5080881100002653, |
|
"learning_rate": 6.923076923076924e-05, |
|
"loss": 0.7419, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.45131101725332007, |
|
"learning_rate": 7.17948717948718e-05, |
|
"loss": 0.6721, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.4015662904289768, |
|
"learning_rate": 7.435897435897436e-05, |
|
"loss": 0.8274, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.30635979357766263, |
|
"learning_rate": 7.692307692307693e-05, |
|
"loss": 0.7501, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.2563663658930907, |
|
"learning_rate": 7.948717948717948e-05, |
|
"loss": 0.6621, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.3851611498390434, |
|
"learning_rate": 8.205128205128205e-05, |
|
"loss": 0.6981, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.32054712548206815, |
|
"learning_rate": 8.461538461538461e-05, |
|
"loss": 0.7648, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.3604034886379123, |
|
"learning_rate": 8.717948717948718e-05, |
|
"loss": 0.721, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.2978365211793088, |
|
"learning_rate": 8.974358974358975e-05, |
|
"loss": 0.7665, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.28010588660358143, |
|
"learning_rate": 9.230769230769232e-05, |
|
"loss": 0.6919, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.3041954902881447, |
|
"learning_rate": 9.487179487179487e-05, |
|
"loss": 0.6963, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.3624614794713746, |
|
"learning_rate": 9.743589743589744e-05, |
|
"loss": 0.7866, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.29529594264100556, |
|
"learning_rate": 0.0001, |
|
"loss": 0.7542, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.29313493488432457, |
|
"learning_rate": 0.00010256410256410256, |
|
"loss": 0.8368, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.3309957222386564, |
|
"learning_rate": 0.00010512820512820514, |
|
"loss": 0.6755, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.39864413284171824, |
|
"learning_rate": 0.0001076923076923077, |
|
"loss": 0.7935, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.3022145052274676, |
|
"learning_rate": 0.00011025641025641027, |
|
"loss": 0.77, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.3188687614270182, |
|
"learning_rate": 0.00011282051282051283, |
|
"loss": 0.764, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.2771960353584736, |
|
"learning_rate": 0.00011538461538461538, |
|
"loss": 0.7025, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.34181074966932906, |
|
"learning_rate": 0.00011794871794871796, |
|
"loss": 0.7206, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.28461361041254846, |
|
"learning_rate": 0.00012051282051282052, |
|
"loss": 0.752, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.7008172335009641, |
|
"learning_rate": 0.0001230769230769231, |
|
"loss": 0.8087, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.21894206670691171, |
|
"learning_rate": 0.00012564102564102564, |
|
"loss": 0.7153, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.29593725228870665, |
|
"learning_rate": 0.00012820512820512823, |
|
"loss": 0.7168, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.2519334320641686, |
|
"learning_rate": 0.00013076923076923077, |
|
"loss": 0.7517, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.32763786349227375, |
|
"learning_rate": 0.00013333333333333334, |
|
"loss": 0.6877, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.3792543190404934, |
|
"learning_rate": 0.0001358974358974359, |
|
"loss": 0.7357, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.21121164349273067, |
|
"learning_rate": 0.00013846153846153847, |
|
"loss": 0.7251, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.25702919707855326, |
|
"learning_rate": 0.00014102564102564104, |
|
"loss": 0.728, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.2523953056455061, |
|
"learning_rate": 0.0001435897435897436, |
|
"loss": 0.6635, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.29400188903129665, |
|
"learning_rate": 0.00014615384615384615, |
|
"loss": 0.6737, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.28480340987225256, |
|
"learning_rate": 0.00014871794871794872, |
|
"loss": 0.6627, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.6318376196782489, |
|
"learning_rate": 0.00015128205128205128, |
|
"loss": 0.7294, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.30039963893256977, |
|
"learning_rate": 0.00015384615384615385, |
|
"loss": 0.8311, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.28222145819043015, |
|
"learning_rate": 0.00015641025641025642, |
|
"loss": 0.7052, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.23714903546035276, |
|
"learning_rate": 0.00015897435897435896, |
|
"loss": 0.6598, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.24757276162013622, |
|
"learning_rate": 0.00016153846153846155, |
|
"loss": 0.7401, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.25140380741663676, |
|
"learning_rate": 0.0001641025641025641, |
|
"loss": 0.6531, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.30309172865038203, |
|
"learning_rate": 0.0001666666666666667, |
|
"loss": 0.6746, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.27958455276568867, |
|
"learning_rate": 0.00016923076923076923, |
|
"loss": 0.7966, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.23922247511746272, |
|
"learning_rate": 0.0001717948717948718, |
|
"loss": 0.8222, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.2525786116177449, |
|
"learning_rate": 0.00017435897435897436, |
|
"loss": 0.7323, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.2657739010257245, |
|
"learning_rate": 0.00017692307692307693, |
|
"loss": 0.7402, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.28236890518445834, |
|
"learning_rate": 0.0001794871794871795, |
|
"loss": 0.8582, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.33868871224317904, |
|
"learning_rate": 0.00018205128205128207, |
|
"loss": 0.7021, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3770994222569293, |
|
"learning_rate": 0.00018461538461538463, |
|
"loss": 0.6761, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.4018353610014071, |
|
"learning_rate": 0.0001871794871794872, |
|
"loss": 0.7644, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.29301791588126386, |
|
"learning_rate": 0.00018974358974358974, |
|
"loss": 0.7497, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.2525840897139569, |
|
"learning_rate": 0.00019230769230769233, |
|
"loss": 0.6905, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.2282690464906889, |
|
"learning_rate": 0.00019487179487179487, |
|
"loss": 0.628, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.29122371361878807, |
|
"learning_rate": 0.00019743589743589744, |
|
"loss": 0.7005, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.2641474899247467, |
|
"learning_rate": 0.0002, |
|
"loss": 0.8084, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.21454542770751026, |
|
"learning_rate": 0.00019999899748734544, |
|
"loss": 0.7702, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.22924120222036806, |
|
"learning_rate": 0.00019999598996948235, |
|
"loss": 0.6556, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.32163392243239314, |
|
"learning_rate": 0.00019999097750671223, |
|
"loss": 0.731, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.25259388958326623, |
|
"learning_rate": 0.00019998396019953624, |
|
"loss": 0.6915, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.258567962978177, |
|
"learning_rate": 0.00019997493818865318, |
|
"loss": 0.7234, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.2743302123872774, |
|
"learning_rate": 0.0001999639116549566, |
|
"loss": 0.5963, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.26201706655388224, |
|
"learning_rate": 0.00019995088081953136, |
|
"loss": 0.7192, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.33838871762466366, |
|
"learning_rate": 0.00019993584594364894, |
|
"loss": 0.6471, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.24855520937031114, |
|
"learning_rate": 0.00019991880732876246, |
|
"loss": 0.7001, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.23979280174747938, |
|
"learning_rate": 0.0001998997653165004, |
|
"loss": 0.7427, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.2491200817907366, |
|
"learning_rate": 0.00019987872028866003, |
|
"loss": 0.8069, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.2799825613310986, |
|
"learning_rate": 0.00019985567266719934, |
|
"loss": 0.7842, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.2171357139053752, |
|
"learning_rate": 0.00019983062291422908, |
|
"loss": 0.7458, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.2571958093636779, |
|
"learning_rate": 0.00019980357153200315, |
|
"loss": 0.7086, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.32814189406723127, |
|
"learning_rate": 0.00019977451906290854, |
|
"loss": 0.7168, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.232636717309027, |
|
"learning_rate": 0.00019974346608945466, |
|
"loss": 0.7444, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.2564556626752832, |
|
"learning_rate": 0.0001997104132342614, |
|
"loss": 0.6684, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.2328343453656683, |
|
"learning_rate": 0.00019967536116004698, |
|
"loss": 0.814, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.2743961290431054, |
|
"learning_rate": 0.00019963831056961433, |
|
"loss": 0.6833, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.26151241758992244, |
|
"learning_rate": 0.00019959926220583713, |
|
"loss": 0.7213, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.23479176176270686, |
|
"learning_rate": 0.000199558216851645, |
|
"loss": 0.7367, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.2620759960364289, |
|
"learning_rate": 0.00019951517533000764, |
|
"loss": 0.6078, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.2558927478335512, |
|
"learning_rate": 0.00019947013850391847, |
|
"loss": 0.712, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.25988357901337633, |
|
"learning_rate": 0.00019942310727637724, |
|
"loss": 0.7728, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.2750603881771427, |
|
"learning_rate": 0.000199374082590372, |
|
"loss": 0.7573, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.22168568020157275, |
|
"learning_rate": 0.00019932306542886009, |
|
"loss": 0.741, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.2776057859080138, |
|
"learning_rate": 0.0001992700568147485, |
|
"loss": 0.7221, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.309528370273256, |
|
"learning_rate": 0.00019921505781087334, |
|
"loss": 0.7362, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.3228391807985929, |
|
"learning_rate": 0.00019915806951997862, |
|
"loss": 0.7958, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.23972118913650053, |
|
"learning_rate": 0.00019909909308469398, |
|
"loss": 0.76, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.24351868304057644, |
|
"learning_rate": 0.0001990381296875118, |
|
"loss": 0.6869, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.35582198947398647, |
|
"learning_rate": 0.0001989751805507637, |
|
"loss": 0.814, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.2711433452256402, |
|
"learning_rate": 0.0001989102469365958, |
|
"loss": 0.8162, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.23300078896983795, |
|
"learning_rate": 0.00019884333014694345, |
|
"loss": 0.7118, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.2640371108479178, |
|
"learning_rate": 0.00019877443152350527, |
|
"loss": 0.7364, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.24281805118776953, |
|
"learning_rate": 0.00019870355244771607, |
|
"loss": 0.6726, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.2937389542412289, |
|
"learning_rate": 0.0001986306943407193, |
|
"loss": 0.7418, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.244991044922138, |
|
"learning_rate": 0.00019855585866333835, |
|
"loss": 0.7476, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.2799677386092935, |
|
"learning_rate": 0.00019847904691604757, |
|
"loss": 0.751, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.28203702245279344, |
|
"learning_rate": 0.00019840026063894193, |
|
"loss": 0.6952, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.32320414101450645, |
|
"learning_rate": 0.0001983195014117062, |
|
"loss": 0.7425, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.2991698548751533, |
|
"learning_rate": 0.00019823677085358335, |
|
"loss": 0.6539, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.279534327063564, |
|
"learning_rate": 0.00019815207062334197, |
|
"loss": 0.6844, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.23855364154241135, |
|
"learning_rate": 0.00019806540241924317, |
|
"loss": 0.7484, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.2750191162103587, |
|
"learning_rate": 0.00019797676797900633, |
|
"loss": 0.7655, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.2592922229570989, |
|
"learning_rate": 0.00019788616907977441, |
|
"loss": 0.7368, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.21539765246350112, |
|
"learning_rate": 0.0001977936075380783, |
|
"loss": 0.6314, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.24593391755509492, |
|
"learning_rate": 0.00019769908520980034, |
|
"loss": 0.7463, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.277301291124461, |
|
"learning_rate": 0.00019760260399013708, |
|
"loss": 0.721, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.2676695353061291, |
|
"learning_rate": 0.00019750416581356146, |
|
"loss": 0.7112, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.24096399922364115, |
|
"learning_rate": 0.0001974037726537838, |
|
"loss": 0.6304, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.2973476162334585, |
|
"learning_rate": 0.00019730142652371236, |
|
"loss": 0.6467, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.2766324556390413, |
|
"learning_rate": 0.00019719712947541295, |
|
"loss": 0.6812, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.18389408950636116, |
|
"learning_rate": 0.0001970908836000678, |
|
"loss": 0.6861, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.2410351531016274, |
|
"learning_rate": 0.00019698269102793358, |
|
"loss": 0.6532, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.30102039950416376, |
|
"learning_rate": 0.00019687255392829877, |
|
"loss": 0.6568, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.23183235360102647, |
|
"learning_rate": 0.00019676047450944008, |
|
"loss": 0.5751, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.2933000436636493, |
|
"learning_rate": 0.0001966464550185782, |
|
"loss": 0.7648, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.277707855647548, |
|
"learning_rate": 0.00019653049774183282, |
|
"loss": 0.5829, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.2734745539147106, |
|
"learning_rate": 0.0001964126050041767, |
|
"loss": 0.7626, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.27801067049837663, |
|
"learning_rate": 0.000196292779169389, |
|
"loss": 0.6745, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.35804549859433416, |
|
"learning_rate": 0.0001961710226400081, |
|
"loss": 0.7154, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.33687341140848004, |
|
"learning_rate": 0.00019604733785728317, |
|
"loss": 0.7147, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.3002177976890752, |
|
"learning_rate": 0.00019592172730112544, |
|
"loss": 0.708, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.29390663775324355, |
|
"learning_rate": 0.00019579419349005837, |
|
"loss": 0.7968, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.2746665644289368, |
|
"learning_rate": 0.00019566473898116713, |
|
"loss": 0.7023, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.30268203726113624, |
|
"learning_rate": 0.00019553336637004735, |
|
"loss": 0.742, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.2203190362696549, |
|
"learning_rate": 0.0001954000782907532, |
|
"loss": 0.7494, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.27657841960508833, |
|
"learning_rate": 0.00019526487741574437, |
|
"loss": 0.7819, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.30505267558261334, |
|
"learning_rate": 0.00019512776645583263, |
|
"loss": 0.6908, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.275012136618905, |
|
"learning_rate": 0.0001949887481601274, |
|
"loss": 0.812, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.4270284365039847, |
|
"learning_rate": 0.00019484782531598073, |
|
"loss": 0.6919, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.33434183829428676, |
|
"learning_rate": 0.0001947050007489313, |
|
"loss": 0.8373, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.30427892181618366, |
|
"learning_rate": 0.00019456027732264784, |
|
"loss": 0.7148, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.3415786735415135, |
|
"learning_rate": 0.00019441365793887162, |
|
"loss": 0.7583, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.2507569242816778, |
|
"learning_rate": 0.00019426514553735848, |
|
"loss": 0.7327, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.21839556494141277, |
|
"learning_rate": 0.00019411474309581958, |
|
"loss": 0.6872, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.27502259439590165, |
|
"learning_rate": 0.00019396245362986197, |
|
"loss": 0.7212, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.31223346554540926, |
|
"learning_rate": 0.00019380828019292798, |
|
"loss": 0.6689, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.36757959748248226, |
|
"learning_rate": 0.00019365222587623405, |
|
"loss": 0.6952, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.22435035120320537, |
|
"learning_rate": 0.00019349429380870873, |
|
"loss": 0.7388, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.2829985423238805, |
|
"learning_rate": 0.00019333448715692995, |
|
"loss": 0.7512, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.2496925408376058, |
|
"learning_rate": 0.0001931728091250615, |
|
"loss": 0.785, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.25741128000037666, |
|
"learning_rate": 0.00019300926295478884, |
|
"loss": 0.769, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.27594077518066623, |
|
"learning_rate": 0.00019284385192525405, |
|
"loss": 0.692, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.2853537905685808, |
|
"learning_rate": 0.0001926765793529902, |
|
"loss": 0.7143, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.2651467635782436, |
|
"learning_rate": 0.00019250744859185468, |
|
"loss": 0.7936, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.3346954952733162, |
|
"learning_rate": 0.00019233646303296205, |
|
"loss": 0.7081, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.26867358358733323, |
|
"learning_rate": 0.000192163626104616, |
|
"loss": 0.6809, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.28504787352387484, |
|
"learning_rate": 0.00019198894127224074, |
|
"loss": 0.7529, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.3899181097549748, |
|
"learning_rate": 0.00019181241203831137, |
|
"loss": 0.7733, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.3386788353875776, |
|
"learning_rate": 0.0001916340419422837, |
|
"loss": 0.7467, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.27850551310804966, |
|
"learning_rate": 0.00019145383456052327, |
|
"loss": 0.6916, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.2671031051978256, |
|
"learning_rate": 0.00019127179350623372, |
|
"loss": 0.6951, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.3758548589898052, |
|
"learning_rate": 0.00019108792242938425, |
|
"loss": 0.6892, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.301048604600236, |
|
"learning_rate": 0.0001909022250166365, |
|
"loss": 0.6979, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.23229573279853194, |
|
"learning_rate": 0.00019071470499127058, |
|
"loss": 0.6935, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.30646168773183613, |
|
"learning_rate": 0.00019052536611311046, |
|
"loss": 0.7406, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.23840081090154241, |
|
"learning_rate": 0.0001903342121784486, |
|
"loss": 0.6804, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.3415488557571022, |
|
"learning_rate": 0.00019014124701996973, |
|
"loss": 0.7208, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.2437048041319469, |
|
"learning_rate": 0.00018994647450667413, |
|
"loss": 0.7033, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.27194596378878544, |
|
"learning_rate": 0.00018974989854379996, |
|
"loss": 0.7523, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.29587923128889887, |
|
"learning_rate": 0.00018955152307274507, |
|
"loss": 0.687, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.2461081590732809, |
|
"learning_rate": 0.00018935135207098785, |
|
"loss": 0.6655, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.2924444087440772, |
|
"learning_rate": 0.00018914938955200754, |
|
"loss": 0.6819, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.2553944811817433, |
|
"learning_rate": 0.00018894563956520374, |
|
"loss": 0.7684, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.28957181182369396, |
|
"learning_rate": 0.0001887401061958153, |
|
"loss": 0.7766, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.2785032642860862, |
|
"learning_rate": 0.00018853279356483826, |
|
"loss": 0.7712, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.28527351035777204, |
|
"learning_rate": 0.00018832370582894334, |
|
"loss": 0.6425, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.3575295869608585, |
|
"learning_rate": 0.00018811284718039256, |
|
"loss": 0.6777, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.27729585332023104, |
|
"learning_rate": 0.00018790022184695523, |
|
"loss": 0.6822, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.35797548004437607, |
|
"learning_rate": 0.00018768583409182305, |
|
"loss": 0.7349, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.23438312320664967, |
|
"learning_rate": 0.00018746968821352483, |
|
"loss": 0.7424, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.30004449608881456, |
|
"learning_rate": 0.00018725178854584007, |
|
"loss": 0.7366, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.25679879736213285, |
|
"learning_rate": 0.00018703213945771229, |
|
"loss": 0.7473, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.28382114645109796, |
|
"learning_rate": 0.00018681074535316125, |
|
"loss": 0.8144, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.24972781946965186, |
|
"learning_rate": 0.00018658761067119484, |
|
"loss": 0.6927, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.30834044136784544, |
|
"learning_rate": 0.00018636273988571991, |
|
"loss": 0.6803, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.2649505204628836, |
|
"learning_rate": 0.00018613613750545255, |
|
"loss": 0.6411, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.27400500427151636, |
|
"learning_rate": 0.0001859078080738279, |
|
"loss": 0.7043, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.2723459029072118, |
|
"learning_rate": 0.00018567775616890888, |
|
"loss": 0.7234, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.2635191155504064, |
|
"learning_rate": 0.00018544598640329432, |
|
"loss": 0.7131, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.24356420411141796, |
|
"learning_rate": 0.00018521250342402672, |
|
"loss": 0.7638, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.21340372473955896, |
|
"learning_rate": 0.00018497731191249894, |
|
"loss": 0.7671, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.24554004517500114, |
|
"learning_rate": 0.00018474041658436027, |
|
"loss": 0.7679, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.2556998456537391, |
|
"learning_rate": 0.000184501822189422, |
|
"loss": 0.7323, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.25911000225159375, |
|
"learning_rate": 0.0001842615335115621, |
|
"loss": 0.7126, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.29529555659956847, |
|
"learning_rate": 0.00018401955536862948, |
|
"loss": 0.8061, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.2923465430082093, |
|
"learning_rate": 0.00018377589261234705, |
|
"loss": 0.7151, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.2186889691059259, |
|
"learning_rate": 0.0001835305501282148, |
|
"loss": 0.8274, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.2841289282189221, |
|
"learning_rate": 0.00018328353283541158, |
|
"loss": 0.724, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.2951337244107648, |
|
"learning_rate": 0.00018303484568669667, |
|
"loss": 0.6899, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.15330703509842647, |
|
"learning_rate": 0.00018278449366831035, |
|
"loss": 0.5995, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.24116956612307694, |
|
"learning_rate": 0.00018253248179987388, |
|
"loss": 0.7076, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.2871300642732821, |
|
"learning_rate": 0.00018227881513428908, |
|
"loss": 0.7362, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.2979215272527789, |
|
"learning_rate": 0.0001820234987576368, |
|
"loss": 0.7023, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.2663915798849715, |
|
"learning_rate": 0.00018176653778907492, |
|
"loss": 0.7347, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.29904674261298314, |
|
"learning_rate": 0.00018150793738073602, |
|
"loss": 0.6495, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.3165528665189004, |
|
"learning_rate": 0.00018124770271762364, |
|
"loss": 0.6942, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.3378794686368825, |
|
"learning_rate": 0.00018098583901750867, |
|
"loss": 0.7095, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.1302992690531124, |
|
"learning_rate": 0.00018072235153082455, |
|
"loss": 0.6416, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.254510249703615, |
|
"learning_rate": 0.00018045724554056214, |
|
"loss": 0.7611, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.32229289030294994, |
|
"learning_rate": 0.0001801905263621636, |
|
"loss": 0.7632, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.2671186610260709, |
|
"learning_rate": 0.0001799221993434159, |
|
"loss": 0.7229, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.21433416331211266, |
|
"learning_rate": 0.00017965226986434377, |
|
"loss": 0.6888, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.39600504574420103, |
|
"learning_rate": 0.00017938074333710157, |
|
"loss": 0.7202, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.3134397942298124, |
|
"learning_rate": 0.00017910762520586485, |
|
"loss": 0.7254, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.24898380650617621, |
|
"learning_rate": 0.00017883292094672128, |
|
"loss": 0.7194, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.32622878287643925, |
|
"learning_rate": 0.00017855663606756078, |
|
"loss": 0.6838, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.2301148369047203, |
|
"learning_rate": 0.00017827877610796514, |
|
"loss": 0.6755, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.35924771537726957, |
|
"learning_rate": 0.00017799934663909682, |
|
"loss": 0.7154, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.3190069733998683, |
|
"learning_rate": 0.00017771835326358743, |
|
"loss": 0.6468, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.26523634849673267, |
|
"learning_rate": 0.00017743580161542525, |
|
"loss": 0.656, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.2073552819004321, |
|
"learning_rate": 0.00017715169735984233, |
|
"loss": 0.662, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.29144492490212454, |
|
"learning_rate": 0.00017686604619320093, |
|
"loss": 0.7378, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.2826085444372445, |
|
"learning_rate": 0.0001765788538428792, |
|
"loss": 0.8061, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.3503084495094886, |
|
"learning_rate": 0.00017629012606715648, |
|
"loss": 0.7476, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.40190773179750555, |
|
"learning_rate": 0.00017599986865509767, |
|
"loss": 0.7002, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.3512537138147811, |
|
"learning_rate": 0.00017570808742643746, |
|
"loss": 0.7955, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.2686040145129207, |
|
"learning_rate": 0.00017541478823146327, |
|
"loss": 0.7205, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.2572903388079848, |
|
"learning_rate": 0.00017511997695089822, |
|
"loss": 0.7258, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.2746446577869629, |
|
"learning_rate": 0.00017482365949578302, |
|
"loss": 0.6587, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.2656858612857696, |
|
"learning_rate": 0.00017452584180735774, |
|
"loss": 0.7334, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.25317024575228997, |
|
"learning_rate": 0.00017422652985694237, |
|
"loss": 0.7051, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.2309813519294251, |
|
"learning_rate": 0.00017392572964581725, |
|
"loss": 0.7013, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.23745896843674344, |
|
"learning_rate": 0.00017362344720510278, |
|
"loss": 0.706, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.2965283402007185, |
|
"learning_rate": 0.00017331968859563834, |
|
"loss": 0.7749, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.26530828241172705, |
|
"learning_rate": 0.00017301445990786102, |
|
"loss": 0.7566, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.3188702889740104, |
|
"learning_rate": 0.00017270776726168317, |
|
"loss": 0.7122, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.269581496964367, |
|
"learning_rate": 0.00017239961680637, |
|
"loss": 0.7457, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.2706727342486633, |
|
"learning_rate": 0.00017209001472041617, |
|
"loss": 0.7937, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.25517714775768086, |
|
"learning_rate": 0.0001717789672114218, |
|
"loss": 0.7613, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.2636311888681917, |
|
"learning_rate": 0.00017146648051596822, |
|
"loss": 0.7079, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.2989402934427053, |
|
"learning_rate": 0.0001711525608994927, |
|
"loss": 0.6716, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.22104379693485937, |
|
"learning_rate": 0.00017083721465616306, |
|
"loss": 0.6682, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.330168955232277, |
|
"learning_rate": 0.00017052044810875126, |
|
"loss": 0.8137, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.31150819405882774, |
|
"learning_rate": 0.00017020226760850677, |
|
"loss": 0.6959, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.32633550990131655, |
|
"learning_rate": 0.00016988267953502913, |
|
"loss": 0.6908, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.2247055690731712, |
|
"learning_rate": 0.0001695616902961401, |
|
"loss": 0.6666, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.2810455775039889, |
|
"learning_rate": 0.00016923930632775516, |
|
"loss": 0.7025, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.25483406919968543, |
|
"learning_rate": 0.00016891553409375444, |
|
"loss": 0.7234, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.28464930239979624, |
|
"learning_rate": 0.00016859038008585326, |
|
"loss": 0.7341, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.32647501669318885, |
|
"learning_rate": 0.0001682638508234717, |
|
"loss": 0.6706, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.2092534288356594, |
|
"learning_rate": 0.0001679359528536041, |
|
"loss": 0.6493, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.3422837500211391, |
|
"learning_rate": 0.0001676066927506878, |
|
"loss": 0.7394, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.29359271678876697, |
|
"learning_rate": 0.00016727607711647114, |
|
"loss": 0.6866, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.24439616326346505, |
|
"learning_rate": 0.00016694411257988135, |
|
"loss": 0.6546, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.25431678595144386, |
|
"learning_rate": 0.00016661080579689132, |
|
"loss": 0.7069, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.2643304489008311, |
|
"learning_rate": 0.00016627616345038642, |
|
"loss": 0.6616, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.3025866711501466, |
|
"learning_rate": 0.0001659401922500304, |
|
"loss": 0.7082, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.2576690605267584, |
|
"learning_rate": 0.0001656028989321309, |
|
"loss": 0.6431, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.23465616583898175, |
|
"learning_rate": 0.00016526429025950424, |
|
"loss": 0.6826, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.265312873732132, |
|
"learning_rate": 0.00016492437302134008, |
|
"loss": 0.7189, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.24774713265915735, |
|
"learning_rate": 0.00016458315403306502, |
|
"loss": 0.7884, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.30882736567653557, |
|
"learning_rate": 0.00016424064013620615, |
|
"loss": 0.6989, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.29435047655097046, |
|
"learning_rate": 0.0001638968381982538, |
|
"loss": 0.6983, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.24898245263549598, |
|
"learning_rate": 0.0001635517551125238, |
|
"loss": 0.8087, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.3361644987929908, |
|
"learning_rate": 0.0001632053977980194, |
|
"loss": 0.7269, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.3313137963733492, |
|
"learning_rate": 0.0001628577731992924, |
|
"loss": 0.7212, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.40222894480073607, |
|
"learning_rate": 0.000162508888286304, |
|
"loss": 0.7047, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.28365087568805586, |
|
"learning_rate": 0.00016215875005428499, |
|
"loss": 0.7654, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.22437081577512294, |
|
"learning_rate": 0.00016180736552359553, |
|
"loss": 0.7497, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.2713094825399269, |
|
"learning_rate": 0.0001614547417395844, |
|
"loss": 0.772, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.31175625126445866, |
|
"learning_rate": 0.00016110088577244773, |
|
"loss": 0.7136, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.295775151866562, |
|
"learning_rate": 0.0001607458047170872, |
|
"loss": 0.7448, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 0.29743619662352133, |
|
"learning_rate": 0.00016038950569296785, |
|
"loss": 0.6254, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.23830246086659249, |
|
"learning_rate": 0.00016003199584397528, |
|
"loss": 0.7976, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.24738045684528134, |
|
"learning_rate": 0.00015967328233827249, |
|
"loss": 0.6983, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.27326756366363736, |
|
"learning_rate": 0.000159313372368156, |
|
"loss": 0.6441, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.3475531782988717, |
|
"learning_rate": 0.00015895227314991178, |
|
"loss": 0.6814, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.3148530822785684, |
|
"learning_rate": 0.0001585899919236706, |
|
"loss": 0.6948, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.25377488886498006, |
|
"learning_rate": 0.00015822653595326275, |
|
"loss": 0.7238, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.3013200425652317, |
|
"learning_rate": 0.0001578619125260724, |
|
"loss": 0.75, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.2801858745291455, |
|
"learning_rate": 0.00015749612895289152, |
|
"loss": 0.6473, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.2643097355421632, |
|
"learning_rate": 0.00015712919256777335, |
|
"loss": 0.732, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.24859906355405817, |
|
"learning_rate": 0.00015676111072788527, |
|
"loss": 0.684, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.2978599865430522, |
|
"learning_rate": 0.0001563918908133614, |
|
"loss": 0.718, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.3175233807927027, |
|
"learning_rate": 0.00015602154022715435, |
|
"loss": 0.7452, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.29411432367231416, |
|
"learning_rate": 0.00015565006639488722, |
|
"loss": 0.6951, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.24712225993420803, |
|
"learning_rate": 0.0001552774767647043, |
|
"loss": 0.6921, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.296954935837503, |
|
"learning_rate": 0.000154903778807122, |
|
"loss": 0.695, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.3090817515220463, |
|
"learning_rate": 0.0001545289800148789, |
|
"loss": 0.725, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.25680112732997773, |
|
"learning_rate": 0.00015415308790278572, |
|
"loss": 0.7326, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.2713659444254739, |
|
"learning_rate": 0.0001537761100075744, |
|
"loss": 0.7369, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.3498089133387774, |
|
"learning_rate": 0.00015339805388774714, |
|
"loss": 0.7819, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.2657852286056311, |
|
"learning_rate": 0.00015301892712342482, |
|
"loss": 0.7273, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.25285783621953456, |
|
"learning_rate": 0.00015263873731619508, |
|
"loss": 0.766, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.3072970711277045, |
|
"learning_rate": 0.00015225749208895968, |
|
"loss": 0.7195, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.3660032567021492, |
|
"learning_rate": 0.000151875199085782, |
|
"loss": 0.7699, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.2924264482609829, |
|
"learning_rate": 0.0001514918659717335, |
|
"loss": 0.7105, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.3309598652416674, |
|
"learning_rate": 0.00015110750043274008, |
|
"loss": 0.732, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.25120067042745076, |
|
"learning_rate": 0.00015072211017542813, |
|
"loss": 0.7447, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.29548371062233497, |
|
"learning_rate": 0.0001503357029269698, |
|
"loss": 0.6245, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.2882495017189674, |
|
"learning_rate": 0.00014994828643492827, |
|
"loss": 0.7354, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.2926891269519082, |
|
"learning_rate": 0.00014955986846710222, |
|
"loss": 0.7234, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.3208205900575705, |
|
"learning_rate": 0.00014917045681137026, |
|
"loss": 0.6859, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.28730436475223414, |
|
"learning_rate": 0.00014878005927553456, |
|
"loss": 0.655, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.31375609878578203, |
|
"learning_rate": 0.0001483886836871646, |
|
"loss": 0.7068, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.28634685391130577, |
|
"learning_rate": 0.00014799633789343994, |
|
"loss": 0.7873, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.29219569906271287, |
|
"learning_rate": 0.00014760302976099304, |
|
"loss": 0.6414, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.2809961899361123, |
|
"learning_rate": 0.00014720876717575155, |
|
"loss": 0.689, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.2966023851758411, |
|
"learning_rate": 0.00014681355804278001, |
|
"loss": 0.6939, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.22135705676789802, |
|
"learning_rate": 0.00014641741028612162, |
|
"loss": 0.6388, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.2017382590189762, |
|
"learning_rate": 0.00014602033184863913, |
|
"loss": 0.7172, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.29520895754468235, |
|
"learning_rate": 0.00014562233069185572, |
|
"loss": 0.8331, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.25912176225264727, |
|
"learning_rate": 0.00014522341479579533, |
|
"loss": 0.7583, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.27070806912751083, |
|
"learning_rate": 0.0001448235921588226, |
|
"loss": 0.7706, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.22470926163816077, |
|
"learning_rate": 0.00014442287079748263, |
|
"loss": 0.701, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.26012437368269403, |
|
"learning_rate": 0.00014402125874634012, |
|
"loss": 0.6856, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.27655512567748813, |
|
"learning_rate": 0.00014361876405781832, |
|
"loss": 0.7092, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.22547503156029328, |
|
"learning_rate": 0.00014321539480203764, |
|
"loss": 0.7723, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.35807881396099706, |
|
"learning_rate": 0.00014281115906665374, |
|
"loss": 0.699, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.25312726731980933, |
|
"learning_rate": 0.00014240606495669538, |
|
"loss": 0.694, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.3021591327476025, |
|
"learning_rate": 0.00014200012059440207, |
|
"loss": 0.6978, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.3776238352797525, |
|
"learning_rate": 0.00014159333411906095, |
|
"loss": 0.7518, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.24540414704743882, |
|
"learning_rate": 0.00014118571368684383, |
|
"loss": 0.7891, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.3138641993671755, |
|
"learning_rate": 0.00014077726747064353, |
|
"loss": 0.6213, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.24104497594679253, |
|
"learning_rate": 0.00014036800365991008, |
|
"loss": 0.6773, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.2712106009342219, |
|
"learning_rate": 0.00013995793046048643, |
|
"loss": 0.6977, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.2847114290804009, |
|
"learning_rate": 0.00013954705609444404, |
|
"loss": 0.6903, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.31132954968304133, |
|
"learning_rate": 0.0001391353887999179, |
|
"loss": 0.7666, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.2648924104898787, |
|
"learning_rate": 0.00013872293683094152, |
|
"loss": 0.6656, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.2628528269004981, |
|
"learning_rate": 0.00013830970845728126, |
|
"loss": 0.7704, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.21693389371822336, |
|
"learning_rate": 0.00013789571196427055, |
|
"loss": 0.6493, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.2447657421175684, |
|
"learning_rate": 0.00013748095565264384, |
|
"loss": 0.6459, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.3180367525541911, |
|
"learning_rate": 0.00013706544783837022, |
|
"loss": 0.7049, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.2972710048973474, |
|
"learning_rate": 0.00013664919685248649, |
|
"loss": 0.7238, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.29131857236762787, |
|
"learning_rate": 0.00013623221104093025, |
|
"loss": 0.8293, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.27510973199735633, |
|
"learning_rate": 0.0001358144987643726, |
|
"loss": 0.7285, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.23692910598713893, |
|
"learning_rate": 0.00013539606839805036, |
|
"loss": 0.7199, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.29598035037077913, |
|
"learning_rate": 0.00013497692833159828, |
|
"loss": 0.7042, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.25149104979862424, |
|
"learning_rate": 0.00013455708696888085, |
|
"loss": 0.6942, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.25496421670247854, |
|
"learning_rate": 0.00013413655272782356, |
|
"loss": 0.7256, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.2554748374618532, |
|
"learning_rate": 0.00013371533404024438, |
|
"loss": 0.7607, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.2627306476634245, |
|
"learning_rate": 0.00013329343935168466, |
|
"loss": 0.7101, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.24657317727636707, |
|
"learning_rate": 0.00013287087712123962, |
|
"loss": 0.7726, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.2559430395751914, |
|
"learning_rate": 0.000132447655821389, |
|
"loss": 0.6601, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.3049767818100991, |
|
"learning_rate": 0.00013202378393782692, |
|
"loss": 0.7961, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.28360752211200474, |
|
"learning_rate": 0.00013159926996929192, |
|
"loss": 0.6639, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.26804633258042826, |
|
"learning_rate": 0.00013117412242739655, |
|
"loss": 0.6503, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.2411104477481089, |
|
"learning_rate": 0.00013074834983645657, |
|
"loss": 0.7532, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.2900355787539025, |
|
"learning_rate": 0.00013032196073332027, |
|
"loss": 0.7479, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.2568819906683512, |
|
"learning_rate": 0.000129894963667197, |
|
"loss": 0.7433, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.22709405983327266, |
|
"learning_rate": 0.00012946736719948607, |
|
"loss": 0.7163, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.28704276569845133, |
|
"learning_rate": 0.00012903917990360485, |
|
"loss": 0.6965, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.2573742688385154, |
|
"learning_rate": 0.000128610410364817, |
|
"loss": 0.6919, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.3570766420018621, |
|
"learning_rate": 0.00012818106718006023, |
|
"loss": 0.6073, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.2471571809588589, |
|
"learning_rate": 0.00012775115895777417, |
|
"loss": 0.753, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.25821147959621743, |
|
"learning_rate": 0.0001273206943177274, |
|
"loss": 0.6319, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.30013480040772167, |
|
"learning_rate": 0.00012688968189084493, |
|
"loss": 0.7263, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.28654821268777825, |
|
"learning_rate": 0.000126458130319035, |
|
"loss": 0.7259, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.29336529390659893, |
|
"learning_rate": 0.00012602604825501587, |
|
"loss": 0.7032, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.20273978059145287, |
|
"learning_rate": 0.00012559344436214223, |
|
"loss": 0.6322, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.2483147925181707, |
|
"learning_rate": 0.00012516032731423165, |
|
"loss": 0.7185, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.28968579800640504, |
|
"learning_rate": 0.00012472670579539055, |
|
"loss": 0.7028, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.24871377665254096, |
|
"learning_rate": 0.00012429258849984014, |
|
"loss": 0.7093, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.23758441118744317, |
|
"learning_rate": 0.00012385798413174206, |
|
"loss": 0.7267, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.29112304576955467, |
|
"learning_rate": 0.00012342290140502388, |
|
"loss": 0.7714, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.246124461049581, |
|
"learning_rate": 0.00012298734904320438, |
|
"loss": 0.6727, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.25856322825399286, |
|
"learning_rate": 0.00012255133577921868, |
|
"loss": 0.7769, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.22410369058426746, |
|
"learning_rate": 0.00012211487035524305, |
|
"loss": 0.6866, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.28079722454610034, |
|
"learning_rate": 0.0001216779615225197, |
|
"loss": 0.6963, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.30696044399665856, |
|
"learning_rate": 0.00012124061804118137, |
|
"loss": 0.7051, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.247967331116475, |
|
"learning_rate": 0.00012080284868007541, |
|
"loss": 0.6944, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.21128171750447838, |
|
"learning_rate": 0.00012036466221658847, |
|
"loss": 0.7199, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.31901890228430696, |
|
"learning_rate": 0.0001199260674364699, |
|
"loss": 0.7786, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.2755016732959131, |
|
"learning_rate": 0.00011948707313365614, |
|
"loss": 0.8069, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.28668018656021443, |
|
"learning_rate": 0.00011904768811009405, |
|
"loss": 0.7157, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.3328138328405363, |
|
"learning_rate": 0.00011860792117556454, |
|
"loss": 0.7099, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.21788073287798893, |
|
"learning_rate": 0.00011816778114750593, |
|
"loss": 0.7644, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.23062823471771338, |
|
"learning_rate": 0.00011772727685083724, |
|
"loss": 0.7716, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.26093096066749466, |
|
"learning_rate": 0.00011728641711778103, |
|
"loss": 0.7229, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.23801319602695262, |
|
"learning_rate": 0.00011684521078768658, |
|
"loss": 0.6845, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.18154354473533865, |
|
"learning_rate": 0.00011640366670685248, |
|
"loss": 0.6315, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.29572089884772323, |
|
"learning_rate": 0.00011596179372834924, |
|
"loss": 0.8437, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.27149388513396633, |
|
"learning_rate": 0.00011551960071184195, |
|
"loss": 0.8232, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.24540172557967463, |
|
"learning_rate": 0.00011507709652341256, |
|
"loss": 0.7132, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.26964683600534267, |
|
"learning_rate": 0.00011463429003538196, |
|
"loss": 0.7275, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.287051761373442, |
|
"learning_rate": 0.00011419119012613233, |
|
"loss": 0.7184, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.2663459541020728, |
|
"learning_rate": 0.000113747805679929, |
|
"loss": 0.7964, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.28638851869565946, |
|
"learning_rate": 0.00011330414558674234, |
|
"loss": 0.7129, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.3786862886137478, |
|
"learning_rate": 0.00011286021874206952, |
|
"loss": 0.6706, |
|
"step": 2000 |
|
}, |
|
    {
      "epoch": 0.51,
      "grad_norm": 0.34276302477862775,
      "learning_rate": 0.00011241603404675609,
      "loss": 0.7856,
      "step": 2005
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.3366461922626376,
      "learning_rate": 0.00011197160040681762,
      "loss": 0.8178,
      "step": 2010
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.25846779036672146,
      "learning_rate": 0.00011152692673326111,
      "loss": 0.7387,
      "step": 2015
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.2364492132114722,
      "learning_rate": 0.0001110820219419062,
      "loss": 0.7434,
      "step": 2020
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.2699184257070918,
      "learning_rate": 0.00011063689495320661,
      "loss": 0.7102,
      "step": 2025
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.2506412498240298,
      "learning_rate": 0.0001101915546920711,
      "loss": 0.7451,
      "step": 2030
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.4833851120529976,
      "learning_rate": 0.00010974601008768461,
      "loss": 0.6558,
      "step": 2035
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.25503131460937667,
      "learning_rate": 0.00010930027007332923,
      "loss": 0.6642,
      "step": 2040
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.24985640167049433,
      "learning_rate": 0.00010885434358620507,
      "loss": 0.737,
      "step": 2045
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.28451521701088694,
      "learning_rate": 0.00010840823956725103,
      "loss": 0.7406,
      "step": 2050
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.29244380187254954,
      "learning_rate": 0.00010796196696096564,
      "loss": 0.7047,
      "step": 2055
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.2236043726332343,
      "learning_rate": 0.00010751553471522757,
      "loss": 0.7312,
      "step": 2060
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.27232955976439666,
      "learning_rate": 0.00010706895178111634,
      "loss": 0.7284,
      "step": 2065
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.30610542432506455,
      "learning_rate": 0.00010662222711273279,
      "loss": 0.6335,
      "step": 2070
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.23424590700585504,
      "learning_rate": 0.00010617536966701962,
      "loss": 0.7269,
      "step": 2075
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.25989610387137024,
      "learning_rate": 0.00010572838840358168,
      "loss": 0.6304,
      "step": 2080
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.2830081164838504,
      "learning_rate": 0.00010528129228450645,
      "loss": 0.6597,
      "step": 2085
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.2603374987935435,
      "learning_rate": 0.00010483409027418425,
      "loss": 0.7489,
      "step": 2090
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.24810278191222906,
      "learning_rate": 0.00010438679133912858,
      "loss": 0.6502,
      "step": 2095
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.2528579675694808,
      "learning_rate": 0.00010393940444779635,
      "loss": 0.6969,
      "step": 2100
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.28413657506453693,
      "learning_rate": 0.00010349193857040787,
      "loss": 0.7726,
      "step": 2105
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.3144890009802897,
      "learning_rate": 0.00010304440267876727,
      "loss": 0.7856,
      "step": 2110
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.2397459238431048,
      "learning_rate": 0.00010259680574608248,
      "loss": 0.764,
      "step": 2115
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.2868615633571584,
      "learning_rate": 0.00010214915674678523,
      "loss": 0.7677,
      "step": 2120
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.2497448183867612,
      "learning_rate": 0.00010170146465635126,
      "loss": 0.7783,
      "step": 2125
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.253908792089182,
      "learning_rate": 0.00010125373845112034,
      "loss": 0.7461,
      "step": 2130
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.23687933586461904,
      "learning_rate": 0.00010080598710811615,
      "loss": 0.69,
      "step": 2135
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.3734210753745544,
      "learning_rate": 0.00010035821960486643,
      "loss": 0.77,
      "step": 2140
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.2788998422640762,
      "learning_rate": 9.9910444919223e-05,
      "loss": 0.7058,
      "step": 2145
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.3178188169556896,
      "learning_rate": 9.946267202918157e-05,
      "loss": 0.6965,
      "step": 2150
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.26945241065973596,
      "learning_rate": 9.901490991270201e-05,
      "loss": 0.702,
      "step": 2155
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.5207203316817794,
      "learning_rate": 9.856716754752796e-05,
      "loss": 0.6478,
      "step": 2160
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.29256712809053254,
      "learning_rate": 9.811945391100724e-05,
      "loss": 0.7372,
      "step": 2165
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.25601912122707865,
      "learning_rate": 9.767177797991155e-05,
      "loss": 0.7444,
      "step": 2170
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.23247510741792463,
      "learning_rate": 9.722414873025664e-05,
      "loss": 0.6836,
      "step": 2175
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.2460904591621441,
      "learning_rate": 9.677657513712221e-05,
      "loss": 0.7268,
      "step": 2180
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.26208399329605536,
      "learning_rate": 9.632906617447212e-05,
      "loss": 0.6924,
      "step": 2185
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.22915215031643407,
      "learning_rate": 9.588163081497427e-05,
      "loss": 0.7864,
      "step": 2190
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.3119119948293986,
      "learning_rate": 9.543427802982095e-05,
      "loss": 0.768,
      "step": 2195
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.2973539056568546,
      "learning_rate": 9.498701678854865e-05,
      "loss": 0.6836,
      "step": 2200
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.24915238391515174,
      "learning_rate": 9.453985605885851e-05,
      "loss": 0.7307,
      "step": 2205
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.29466985701358944,
      "learning_rate": 9.409280480643628e-05,
      "loss": 0.7751,
      "step": 2210
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.31502684795729613,
      "learning_rate": 9.364587199477276e-05,
      "loss": 0.7183,
      "step": 2215
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.23712422669700867,
      "learning_rate": 9.319906658498389e-05,
      "loss": 0.7522,
      "step": 2220
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.2570700308864798,
      "learning_rate": 9.275239753563128e-05,
      "loss": 0.7171,
      "step": 2225
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.23813307578157708,
      "learning_rate": 9.230587380254237e-05,
      "loss": 0.689,
      "step": 2230
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.2656059785729245,
      "learning_rate": 9.185950433863107e-05,
      "loss": 0.6694,
      "step": 2235
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.1857069050131154,
      "learning_rate": 9.141329809371803e-05,
      "loss": 0.7626,
      "step": 2240
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.25034208426960974,
      "learning_rate": 9.096726401435146e-05,
      "loss": 0.7116,
      "step": 2245
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.22067489301839913,
      "learning_rate": 9.052141104362748e-05,
      "loss": 0.6499,
      "step": 2250
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.23199516425281616,
      "learning_rate": 9.007574812101107e-05,
      "loss": 0.7501,
      "step": 2255
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.25421299632271965,
      "learning_rate": 8.963028418215653e-05,
      "loss": 0.8147,
      "step": 2260
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.260651537286194,
      "learning_rate": 8.918502815872865e-05,
      "loss": 0.7485,
      "step": 2265
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.2655797468541603,
      "learning_rate": 8.873998897822336e-05,
      "loss": 0.7505,
      "step": 2270
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.2917164025171058,
      "learning_rate": 8.829517556378888e-05,
      "loss": 0.6755,
      "step": 2275
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.3350035349023889,
      "learning_rate": 8.785059683404672e-05,
      "loss": 0.7015,
      "step": 2280
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.3253925397932177,
      "learning_rate": 8.740626170291297e-05,
      "loss": 0.7551,
      "step": 2285
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.2899593792802913,
      "learning_rate": 8.696217907941941e-05,
      "loss": 0.7185,
      "step": 2290
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.274238966245006,
      "learning_rate": 8.65183578675351e-05,
      "loss": 0.7031,
      "step": 2295
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.27886346870120726,
      "learning_rate": 8.607480696598762e-05,
      "loss": 0.61,
      "step": 2300
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.3552726057492985,
      "learning_rate": 8.563153526808484e-05,
      "loss": 0.7706,
      "step": 2305
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.26275345892575847,
      "learning_rate": 8.518855166153644e-05,
      "loss": 0.6925,
      "step": 2310
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.3537795284485823,
      "learning_rate": 8.474586502827591e-05,
      "loss": 0.7005,
      "step": 2315
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.2113496203404715,
      "learning_rate": 8.43034842442822e-05,
      "loss": 0.6709,
      "step": 2320
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.46932602124140005,
      "learning_rate": 8.386141817940213e-05,
      "loss": 0.7019,
      "step": 2325
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.2647970131377573,
      "learning_rate": 8.341967569717202e-05,
      "loss": 0.6522,
      "step": 2330
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.2495289774517795,
      "learning_rate": 8.297826565464057e-05,
      "loss": 0.6333,
      "step": 2335
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.317401195281625,
      "learning_rate": 8.253719690219079e-05,
      "loss": 0.7297,
      "step": 2340
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.2722505836316711,
      "learning_rate": 8.20964782833628e-05,
      "loss": 0.6206,
      "step": 2345
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.3347413066633305,
      "learning_rate": 8.165611863467644e-05,
      "loss": 0.7258,
      "step": 2350
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.23149723477405734,
      "learning_rate": 8.12161267854542e-05,
      "loss": 0.6872,
      "step": 2355
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.21852198583337765,
      "learning_rate": 8.077651155764387e-05,
      "loss": 0.6529,
      "step": 2360
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.23663927702457488,
      "learning_rate": 8.033728176564216e-05,
      "loss": 0.7468,
      "step": 2365
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.28901081689808944,
      "learning_rate": 7.98984462161175e-05,
      "loss": 0.7436,
      "step": 2370
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.25802965477339335,
      "learning_rate": 7.946001370783375e-05,
      "loss": 0.7899,
      "step": 2375
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.22951991679268438,
      "learning_rate": 7.902199303147363e-05,
      "loss": 0.6861,
      "step": 2380
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.3057181147340439,
      "learning_rate": 7.858439296946263e-05,
      "loss": 0.7769,
      "step": 2385
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.3159742724328365,
      "learning_rate": 7.814722229579264e-05,
      "loss": 0.6756,
      "step": 2390
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.27245093189237785,
      "learning_rate": 7.771048977584641e-05,
      "loss": 0.6128,
      "step": 2395
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.324925192823721,
      "learning_rate": 7.727420416622144e-05,
      "loss": 0.8306,
      "step": 2400
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.2999192623424217,
      "learning_rate": 7.683837421455466e-05,
      "loss": 0.6782,
      "step": 2405
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.25980740031389876,
      "learning_rate": 7.640300865934687e-05,
      "loss": 0.6913,
      "step": 2410
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.25802354488510615,
      "learning_rate": 7.596811622978765e-05,
      "loss": 0.7178,
      "step": 2415
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.26975633006496086,
      "learning_rate": 7.553370564558032e-05,
      "loss": 0.7667,
      "step": 2420
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.3248726081338585,
      "learning_rate": 7.509978561676699e-05,
      "loss": 0.7559,
      "step": 2425
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.27315448139602455,
      "learning_rate": 7.46663648435541e-05,
      "loss": 0.6903,
      "step": 2430
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.2950802099290335,
      "learning_rate": 7.423345201613778e-05,
      "loss": 0.6731,
      "step": 2435
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.35343151310449955,
      "learning_rate": 7.380105581452987e-05,
      "loss": 0.735,
      "step": 2440
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.2766967860281418,
      "learning_rate": 7.336918490838356e-05,
      "loss": 0.7779,
      "step": 2445
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.255190126277391,
      "learning_rate": 7.293784795681994e-05,
      "loss": 0.7213,
      "step": 2450
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.23722517953499153,
      "learning_rate": 7.250705360825392e-05,
      "loss": 0.6734,
      "step": 2455
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.30683380454221565,
      "learning_rate": 7.207681050022132e-05,
      "loss": 0.7439,
      "step": 2460
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.3072464317121022,
      "learning_rate": 7.16471272592053e-05,
      "loss": 0.7804,
      "step": 2465
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.2565247543734341,
      "learning_rate": 7.121801250046363e-05,
      "loss": 0.7678,
      "step": 2470
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.2106442564735487,
      "learning_rate": 7.078947482785576e-05,
      "loss": 0.7063,
      "step": 2475
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.2476449919758559,
      "learning_rate": 7.036152283367056e-05,
      "loss": 0.6918,
      "step": 2480
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.247411584313298,
      "learning_rate": 6.993416509845376e-05,
      "loss": 0.7182,
      "step": 2485
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.280770588683194,
      "learning_rate": 6.950741019083617e-05,
      "loss": 0.7192,
      "step": 2490
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.28453079995263136,
      "learning_rate": 6.908126666736165e-05,
      "loss": 0.6912,
      "step": 2495
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.34116367134660075,
      "learning_rate": 6.865574307231575e-05,
      "loss": 0.7064,
      "step": 2500
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.20407453115123925,
      "learning_rate": 6.823084793755417e-05,
      "loss": 0.6835,
      "step": 2505
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.34227090827399664,
      "learning_rate": 6.780658978233199e-05,
      "loss": 0.7364,
      "step": 2510
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.20382515840687232,
      "learning_rate": 6.73829771131325e-05,
      "loss": 0.7278,
      "step": 2515
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.32066035575970564,
      "learning_rate": 6.696001842349702e-05,
      "loss": 0.6735,
      "step": 2520
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.30087509915880645,
      "learning_rate": 6.653772219385427e-05,
      "loss": 0.7563,
      "step": 2525
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.2609826902639363,
      "learning_rate": 6.611609689135056e-05,
      "loss": 0.7865,
      "step": 2530
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.2541688409079705,
      "learning_rate": 6.569515096967991e-05,
      "loss": 0.645,
      "step": 2535
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.18975728074739295,
      "learning_rate": 6.527489286891459e-05,
      "loss": 0.5808,
      "step": 2540
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.2395876243767242,
      "learning_rate": 6.485533101533583e-05,
      "loss": 0.6973,
      "step": 2545
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.3063762562351843,
      "learning_rate": 6.443647382126509e-05,
      "loss": 0.7221,
      "step": 2550
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.2674321599248453,
      "learning_rate": 6.401832968489501e-05,
      "loss": 0.6818,
      "step": 2555
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.2231267411467715,
      "learning_rate": 6.360090699012145e-05,
      "loss": 0.7404,
      "step": 2560
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.27582828393948144,
      "learning_rate": 6.3184214106375e-05,
      "loss": 0.6741,
      "step": 2565
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.27355370388789374,
      "learning_rate": 6.27682593884535e-05,
      "loss": 0.6353,
      "step": 2570
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.24340603859531101,
      "learning_rate": 6.235305117635428e-05,
      "loss": 0.7348,
      "step": 2575
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.24374512948837185,
      "learning_rate": 6.193859779510712e-05,
      "loss": 0.6906,
      "step": 2580
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.2617782514018871,
      "learning_rate": 6.152490755460715e-05,
      "loss": 0.6707,
      "step": 2585
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.25140263838285776,
      "learning_rate": 6.111198874944845e-05,
      "loss": 0.6892,
      "step": 2590
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.2382806536370909,
      "learning_rate": 6.0699849658757545e-05,
      "loss": 0.7549,
      "step": 2595
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.2176694237385903,
      "learning_rate": 6.0288498546027536e-05,
      "loss": 0.7753,
      "step": 2600
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.23853235406621046,
      "learning_rate": 5.987794365895229e-05,
      "loss": 0.6735,
      "step": 2605
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.33567228876946426,
      "learning_rate": 5.946819322926127e-05,
      "loss": 0.6817,
      "step": 2610
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.2947916364685711,
      "learning_rate": 5.9059255472554195e-05,
      "loss": 0.6897,
      "step": 2615
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.25054528848541685,
      "learning_rate": 5.865113858813673e-05,
      "loss": 0.6654,
      "step": 2620
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.295602257498029,
      "learning_rate": 5.824385075885557e-05,
      "loss": 0.6837,
      "step": 2625
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.22560608641117053,
      "learning_rate": 5.783740015093484e-05,
      "loss": 0.7137,
      "step": 2630
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.2710250240814673,
      "learning_rate": 5.743179491381204e-05,
      "loss": 0.7015,
      "step": 2635
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.25266821650481586,
      "learning_rate": 5.702704317997492e-05,
      "loss": 0.6724,
      "step": 2640
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.23287193866674372,
      "learning_rate": 5.662315306479806e-05,
      "loss": 0.7023,
      "step": 2645
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.3278896346775186,
      "learning_rate": 5.6220132666380635e-05,
      "loss": 0.6209,
      "step": 2650
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.3090011108241083,
      "learning_rate": 5.581799006538354e-05,
      "loss": 0.7575,
      "step": 2655
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.30486291365985196,
      "learning_rate": 5.541673332486773e-05,
      "loss": 0.741,
      "step": 2660
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.29222447612374014,
      "learning_rate": 5.5016370490132364e-05,
      "loss": 0.8456,
      "step": 2665
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.30928681296284377,
      "learning_rate": 5.4616909588553674e-05,
      "loss": 0.785,
      "step": 2670
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.2751241257699663,
      "learning_rate": 5.4218358629423794e-05,
      "loss": 0.797,
      "step": 2675
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.3020155622331884,
      "learning_rate": 5.3820725603790346e-05,
      "loss": 0.7335,
      "step": 2680
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.3132294366433861,
      "learning_rate": 5.342401848429615e-05,
      "loss": 0.7751,
      "step": 2685
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.2617050345460146,
      "learning_rate": 5.30282452250193e-05,
      "loss": 0.7865,
      "step": 2690
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.3088811287968099,
      "learning_rate": 5.263341376131382e-05,
      "loss": 0.7012,
      "step": 2695
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.3053801166515793,
      "learning_rate": 5.223953200965055e-05,
      "loss": 0.733,
      "step": 2700
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.23354984138597276,
      "learning_rate": 5.1846607867458196e-05,
      "loss": 0.8136,
      "step": 2705
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.2958514046814973,
      "learning_rate": 5.145464921296537e-05,
      "loss": 0.7738,
      "step": 2710
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.30080922548213934,
      "learning_rate": 5.1063663905042225e-05,
      "loss": 0.6893,
      "step": 2715
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.3034194981171166,
      "learning_rate": 5.067365978304315e-05,
      "loss": 0.6922,
      "step": 2720
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.33312809082970085,
      "learning_rate": 5.0284644666649485e-05,
      "loss": 0.6818,
      "step": 2725
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.2615876279834696,
      "learning_rate": 4.9896626355712805e-05,
      "loss": 0.6837,
      "step": 2730
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.23784349313669656,
      "learning_rate": 4.95096126300984e-05,
      "loss": 0.6899,
      "step": 2735
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.2103897994837222,
      "learning_rate": 4.912361124952948e-05,
      "loss": 0.7308,
      "step": 2740
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.23734181510531857,
      "learning_rate": 4.873862995343139e-05,
      "loss": 0.6635,
      "step": 2745
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.28754328918934463,
      "learning_rate": 4.835467646077656e-05,
      "loss": 0.7335,
      "step": 2750
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.2801177269384902,
      "learning_rate": 4.797175846992964e-05,
      "loss": 0.6538,
      "step": 2755
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.20377437169807378,
      "learning_rate": 4.7589883658493296e-05,
      "loss": 0.7343,
      "step": 2760
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.2633015839389786,
      "learning_rate": 4.72090596831542e-05,
      "loss": 0.7279,
      "step": 2765
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.3365363313211822,
      "learning_rate": 4.682929417952939e-05,
      "loss": 0.6493,
      "step": 2770
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.26197423166598477,
      "learning_rate": 4.645059476201333e-05,
      "loss": 0.6823,
      "step": 2775
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.22148899156479446,
      "learning_rate": 4.6072969023625165e-05,
      "loss": 0.7062,
      "step": 2780
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.25093425817830756,
      "learning_rate": 4.5696424535856574e-05,
      "loss": 0.7058,
      "step": 2785
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.29374695702116843,
      "learning_rate": 4.532096884851978e-05,
      "loss": 0.7307,
      "step": 2790
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.24148367178848454,
      "learning_rate": 4.494660948959645e-05,
      "loss": 0.7908,
      "step": 2795
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.29661735702318875,
      "learning_rate": 4.457335396508631e-05,
      "loss": 0.6051,
      "step": 2800
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.2587416887380861,
      "learning_rate": 4.420120975885723e-05,
      "loss": 0.7074,
      "step": 2805
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.25077785676951697,
      "learning_rate": 4.383018433249464e-05,
      "loss": 0.6776,
      "step": 2810
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.26766730021230084,
      "learning_rate": 4.346028512515232e-05,
      "loss": 0.5915,
      "step": 2815
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.26255599946956093,
      "learning_rate": 4.309151955340297e-05,
      "loss": 0.7202,
      "step": 2820
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.23590120420333469,
      "learning_rate": 4.272389501108962e-05,
      "loss": 0.754,
      "step": 2825
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.34961647734551377,
      "learning_rate": 4.2357418869177354e-05,
      "loss": 0.7484,
      "step": 2830
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.2567914752516631,
      "learning_rate": 4.1992098475605654e-05,
      "loss": 0.7758,
      "step": 2835
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.2859760124915357,
      "learning_rate": 4.162794115514078e-05,
      "loss": 0.7259,
      "step": 2840
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.40354694842899336,
      "learning_rate": 4.1264954209229254e-05,
      "loss": 0.6554,
      "step": 2845
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.31410918890445977,
      "learning_rate": 4.0903144915851174e-05,
      "loss": 0.6495,
      "step": 2850
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.21920834371525927,
      "learning_rate": 4.054252052937444e-05,
      "loss": 0.6159,
      "step": 2855
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.2919386673597319,
      "learning_rate": 4.018308828040924e-05,
      "loss": 0.7266,
      "step": 2860
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.2907127546681081,
      "learning_rate": 3.982485537566321e-05,
      "loss": 0.7245,
      "step": 2865
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.19919891868305908,
      "learning_rate": 3.946782899779667e-05,
      "loss": 0.6152,
      "step": 2870
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.2874798645438607,
      "learning_rate": 3.911201630527894e-05,
      "loss": 0.7391,
      "step": 2875
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.29325850253665986,
      "learning_rate": 3.875742443224451e-05,
      "loss": 0.6121,
      "step": 2880
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.3084377618826001,
      "learning_rate": 3.84040604883502e-05,
      "loss": 0.7255,
      "step": 2885
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.2536420954931237,
      "learning_rate": 3.805193155863247e-05,
      "loss": 0.636,
      "step": 2890
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.22759460676784352,
      "learning_rate": 3.770104470336555e-05,
      "loss": 0.717,
      "step": 2895
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.22465050243662746,
      "learning_rate": 3.7351406957919636e-05,
      "loss": 0.726,
      "step": 2900
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.30700513035899746,
      "learning_rate": 3.700302533262007e-05,
      "loss": 0.7157,
      "step": 2905
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.3126415326969147,
      "learning_rate": 3.665590681260658e-05,
      "loss": 0.6963,
      "step": 2910
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.3014647601543792,
      "learning_rate": 3.631005835769334e-05,
      "loss": 0.7982,
      "step": 2915
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.2519716019821263,
      "learning_rate": 3.59654869022294e-05,
      "loss": 0.7267,
      "step": 2920
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.2454160383073075,
      "learning_rate": 3.5622199354959677e-05,
      "loss": 0.8258,
      "step": 2925
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.22670327761402992,
      "learning_rate": 3.5280202598886324e-05,
      "loss": 0.6447,
      "step": 2930
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.24579997279476928,
      "learning_rate": 3.4939503491130934e-05,
      "loss": 0.6665,
      "step": 2935
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.2849552092195358,
      "learning_rate": 3.4600108862796796e-05,
      "loss": 0.7252,
      "step": 2940
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.3060590068774777,
      "learning_rate": 3.426202551883213e-05,
      "loss": 0.7553,
      "step": 2945
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.25467435210378453,
      "learning_rate": 3.392526023789349e-05,
      "loss": 0.7237,
      "step": 2950
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.2276342572822446,
      "learning_rate": 3.358981977221006e-05,
      "loss": 0.6985,
      "step": 2955
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.24334295151794627,
      "learning_rate": 3.325571084744803e-05,
      "loss": 0.6964,
      "step": 2960
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.3063546229020857,
      "learning_rate": 3.292294016257598e-05,
      "loss": 0.69,
      "step": 2965
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.26843353730082814,
      "learning_rate": 3.259151438973024e-05,
      "loss": 0.7961,
      "step": 2970
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.28440009360240165,
      "learning_rate": 3.2261440174081525e-05,
      "loss": 0.7703,
      "step": 2975
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.2307084225718035,
      "learning_rate": 3.1932724133701344e-05,
      "loss": 0.5392,
      "step": 2980
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.29912287559451467,
      "learning_rate": 3.160537285942956e-05,
      "loss": 0.6836,
      "step": 2985
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.278878639560016,
      "learning_rate": 3.1279392914742046e-05,
      "loss": 0.7267,
      "step": 2990
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.32874234679202635,
      "learning_rate": 3.0954790835619195e-05,
      "loss": 0.6985,
      "step": 2995
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.27469910090881833,
      "learning_rate": 3.06315731304148e-05,
      "loss": 0.7915,
      "step": 3000
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.33032565430804056,
      "learning_rate": 3.0309746279725748e-05,
      "loss": 0.672,
      "step": 3005
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.22509955106392437,
      "learning_rate": 2.998931673626175e-05,
      "loss": 0.6924,
      "step": 3010
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.24954960719448557,
      "learning_rate": 2.9670290924716337e-05,
      "loss": 0.7648,
      "step": 3015
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.18097577821872354,
      "learning_rate": 2.935267524163774e-05,
      "loss": 0.724,
      "step": 3020
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.2531814760979886,
      "learning_rate": 2.9036476055300765e-05,
      "loss": 0.7565,
      "step": 3025
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.26537867964143863,
      "learning_rate": 2.872169970557913e-05,
      "loss": 0.7587,
      "step": 3030
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.2503354683898301,
      "learning_rate": 2.8408352503818357e-05,
      "loss": 0.7464,
      "step": 3035
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.31309433063885866,
      "learning_rate": 2.8096440732709083e-05,
      "loss": 0.7215,
      "step": 3040
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.273515150138006,
      "learning_rate": 2.778597064616133e-05,
      "loss": 0.7107,
      "step": 3045
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.29261711206548846,
      "learning_rate": 2.7476948469178887e-05,
      "loss": 0.6895,
      "step": 3050
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.2613608515124986,
      "learning_rate": 2.716938039773459e-05,
      "loss": 0.751,
      "step": 3055
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.3275410166575591,
      "learning_rate": 2.6863272598646106e-05,
      "loss": 0.6056,
      "step": 3060
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.243877381080221,
      "learning_rate": 2.6558631209452323e-05,
      "loss": 0.7083,
      "step": 3065
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.2670921124404005,
      "learning_rate": 2.625546233829016e-05,
      "loss": 0.6769,
      "step": 3070
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.26772171237531533,
      "learning_rate": 2.5953772063772252e-05,
      "loss": 0.7421,
      "step": 3075
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.24929713338883455,
      "learning_rate": 2.5653566434864928e-05,
      "loss": 0.7006,
      "step": 3080
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.27784311313283716,
      "learning_rate": 2.5354851470767037e-05,
      "loss": 0.6997,
      "step": 3085
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.27642936475366325,
      "learning_rate": 2.5057633160789184e-05,
      "loss": 0.7078,
      "step": 3090
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.2916050170142823,
      "learning_rate": 2.476191746423373e-05,
      "loss": 0.7769,
      "step": 3095
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.29742464103075256,
      "learning_rate": 2.446771031027527e-05,
      "loss": 0.6983,
      "step": 3100
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.2520881480340573,
      "learning_rate": 2.4175017597841666e-05,
      "loss": 0.7945,
      "step": 3105
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.2456996066425862,
      "learning_rate": 2.3883845195495878e-05,
      "loss": 0.7445,
      "step": 3110
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.20892213586106406,
      "learning_rate": 2.3594198941318248e-05,
      "loss": 0.6771,
      "step": 3115
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.22864359852007016,
      "learning_rate": 2.330608464278953e-05,
      "loss": 0.7326,
      "step": 3120
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.2630175043544034,
      "learning_rate": 2.3019508076674267e-05,
      "loss": 0.6733,
      "step": 3125
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.25018036673355504,
      "learning_rate": 2.273447498890521e-05,
      "loss": 0.8375,
      "step": 3130
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.2970455279248964,
      "learning_rate": 2.2450991094467865e-05,
      "loss": 0.665,
      "step": 3135
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.24388424842231599,
      "learning_rate": 2.2169062077286075e-05,
      "loss": 0.6899,
      "step": 3140
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.2620919423557573,
      "learning_rate": 2.1888693590107946e-05,
      "loss": 0.7008,
      "step": 3145
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.1597077931299718,
      "learning_rate": 2.1609891254392678e-05,
      "loss": 0.6285,
      "step": 3150
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.24978397247092607,
      "learning_rate": 2.1332660660197602e-05,
      "loss": 0.6707,
      "step": 3155
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.32226574046620643,
      "learning_rate": 2.1057007366066373e-05,
      "loss": 0.7732,
      "step": 3160
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.2633118389195218,
      "learning_rate": 2.0782936898917204e-05,
      "loss": 0.6722,
      "step": 3165
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.2065809642257561,
      "learning_rate": 2.0510454753932395e-05,
      "loss": 0.6913,
      "step": 3170
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.2502396270636488,
      "learning_rate": 2.0239566394447874e-05,
      "loss": 0.7196,
      "step": 3175
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.2553706993409484,
      "learning_rate": 1.9970277251843862e-05,
      "loss": 0.7153,
      "step": 3180
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.20930260480653445,
      "learning_rate": 1.9702592725435786e-05,
      "loss": 0.7252,
      "step": 3185
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.2624209534259566,
      "learning_rate": 1.9436518182366158e-05,
      "loss": 0.7354,
      "step": 3190
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.2835973619438766,
      "learning_rate": 1.9172058957496876e-05,
      "loss": 0.7456,
      "step": 3195
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.20758735208627346,
      "learning_rate": 1.8909220353302392e-05,
      "loss": 0.6972,
      "step": 3200
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.34020958211549873,
      "learning_rate": 1.8648007639763176e-05,
      "loss": 0.7108,
      "step": 3205
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.25980592525235724,
      "learning_rate": 1.838842605426031e-05,
      "loss": 0.6254,
      "step": 3210
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.23905673738763042,
      "learning_rate": 1.813048080147025e-05,
      "loss": 0.6463,
      "step": 3215
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.21218256948907488,
      "learning_rate": 1.7874177053260598e-05,
      "loss": 0.6492,
      "step": 3220
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.2000868258423336,
      "learning_rate": 1.7619519948586348e-05,
      "loss": 0.6001,
      "step": 3225
    },
    {
      "epoch": 0.83,
      "grad_norm": 1.103753197876994,
      "learning_rate": 1.736651459338695e-05,
      "loss": 0.6952,
      "step": 3230
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.2682682663167215,
      "learning_rate": 1.711516606048377e-05,
      "loss": 0.7109,
      "step": 3235
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.25063878481734475,
      "learning_rate": 1.6865479389478545e-05,
      "loss": 0.7225,
      "step": 3240
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.2149654094754697,
      "learning_rate": 1.6617459586652196e-05,
      "loss": 0.6709,
      "step": 3245
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.24092454032089566,
      "learning_rate": 1.6371111624864543e-05,
      "loss": 0.6764,
      "step": 3250
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.2454121794760406,
      "learning_rate": 1.612644044345456e-05,
      "loss": 0.6982,
      "step": 3255
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.3041665993053294,
      "learning_rate": 1.5883450948141377e-05,
      "loss": 0.658,
      "step": 3260
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.2143063192080754,
      "learning_rate": 1.564214801092585e-05,
      "loss": 0.7606,
      "step": 3265
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.23715373616102758,
      "learning_rate": 1.540253646999299e-05,
      "loss": 0.7262,
      "step": 3270
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.266422273258976,
      "learning_rate": 1.5164621129614787e-05,
      "loss": 0.6795,
      "step": 3275
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.2303123926465437,
      "learning_rate": 1.4928406760054059e-05,
      "loss": 0.6995,
      "step": 3280
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.30575921163679864,
      "learning_rate": 1.4693898097468662e-05,
      "loss": 0.7238,
      "step": 3285
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.2723263223045365,
      "learning_rate": 1.4461099843816684e-05,
      "loss": 0.694,
      "step": 3290
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.2704567901239254,
      "learning_rate": 1.4230016666761981e-05,
      "loss": 0.7654,
      "step": 3295
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.24548577793970916,
      "learning_rate": 1.4000653199580782e-05,
      "loss": 0.766,
      "step": 3300
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.31075109788590083,
      "learning_rate": 1.3773014041068633e-05,
      "loss": 0.7156,
      "step": 3305
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.23774900014129338,
      "learning_rate": 1.3547103755448287e-05,
      "loss": 0.7829,
      "step": 3310
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.20200953268790334,
      "learning_rate": 1.3322926872278097e-05,
      "loss": 0.7726,
      "step": 3315
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.26897850573503673,
      "learning_rate": 1.3100487886361379e-05,
      "loss": 0.7458,
      "step": 3320
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.2397184808414706,
      "learning_rate": 1.2879791257656049e-05,
      "loss": 0.7193,
      "step": 3325
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.20474419583059367,
      "learning_rate": 1.266084141118542e-05,
      "loss": 0.6816,
      "step": 3330
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.22050960020614993,
      "learning_rate": 1.2443642736949258e-05,
      "loss": 0.7013,
      "step": 3335
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.22350828170159398,
      "learning_rate": 1.2228199589835999e-05,
      "loss": 0.7583,
      "step": 3340
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.27678043547723374,
      "learning_rate": 1.2014516289535249e-05,
      "loss": 0.7093,
      "step": 3345
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.2641609501988626,
      "learning_rate": 1.1802597120451286e-05,
      "loss": 0.7644,
      "step": 3350
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.29287525021588734,
      "learning_rate": 1.1592446331617068e-05,
      "loss": 0.6913,
      "step": 3355
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.3132569389457329,
      "learning_rate": 1.1384068136609105e-05,
      "loss": 0.7836,
      "step": 3360
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.3257533444384463,
      "learning_rate": 1.1177466713462926e-05,
      "loss": 0.6575,
      "step": 3365
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.3658464833825346,
      "learning_rate": 1.0972646204589377e-05,
      "loss": 0.6571,
      "step": 3370
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.24393335900845253,
      "learning_rate": 1.076961071669148e-05,
      "loss": 0.7282,
      "step": 3375
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.2673318448109565,
      "learning_rate": 1.0568364320682178e-05,
      "loss": 0.7788,
      "step": 3380
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.24802808118552172,
      "learning_rate": 1.0368911051602636e-05,
      "loss": 0.6588,
      "step": 3385
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.2725189792906911,
      "learning_rate": 1.0171254908541372e-05,
      "loss": 0.6729,
      "step": 3390
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.3108637636142955,
      "learning_rate": 9.975399854554068e-06,
      "loss": 0.6717,
      "step": 3395
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.2770989947603813,
      "learning_rate": 9.781349816584162e-06,
      "loss": 0.7259,
      "step": 3400
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.23614090997589118,
      "learning_rate": 9.589108685383975e-06,
      "loss": 0.7101,
      "step": 3405
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.2808653787629317,
      "learning_rate": 9.398680315436903e-06,
      "loss": 0.6933,
      "step": 3410
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.24876373568373047,
      "learning_rate": 9.210068524879923e-06,
      "loss": 0.6842,
      "step": 3415
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.2678931806909137,
      "learning_rate": 9.023277095427173e-06,
      "loss": 0.7012,
      "step": 3420
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.25424385975928604,
      "learning_rate": 8.838309772294085e-06,
      "loss": 0.7443,
      "step": 3425
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.2402611926537631,
      "learning_rate": 8.655170264122303e-06,
      "loss": 0.7288,
      "step": 3430
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.24815108653858006,
      "learning_rate": 8.473862242905339e-06,
      "loss": 0.7894,
      "step": 3435
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.21869677597116047,
      "learning_rate": 8.294389343914899e-06,
      "loss": 0.6481,
      "step": 3440
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.2515057283571701,
      "learning_rate": 8.11675516562802e-06,
      "loss": 0.7195,
      "step": 3445
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.24394419541003282,
      "learning_rate": 7.940963269654922e-06,
      "loss": 0.8022,
      "step": 3450
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.2745733416076033,
      "learning_rate": 7.767017180667645e-06,
      "loss": 0.7189,
      "step": 3455
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.19526287456709943,
      "learning_rate": 7.594920386329252e-06,
      "loss": 0.6829,
      "step": 3460
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.3408386827932112,
      "learning_rate": 7.4246763372240795e-06,
      "loss": 0.6084,
      "step": 3465
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.22382037366933571,
      "learning_rate": 7.256288446788362e-06,
      "loss": 0.6839,
      "step": 3470
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.25700615722112985,
      "learning_rate": 7.089760091241937e-06,
      "loss": 0.766,
      "step": 3475
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.20934034960089873,
      "learning_rate": 6.925094609520455e-06,
      "loss": 0.6959,
      "step": 3480
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.1887454649423391,
      "learning_rate": 6.762295303208532e-06,
      "loss": 0.6084,
      "step": 3485
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.24866922377491815,
      "learning_rate": 6.601365436473439e-06,
      "loss": 0.76,
      "step": 3490
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.26188268702284273,
      "learning_rate": 6.44230823599975e-06,
      "loss": 0.716,
      "step": 3495
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.29822606608650276,
      "learning_rate": 6.2851268909245865e-06,
      "loss": 0.6727,
      "step": 3500
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.2732588418921964,
      "learning_rate": 6.1298245527737e-06,
      "loss": 0.7553,
      "step": 3505
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.24961501560552232,
      "learning_rate": 5.976404335398256e-06,
      "loss": 0.6262,
      "step": 3510
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.36125761108834664,
      "learning_rate": 5.824869314912473e-06,
      "loss": 0.702,
      "step": 3515
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.20211776842690213,
      "learning_rate": 5.675222529631841e-06,
      "loss": 0.7146,
      "step": 3520
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.34319105583503823,
      "learning_rate": 5.527466980012297e-06,
      "loss": 0.7202,
      "step": 3525
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.2551086799124065,
      "learning_rate": 5.381605628590003e-06,
      "loss": 0.7343,
      "step": 3530
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.24191784730605104,
      "learning_rate": 5.237641399921955e-06,
      "loss": 0.676,
      "step": 3535
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.23642128893948972,
      "learning_rate": 5.095577180527378e-06,
      "loss": 0.75,
      "step": 3540
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.24384299779088495,
      "learning_rate": 4.9554158188298445e-06,
      "loss": 0.7689,
      "step": 3545
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.2622807786449119,
      "learning_rate": 4.817160125100106e-06,
      "loss": 0.7586,
      "step": 3550
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.2849253620392849,
      "learning_rate": 4.680812871399854e-06,
      "loss": 0.7066,
      "step": 3555
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.24191852621283047,
      "learning_rate": 4.546376791525975e-06,
      "loss": 0.7219,
      "step": 3560
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.2212643288267702,
      "learning_rate": 4.413854580955945e-06,
      "loss": 0.7128,
      "step": 3565
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.23498346434116985,
      "learning_rate": 4.2832488967935795e-06,
      "loss": 0.7419,
      "step": 3570
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.24375104729509095,
      "learning_rate": 4.154562357715952e-06,
      "loss": 0.7244,
      "step": 3575
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.2349397771494649,
      "learning_rate": 4.02779754392072e-06,
      "loss": 0.686,
      "step": 3580
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.2657929830383169,
      "learning_rate": 3.90295699707447e-06,
      "loss": 0.7006,
      "step": 3585
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.313209415878294,
      "learning_rate": 3.780043220261764e-06,
      "loss": 0.7512,
      "step": 3590
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.2555739248275014,
      "learning_rate": 3.659058677934957e-06,
      "loss": 0.7011,
      "step": 3595
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.33802960858245146,
      "learning_rate": 3.540005795864709e-06,
      "loss": 0.6056,
      "step": 3600
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.1922803471445042,
      "learning_rate": 3.422886961091476e-06,
      "loss": 0.6843,
      "step": 3605
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.29061663111511976,
      "learning_rate": 3.3077045218775192e-06,
      "loss": 0.7806,
      "step": 3610
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.247673796937873,
      "learning_rate": 3.194460787659892e-06,
      "loss": 0.7209,
      "step": 3615
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.2560647317152861,
      "learning_rate": 3.0831580290041184e-06,
      "loss": 0.7605,
      "step": 3620
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.3634088101447885,
      "learning_rate": 2.973798477558709e-06,
      "loss": 0.7043,
      "step": 3625
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.18733318063360602,
      "learning_rate": 2.8663843260103074e-06,
      "loss": 0.6693,
      "step": 3630
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.21584750971265176,
      "learning_rate": 2.76091772803988e-06,
      "loss": 0.7167,
      "step": 3635
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.3177595541385448,
      "learning_rate": 2.6574007982793857e-06,
      "loss": 0.7402,
      "step": 3640
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.28268884396101174,
      "learning_rate": 2.555835612269475e-06,
      "loss": 0.6604,
      "step": 3645
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.23788632053961686,
      "learning_rate": 2.456224206417812e-06,
      "loss": 0.6957,
      "step": 3650
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.26249888785251196,
      "learning_rate": 2.35856857795832e-06,
      "loss": 0.7676,
      "step": 3655
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.4060765673216887,
      "learning_rate": 2.262870684911045e-06,
      "loss": 0.6793,
      "step": 3660
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.3094366288959559,
      "learning_rate": 2.169132446042976e-06,
      "loss": 0.6742,
      "step": 3665
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.23238542996653788,
      "learning_rate": 2.0773557408295343e-06,
      "loss": 0.7142,
      "step": 3670
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.24241260570013387,
      "learning_rate": 1.9875424094168647e-06,
      "loss": 0.694,
      "step": 3675
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.24945986800032627,
      "learning_rate": 1.8996942525850047e-06,
      "loss": 0.6959,
      "step": 3680
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.2943157341191807,
      "learning_rate": 1.813813031711742e-06,
      "loss": 0.6987,
      "step": 3685
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.2501518225722179,
      "learning_rate": 1.7299004687372665e-06,
      "loss": 0.6767,
      "step": 3690
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.2773238988005913,
      "learning_rate": 1.6479582461297217e-06,
      "loss": 0.6753,
      "step": 3695
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.24642550651253037,
      "learning_rate": 1.5679880068514174e-06,
      "loss": 0.6836,
      "step": 3700
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.23944679732673346,
      "learning_rate": 1.4899913543258814e-06,
      "loss": 0.7099,
      "step": 3705
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.321999340037235,
      "learning_rate": 1.4139698524057165e-06,
      "loss": 0.7798,
      "step": 3710
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.3325659255065644,
      "learning_rate": 1.3399250253413154e-06,
      "loss": 0.767,
      "step": 3715
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.30837054124513025,
      "learning_rate": 1.2678583577501624e-06,
      "loss": 0.6706,
      "step": 3720
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.31468665328396034,
      "learning_rate": 1.1977712945872243e-06,
      "loss": 0.7187,
      "step": 3725
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.2858601801052754,
      "learning_rate": 1.1296652411158182e-06,
      "loss": 0.7022,
      "step": 3730
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.24729340626419855,
      "learning_rate": 1.0635415628795665e-06,
      "loss": 0.691,
      "step": 3735
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.23960085243766105,
      "learning_rate": 9.994015856749527e-07,
      "loss": 0.7091,
      "step": 3740
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.20680354782081353,
      "learning_rate": 9.372465955247544e-07,
      "loss": 0.6042,
      "step": 3745
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.2644774077674383,
      "learning_rate": 8.770778386522627e-07,
      "loss": 0.7627,
      "step": 3750
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.2557259933660033,
      "learning_rate": 8.188965214562804e-07,
      "loss": 0.7372,
      "step": 3755
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.2637639760583473,
      "learning_rate": 7.627038104869199e-07,
      "loss": 0.6702,
      "step": 3760
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.28884424646282286,
      "learning_rate": 7.085008324222763e-07,
      "loss": 0.6375,
      "step": 3765
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.27748515379743166,
      "learning_rate": 6.562886740457797e-07,
      "loss": 0.7361,
      "step": 3770
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.2323425377751082,
      "learning_rate": 6.060683822244117e-07,
      "loss": 0.6933,
      "step": 3775
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.357455400684556,
      "learning_rate": 5.578409638877457e-07,
      "loss": 0.5931,
      "step": 3780
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.2724599339735716,
      "learning_rate": 5.116073860077064e-07,
      "loss": 0.6707,
      "step": 3785
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.30393724062041544,
      "learning_rate": 4.6736857557925227e-07,
      "loss": 0.6907,
      "step": 3790
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.30371804708625333,
      "learning_rate": 4.2512541960171294e-07,
      "loss": 0.7718,
      "step": 3795
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.35894189655237235,
      "learning_rate": 3.8487876506106966e-07,
      "loss": 0.7522,
      "step": 3800
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.24596908002441525,
      "learning_rate": 3.466294189129249e-07,
      "loss": 0.6583,
      "step": 3805
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.2920176199203356,
      "learning_rate": 3.1037814806634815e-07,
      "loss": 0.707,
      "step": 3810
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.2180144616080907,
      "learning_rate": 2.7612567936849964e-07,
      "loss": 0.6582,
      "step": 3815
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.26484981601847507,
      "learning_rate": 2.43872699590042e-07,
      "loss": 0.6977,
      "step": 3820
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.2626803702380842,
      "learning_rate": 2.136198554113844e-07,
      "loss": 0.7858,
      "step": 3825
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.2744308629524476,
      "learning_rate": 1.8536775340970425e-07,
      "loss": 0.7359,
      "step": 3830
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.2397873340818912,
      "learning_rate": 1.591169600468123e-07,
      "loss": 0.7332,
      "step": 3835
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.261016022377339,
      "learning_rate": 1.348680016577397e-07,
      "loss": 0.7853,
      "step": 3840
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.26956288380911886,
      "learning_rate": 1.126213644402463e-07,
      "loss": 0.7287,
      "step": 3845
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.250467321318094,
      "learning_rate": 9.237749444505062e-08,
      "loss": 0.6572,
      "step": 3850
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.2693162150713498,
      "learning_rate": 7.413679756684832e-08,
      "loss": 0.7318,
      "step": 3855
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.29918573890964684,
      "learning_rate": 5.7899639536251883e-08,
      "loss": 0.707,
      "step": 3860
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.27475327570345426,
      "learning_rate": 4.366634591237428e-08,
      "loss": 0.7348,
      "step": 3865
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.21723287925397622,
      "learning_rate": 3.143720207635648e-08,
      "loss": 0.6721,
      "step": 3870
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.35560080211348427,
      "learning_rate": 2.1212453225627482e-08,
      "loss": 0.7261,
      "step": 3875
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.30524103845820444,
      "learning_rate": 1.299230436898613e-08,
      "loss": 0.7778,
      "step": 3880
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.2662759700171976,
      "learning_rate": 6.776920322515423e-09,
      "loss": 0.6589,
      "step": 3885
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.2828428158719728,
|
"learning_rate": 2.566425706218567e-09, |
|
"loss": 0.6907, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.23136076886647983, |
|
"learning_rate": 3.609049415764787e-10, |
|
"loss": 0.7324, |
|
"step": 3895 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": NaN, |
|
"eval_runtime": 1502.4585, |
|
"eval_samples_per_second": 1.538, |
|
"eval_steps_per_second": 0.385, |
|
"step": 3898 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 3898, |
|
"total_flos": 1.0023612803710976e+16, |
|
"train_loss": 0.45687119700224843, |
|
"train_runtime": 26657.4924, |
|
"train_samples_per_second": 2.339, |
|
"train_steps_per_second": 0.146 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 3898, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"total_flos": 1.0023612803710976e+16, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |