{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 9.979633401221996,
  "eval_steps": 500,
  "global_step": 2450,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.004073319755600814, "grad_norm": 12.219289779663086, "learning_rate": 8.16326530612245e-07, "loss": 2.0786, "step": 1 },
    { "epoch": 0.020366598778004074, "grad_norm": 12.530974388122559, "learning_rate": 4.081632653061224e-06, "loss": 2.0708, "step": 5 },
    { "epoch": 0.04073319755600815, "grad_norm": 10.991973876953125, "learning_rate": 8.163265306122448e-06, "loss": 1.9589, "step": 10 },
    { "epoch": 0.06109979633401222, "grad_norm": 11.441458702087402, "learning_rate": 1.2244897959183674e-05, "loss": 1.6878, "step": 15 },
    { "epoch": 0.0814663951120163, "grad_norm": 4.176149845123291, "learning_rate": 1.6326530612244897e-05, "loss": 1.3224, "step": 20 },
    { "epoch": 0.10183299389002037, "grad_norm": 2.458606004714966, "learning_rate": 2.0408163265306123e-05, "loss": 1.1912, "step": 25 },
    { "epoch": 0.12219959266802444, "grad_norm": 1.30384361743927, "learning_rate": 2.448979591836735e-05, "loss": 1.1123, "step": 30 },
    { "epoch": 0.1425661914460285, "grad_norm": 1.0811594724655151, "learning_rate": 2.857142857142857e-05, "loss": 1.0462, "step": 35 },
    { "epoch": 0.1629327902240326, "grad_norm": 0.7156168222427368, "learning_rate": 3.265306122448979e-05, "loss": 0.992, "step": 40 },
    { "epoch": 0.18329938900203666, "grad_norm": 0.6059057116508484, "learning_rate": 3.673469387755102e-05, "loss": 0.9393, "step": 45 },
    { "epoch": 0.20366598778004075, "grad_norm": 0.5178468227386475, "learning_rate": 4.0816326530612245e-05, "loss": 0.8967, "step": 50 },
    { "epoch": 0.2240325865580448, "grad_norm": 0.45643067359924316, "learning_rate": 4.4897959183673474e-05, "loss": 0.8665, "step": 55 },
    { "epoch": 0.24439918533604887, "grad_norm": 0.4627813994884491, "learning_rate": 4.89795918367347e-05, "loss": 0.8554, "step": 60 },
    { "epoch": 0.26476578411405294, "grad_norm": 0.7451216578483582, "learning_rate": 5.3061224489795926e-05, "loss": 0.8335, "step": 65 },
    { "epoch": 0.285132382892057, "grad_norm": 0.4662891924381256, "learning_rate": 5.714285714285714e-05, "loss": 0.827, "step": 70 },
    { "epoch": 0.3054989816700611, "grad_norm": 0.42819878458976746, "learning_rate": 6.122448979591838e-05, "loss": 0.816, "step": 75 },
    { "epoch": 0.3258655804480652, "grad_norm": 0.4619337022304535, "learning_rate": 6.530612244897959e-05, "loss": 0.7968, "step": 80 },
    { "epoch": 0.34623217922606925, "grad_norm": 0.4922891855239868, "learning_rate": 6.938775510204082e-05, "loss": 0.7954, "step": 85 },
    { "epoch": 0.3665987780040733, "grad_norm": 0.5278509259223938, "learning_rate": 7.346938775510205e-05, "loss": 0.7953, "step": 90 },
    { "epoch": 0.3869653767820774, "grad_norm": 0.912343442440033, "learning_rate": 7.755102040816327e-05, "loss": 0.7773, "step": 95 },
    { "epoch": 0.4073319755600815, "grad_norm": 0.5746473073959351, "learning_rate": 8.163265306122449e-05, "loss": 0.775, "step": 100 },
    { "epoch": 0.42769857433808556, "grad_norm": 0.4901811480522156, "learning_rate": 8.571428571428571e-05, "loss": 0.7708, "step": 105 },
    { "epoch": 0.4480651731160896, "grad_norm": 0.596023678779602, "learning_rate": 8.979591836734695e-05, "loss": 0.761, "step": 110 },
    { "epoch": 0.4684317718940937, "grad_norm": 0.9318987131118774, "learning_rate": 9.387755102040817e-05, "loss": 0.7572, "step": 115 },
    { "epoch": 0.48879837067209775, "grad_norm": 0.648429811000824, "learning_rate": 9.79591836734694e-05, "loss": 0.7652, "step": 120 },
    { "epoch": 0.5091649694501018, "grad_norm": 0.4797033369541168, "learning_rate": 0.00010204081632653062, "loss": 0.7563, "step": 125 },
    { "epoch": 0.5295315682281059, "grad_norm": 0.9584823250770569, "learning_rate": 0.00010612244897959185, "loss": 0.7532, "step": 130 },
    { "epoch": 0.5498981670061099, "grad_norm": 0.6396449208259583, "learning_rate": 0.00011020408163265306, "loss": 0.7438, "step": 135 },
    { "epoch": 0.570264765784114, "grad_norm": 0.8127761483192444, "learning_rate": 0.00011428571428571428, "loss": 0.7367, "step": 140 },
    { "epoch": 0.5906313645621182, "grad_norm": 0.6745628118515015, "learning_rate": 0.00011836734693877552, "loss": 0.7284, "step": 145 },
    { "epoch": 0.6109979633401222, "grad_norm": 0.5591239929199219, "learning_rate": 0.00012244897959183676, "loss": 0.7337, "step": 150 },
    { "epoch": 0.6313645621181263, "grad_norm": 0.7267659902572632, "learning_rate": 0.00012653061224489798, "loss": 0.7264, "step": 155 },
    { "epoch": 0.6517311608961304, "grad_norm": 0.5548218488693237, "learning_rate": 0.00013061224489795917, "loss": 0.7301, "step": 160 },
    { "epoch": 0.6720977596741344, "grad_norm": 0.9011363387107849, "learning_rate": 0.0001346938775510204, "loss": 0.72, "step": 165 },
    { "epoch": 0.6924643584521385, "grad_norm": 0.5213197469711304, "learning_rate": 0.00013877551020408165, "loss": 0.7243, "step": 170 },
    { "epoch": 0.7128309572301426, "grad_norm": 0.476764440536499, "learning_rate": 0.00014285714285714287, "loss": 0.7198, "step": 175 },
    { "epoch": 0.7331975560081466, "grad_norm": 0.7126782536506653, "learning_rate": 0.0001469387755102041, "loss": 0.7177, "step": 180 },
    { "epoch": 0.7535641547861507, "grad_norm": 0.4389437735080719, "learning_rate": 0.0001510204081632653, "loss": 0.7143, "step": 185 },
    { "epoch": 0.7739307535641547, "grad_norm": 0.5171541571617126, "learning_rate": 0.00015510204081632654, "loss": 0.714, "step": 190 },
    { "epoch": 0.7942973523421588, "grad_norm": 0.5747414827346802, "learning_rate": 0.00015918367346938776, "loss": 0.7192, "step": 195 },
    { "epoch": 0.814663951120163, "grad_norm": 0.703008770942688, "learning_rate": 0.00016326530612244898, "loss": 0.7094, "step": 200 },
    { "epoch": 0.835030549898167, "grad_norm": 0.41933196783065796, "learning_rate": 0.00016734693877551023, "loss": 0.7027, "step": 205 },
    { "epoch": 0.8553971486761711, "grad_norm": 0.8496442437171936, "learning_rate": 0.00017142857142857143, "loss": 0.7086, "step": 210 },
    { "epoch": 0.8757637474541752, "grad_norm": 0.44953274726867676, "learning_rate": 0.00017551020408163265, "loss": 0.7104, "step": 215 },
    { "epoch": 0.8961303462321792, "grad_norm": 0.49215880036354065, "learning_rate": 0.0001795918367346939, "loss": 0.6998, "step": 220 },
    { "epoch": 0.9164969450101833, "grad_norm": 0.4515603482723236, "learning_rate": 0.00018367346938775512, "loss": 0.6994, "step": 225 },
    { "epoch": 0.9368635437881874, "grad_norm": 0.48363256454467773, "learning_rate": 0.00018775510204081634, "loss": 0.697, "step": 230 },
    { "epoch": 0.9572301425661914, "grad_norm": 0.504764974117279, "learning_rate": 0.00019183673469387756, "loss": 0.6937, "step": 235 },
    { "epoch": 0.9775967413441955, "grad_norm": 0.6177113056182861, "learning_rate": 0.0001959183673469388, "loss": 0.702, "step": 240 },
    { "epoch": 0.9979633401221996, "grad_norm": 0.6133261919021606, "learning_rate": 0.0002, "loss": 0.6982, "step": 245 },
    { "epoch": 0.9979633401221996, "eval_loss": 1.8247634172439575, "eval_runtime": 0.3801, "eval_samples_per_second": 31.574, "eval_steps_per_second": 2.631, "step": 245 },
    { "epoch": 1.0183299389002036, "grad_norm": 0.6387724876403809, "learning_rate": 0.00019999746258949147, "loss": 0.6786, "step": 250 },
    { "epoch": 1.0386965376782078, "grad_norm": 0.4179505705833435, "learning_rate": 0.00019998985048673486, "loss": 0.6806, "step": 255 },
    { "epoch": 1.0590631364562118, "grad_norm": 0.7707039713859558, "learning_rate": 0.0001999771640780308, "loss": 0.6828, "step": 260 },
    { "epoch": 1.079429735234216, "grad_norm": 0.735935628414154, "learning_rate": 0.0001999594040071918, "loss": 0.6832, "step": 265 },
    { "epoch": 1.0997963340122199, "grad_norm": 0.6434056758880615, "learning_rate": 0.00019993657117550973, "loss": 0.681, "step": 270 },
    { "epoch": 1.120162932790224, "grad_norm": 0.7113597393035889, "learning_rate": 0.00019990866674170983, "loss": 0.6735, "step": 275 },
    { "epoch": 1.140529531568228, "grad_norm": 0.7098330855369568, "learning_rate": 0.00019987569212189224, "loss": 0.6786, "step": 280 },
    { "epoch": 1.1608961303462322, "grad_norm": 0.38251993060112, "learning_rate": 0.0001998376489894599, "loss": 0.6794, "step": 285 },
    { "epoch": 1.1812627291242364, "grad_norm": 0.6434310674667358, "learning_rate": 0.00019979453927503364, "loss": 0.6846, "step": 290 },
    { "epoch": 1.2016293279022403, "grad_norm": 0.5492526292800903, "learning_rate": 0.00019974636516635434, "loss": 0.6697, "step": 295 },
    { "epoch": 1.2219959266802445, "grad_norm": 0.48028430342674255, "learning_rate": 0.00019969312910817183, "loss": 0.6778, "step": 300 },
    { "epoch": 1.2423625254582484, "grad_norm": 0.35454341769218445, "learning_rate": 0.00019963483380212068, "loss": 0.6671, "step": 305 },
    { "epoch": 1.2627291242362526, "grad_norm": 0.3555687665939331, "learning_rate": 0.00019957148220658345, "loss": 0.6646, "step": 310 },
    { "epoch": 1.2830957230142566, "grad_norm": 0.5027297735214233, "learning_rate": 0.00019950307753654017, "loss": 0.6723, "step": 315 },
    { "epoch": 1.3034623217922607, "grad_norm": 0.34120357036590576, "learning_rate": 0.00019942962326340537, "loss": 0.6669, "step": 320 },
    { "epoch": 1.3238289205702647, "grad_norm": 0.3805827498435974, "learning_rate": 0.000199351123114852, "loss": 0.6682, "step": 325 },
    { "epoch": 1.3441955193482689, "grad_norm": 0.4035264551639557, "learning_rate": 0.00019926758107462206, "loss": 0.6665, "step": 330 },
    { "epoch": 1.364562118126273, "grad_norm": 0.43114498257637024, "learning_rate": 0.0001991790013823246, "loss": 0.6687, "step": 335 },
    { "epoch": 1.384928716904277, "grad_norm": 0.43574780225753784, "learning_rate": 0.00019908538853322048, "loss": 0.6648, "step": 340 },
    { "epoch": 1.405295315682281, "grad_norm": 0.36844050884246826, "learning_rate": 0.00019898674727799419, "loss": 0.6635, "step": 345 },
    { "epoch": 1.4256619144602851, "grad_norm": 0.360999196767807, "learning_rate": 0.00019888308262251285, "loss": 0.6715, "step": 350 },
    { "epoch": 1.4460285132382893, "grad_norm": 0.47190627455711365, "learning_rate": 0.0001987743998275723, "loss": 0.6595, "step": 355 },
    { "epoch": 1.4663951120162932, "grad_norm": 0.4380181133747101, "learning_rate": 0.00019866070440862976, "loss": 0.6589, "step": 360 },
    { "epoch": 1.4867617107942974, "grad_norm": 0.3873290419578552, "learning_rate": 0.00019854200213552424, "loss": 0.6649, "step": 365 },
    { "epoch": 1.5071283095723014, "grad_norm": 0.4168381094932556, "learning_rate": 0.00019841829903218376, "loss": 0.6673, "step": 370 },
    { "epoch": 1.5274949083503055, "grad_norm": 0.4359332025051117, "learning_rate": 0.00019828960137631928, "loss": 0.6666, "step": 375 },
    { "epoch": 1.5478615071283097, "grad_norm": 0.33967724442481995, "learning_rate": 0.00019815591569910654, "loss": 0.6663, "step": 380 },
    { "epoch": 1.5682281059063137, "grad_norm": 0.33921679854393005, "learning_rate": 0.00019801724878485438, "loss": 0.6557, "step": 385 },
    { "epoch": 1.5885947046843176, "grad_norm": 0.3741433024406433, "learning_rate": 0.00019787360767066053, "loss": 0.6522, "step": 390 },
    { "epoch": 1.6089613034623218, "grad_norm": 0.3441908359527588, "learning_rate": 0.0001977249996460544, "loss": 0.653, "step": 395 },
    { "epoch": 1.629327902240326, "grad_norm": 0.36859628558158875, "learning_rate": 0.00019757143225262728, "loss": 0.6582, "step": 400 },
    { "epoch": 1.64969450101833, "grad_norm": 0.3605320453643799, "learning_rate": 0.00019741291328364955, "loss": 0.6546, "step": 405 },
    { "epoch": 1.6700610997963339, "grad_norm": 0.6110367178916931, "learning_rate": 0.00019724945078367513, "loss": 0.6567, "step": 410 },
    { "epoch": 1.690427698574338, "grad_norm": 0.3475343585014343, "learning_rate": 0.00019708105304813332, "loss": 0.6658, "step": 415 },
    { "epoch": 1.7107942973523422, "grad_norm": 0.45948854088783264, "learning_rate": 0.0001969077286229078, "loss": 0.6584, "step": 420 },
    { "epoch": 1.7311608961303462, "grad_norm": 0.31532639265060425, "learning_rate": 0.00019672948630390294, "loss": 0.6579, "step": 425 },
    { "epoch": 1.7515274949083504, "grad_norm": 0.3328857421875, "learning_rate": 0.00019654633513659743, "loss": 0.6583, "step": 430 },
    { "epoch": 1.7718940936863543, "grad_norm": 0.3420257866382599, "learning_rate": 0.00019635828441558515, "loss": 0.6493, "step": 435 },
    { "epoch": 1.7922606924643585, "grad_norm": 0.31616052985191345, "learning_rate": 0.00019616534368410365, "loss": 0.6536, "step": 440 },
    { "epoch": 1.8126272912423627, "grad_norm": 0.30928483605384827, "learning_rate": 0.0001959675227335497, "loss": 0.6583, "step": 445 },
    { "epoch": 1.8329938900203666, "grad_norm": 0.3872964084148407, "learning_rate": 0.00019576483160298246, "loss": 0.6467, "step": 450 },
    { "epoch": 1.8533604887983706, "grad_norm": 0.3449136018753052, "learning_rate": 0.0001955572805786141, "loss": 0.6513, "step": 455 },
    { "epoch": 1.8737270875763747, "grad_norm": 0.3003346621990204, "learning_rate": 0.00019534488019328752, "loss": 0.6542, "step": 460 },
    { "epoch": 1.894093686354379, "grad_norm": 0.3554345965385437, "learning_rate": 0.0001951276412259422, "loss": 0.6496, "step": 465 },
    { "epoch": 1.9144602851323829, "grad_norm": 0.3109475374221802, "learning_rate": 0.00019490557470106686, "loss": 0.6544, "step": 470 },
    { "epoch": 1.9348268839103868, "grad_norm": 0.35385647416114807, "learning_rate": 0.00019467869188814023, "loss": 0.6456, "step": 475 },
    { "epoch": 1.955193482688391, "grad_norm": 0.3411131203174591, "learning_rate": 0.00019444700430105893, "loss": 0.6478, "step": 480 },
    { "epoch": 1.9755600814663952, "grad_norm": 0.3149469792842865, "learning_rate": 0.00019421052369755334, "loss": 0.6541, "step": 485 },
    { "epoch": 1.9959266802443993, "grad_norm": 0.4090039134025574, "learning_rate": 0.00019396926207859084, "loss": 0.6596, "step": 490 },
    { "epoch": 2.0, "eval_loss": 1.833816647529602, "eval_runtime": 0.3787, "eval_samples_per_second": 31.686, "eval_steps_per_second": 2.641, "step": 491 },
    { "epoch": 2.016293279022403, "grad_norm": 0.4153566062450409, "learning_rate": 0.0001937232316877668, "loss": 0.6285, "step": 495 },
    { "epoch": 2.0366598778004072, "grad_norm": 0.41273799538612366, "learning_rate": 0.00019347244501068312, "loss": 0.6309, "step": 500 },
    { "epoch": 2.0570264765784114, "grad_norm": 0.40306323766708374, "learning_rate": 0.00019321691477431485, "loss": 0.6294, "step": 505 },
    { "epoch": 2.0773930753564156, "grad_norm": 0.37823185324668884, "learning_rate": 0.00019295665394636414, "loss": 0.6333, "step": 510 },
    { "epoch": 2.0977596741344193, "grad_norm": 0.31898233294487, "learning_rate": 0.0001926916757346022, "loss": 0.6217, "step": 515 },
    { "epoch": 2.1181262729124235, "grad_norm": 0.3543696105480194, "learning_rate": 0.00019242199358619893, "loss": 0.6268, "step": 520 },
    { "epoch": 2.1384928716904277, "grad_norm": 0.4415835738182068, "learning_rate": 0.00019214762118704076, "loss": 0.6252, "step": 525 },
    { "epoch": 2.158859470468432, "grad_norm": 0.40828588604927063, "learning_rate": 0.00019186857246103584, "loss": 0.624, "step": 530 },
    { "epoch": 2.179226069246436, "grad_norm": 0.37648454308509827, "learning_rate": 0.00019158486156940757, "loss": 0.6228, "step": 535 },
    { "epoch": 2.1995926680244398, "grad_norm": 0.3546181321144104, "learning_rate": 0.0001912965029099759, "loss": 0.62, "step": 540 },
    { "epoch": 2.219959266802444, "grad_norm": 0.3437698185443878, "learning_rate": 0.00019100351111642666, "loss": 0.6228, "step": 545 },
    { "epoch": 2.240325865580448, "grad_norm": 0.3919028639793396, "learning_rate": 0.000190705901057569, "loss": 0.623, "step": 550 },
    { "epoch": 2.2606924643584523, "grad_norm": 0.36673861742019653, "learning_rate": 0.00019040368783658073, "loss": 0.6222, "step": 555 },
    { "epoch": 2.281059063136456, "grad_norm": 0.3100906312465668, "learning_rate": 0.0001900968867902419, "loss": 0.6276, "step": 560 },
    { "epoch": 2.30142566191446, "grad_norm": 0.33423855900764465, "learning_rate": 0.00018978551348815652, "loss": 0.6304, "step": 565 },
    { "epoch": 2.3217922606924644, "grad_norm": 0.3217325210571289, "learning_rate": 0.00018946958373196231, "loss": 0.6255, "step": 570 },
    { "epoch": 2.3421588594704685, "grad_norm": 0.32845550775527954, "learning_rate": 0.00018914911355452895, "loss": 0.6276, "step": 575 },
    { "epoch": 2.3625254582484727, "grad_norm": 0.3502614498138428, "learning_rate": 0.0001888241192191444, "loss": 0.6345, "step": 580 },
    { "epoch": 2.3828920570264764, "grad_norm": 0.3093807101249695, "learning_rate": 0.00018849461721868945, "loss": 0.6231, "step": 585 },
    { "epoch": 2.4032586558044806, "grad_norm": 0.34325292706489563, "learning_rate": 0.0001881606242748009, "loss": 0.6356, "step": 590 },
    { "epoch": 2.423625254582485, "grad_norm": 0.3460986018180847, "learning_rate": 0.00018782215733702286, "loss": 0.6228, "step": 595 },
    { "epoch": 2.443991853360489, "grad_norm": 0.31046947836875916, "learning_rate": 0.00018747923358194662, "loss": 0.6264, "step": 600 },
    { "epoch": 2.4643584521384927, "grad_norm": 0.318236380815506, "learning_rate": 0.00018713187041233896, "loss": 0.6251, "step": 605 },
    { "epoch": 2.484725050916497, "grad_norm": 0.33597373962402344, "learning_rate": 0.000186780085456259, "loss": 0.6333, "step": 610 },
    { "epoch": 2.505091649694501, "grad_norm": 0.32059183716773987, "learning_rate": 0.0001864238965661637, "loss": 0.6322, "step": 615 },
    { "epoch": 2.525458248472505, "grad_norm": 0.31177854537963867, "learning_rate": 0.00018606332181800165, "loss": 0.6254, "step": 620 },
    { "epoch": 2.5458248472505094, "grad_norm": 0.35773152112960815, "learning_rate": 0.00018569837951029595, "loss": 0.6282, "step": 625 },
    { "epoch": 2.566191446028513, "grad_norm": 0.31747111678123474, "learning_rate": 0.00018532908816321558, "loss": 0.6229, "step": 630 },
    { "epoch": 2.5865580448065173, "grad_norm": 0.3057750463485718, "learning_rate": 0.0001849554665176354, "loss": 0.6257, "step": 635 },
    { "epoch": 2.6069246435845215, "grad_norm": 0.30599597096443176, "learning_rate": 0.0001845775335341852, "loss": 0.6199, "step": 640 },
    { "epoch": 2.627291242362525, "grad_norm": 0.3067176043987274, "learning_rate": 0.0001841953083922875, "loss": 0.6244, "step": 645 },
    { "epoch": 2.6476578411405294, "grad_norm": 0.32527825236320496, "learning_rate": 0.00018380881048918405, "loss": 0.6258, "step": 650 },
    { "epoch": 2.6680244399185336, "grad_norm": 0.3099977970123291, "learning_rate": 0.00018341805943895178, "loss": 0.6298, "step": 655 },
    { "epoch": 2.6883910386965377, "grad_norm": 0.3218511641025543, "learning_rate": 0.00018302307507150702, "loss": 0.624, "step": 660 },
    { "epoch": 2.708757637474542, "grad_norm": 0.3705076277256012, "learning_rate": 0.0001826238774315995, "loss": 0.6219, "step": 665 },
    { "epoch": 2.729124236252546, "grad_norm": 0.4094727337360382, "learning_rate": 0.00018222048677779494, "loss": 0.6244, "step": 670 },
    { "epoch": 2.74949083503055, "grad_norm": 0.327188640832901, "learning_rate": 0.00018181292358144703, "loss": 0.6195, "step": 675 },
    { "epoch": 2.769857433808554, "grad_norm": 0.3251926302909851, "learning_rate": 0.0001814012085256585, "loss": 0.6235, "step": 680 },
    { "epoch": 2.790224032586558, "grad_norm": 0.320218950510025, "learning_rate": 0.00018098536250423154, "loss": 0.6236, "step": 685 },
    { "epoch": 2.810590631364562, "grad_norm": 0.32766249775886536, "learning_rate": 0.00018056540662060745, "loss": 0.6188, "step": 690 },
    { "epoch": 2.830957230142566, "grad_norm": 0.35727354884147644, "learning_rate": 0.00018014136218679567, "loss": 0.619, "step": 695 },
    { "epoch": 2.8513238289205702, "grad_norm": 0.32284310460090637, "learning_rate": 0.00017971325072229226, "loss": 0.6224, "step": 700 },
    { "epoch": 2.8716904276985744, "grad_norm": 0.34942692518234253, "learning_rate": 0.00017928109395298777, "loss": 0.617, "step": 705 },
    { "epoch": 2.8920570264765786, "grad_norm": 0.3333948850631714, "learning_rate": 0.00017884491381006478, "loss": 0.6269, "step": 710 },
    { "epoch": 2.9124236252545828, "grad_norm": 0.3345637321472168, "learning_rate": 0.00017840473242888486, "loss": 0.6226, "step": 715 },
    { "epoch": 2.9327902240325865, "grad_norm": 0.3559291958808899, "learning_rate": 0.0001779605721478652, "loss": 0.6173, "step": 720 },
    { "epoch": 2.9531568228105907, "grad_norm": 0.3167003393173218, "learning_rate": 0.0001775124555073452, "loss": 0.6224, "step": 725 },
    { "epoch": 2.973523421588595, "grad_norm": 0.3369687497615814, "learning_rate": 0.00017706040524844221, "loss": 0.6155, "step": 730 },
    { "epoch": 2.9938900203665986, "grad_norm": 0.30602747201919556, "learning_rate": 0.0001766044443118978, "loss": 0.6197, "step": 735 },
    { "epoch": 2.9979633401221997, "eval_loss": 1.8432329893112183, "eval_runtime": 0.4594, "eval_samples_per_second": 26.122, "eval_steps_per_second": 2.177, "step": 736 },
    { "epoch": 3.0142566191446027, "grad_norm": 0.3442608416080475, "learning_rate": 0.00017614459583691346, "loss": 0.6108, "step": 740 },
    { "epoch": 3.034623217922607, "grad_norm": 0.30495738983154297, "learning_rate": 0.0001756808831599762, "loss": 0.6031, "step": 745 },
    { "epoch": 3.054989816700611, "grad_norm": 0.34981706738471985, "learning_rate": 0.0001752133298136744, "loss": 0.5959, "step": 750 },
    { "epoch": 3.0753564154786153, "grad_norm": 0.32501649856567383, "learning_rate": 0.00017474195952550355, "loss": 0.5964, "step": 755 },
    { "epoch": 3.095723014256619, "grad_norm": 0.3137637972831726, "learning_rate": 0.00017426679621666212, "loss": 0.5885, "step": 760 },
    { "epoch": 3.116089613034623, "grad_norm": 0.3315506875514984, "learning_rate": 0.00017378786400083757, "loss": 0.5914, "step": 765 },
    { "epoch": 3.1364562118126273, "grad_norm": 0.33116060495376587, "learning_rate": 0.00017330518718298264, "loss": 0.5997, "step": 770 },
    { "epoch": 3.1568228105906315, "grad_norm": 0.33854183554649353, "learning_rate": 0.0001728187902580819, "loss": 0.5971, "step": 775 },
    { "epoch": 3.1771894093686353, "grad_norm": 0.31813180446624756, "learning_rate": 0.00017232869790990879, "loss": 0.5994, "step": 780 },
    { "epoch": 3.1975560081466394, "grad_norm": 0.30412253737449646, "learning_rate": 0.00017183493500977278, "loss": 0.5979, "step": 785 },
    { "epoch": 3.2179226069246436, "grad_norm": 0.3499339520931244, "learning_rate": 0.0001713375266152572, "loss": 0.5981, "step": 790 },
    { "epoch": 3.2382892057026478, "grad_norm": 0.3247123956680298, "learning_rate": 0.00017083649796894795, "loss": 0.5955, "step": 795 },
    { "epoch": 3.258655804480652, "grad_norm": 0.3245786726474762, "learning_rate": 0.00017033187449715196, "loss": 0.6019, "step": 800 },
    { "epoch": 3.2790224032586557, "grad_norm": 0.3269343674182892, "learning_rate": 0.00016982368180860728, "loss": 0.5979, "step": 805 },
    { "epoch": 3.29938900203666, "grad_norm": 0.4949992299079895, "learning_rate": 0.00016931194569318327, "loss": 0.6037, "step": 810 },
    { "epoch": 3.319755600814664, "grad_norm": 0.3468710780143738, "learning_rate": 0.00016879669212057187, "loss": 0.5943, "step": 815 },
    { "epoch": 3.340122199592668, "grad_norm": 0.323321670293808, "learning_rate": 0.00016827794723896968, "loss": 0.6003, "step": 820 },
    { "epoch": 3.360488798370672, "grad_norm": 0.3203057646751404, "learning_rate": 0.00016775573737375096, "loss": 0.6023, "step": 825 },
    { "epoch": 3.380855397148676, "grad_norm": 0.32133370637893677, "learning_rate": 0.0001672300890261317, "loss": 0.6043, "step": 830 },
    { "epoch": 3.4012219959266803, "grad_norm": 0.32316601276397705, "learning_rate": 0.0001667010288718247, "loss": 0.6051, "step": 835 },
    { "epoch": 3.4215885947046845, "grad_norm": 0.34438490867614746, "learning_rate": 0.00016616858375968595, "loss": 0.6039, "step": 840 },
    { "epoch": 3.4419551934826886, "grad_norm": 0.3012474477291107, "learning_rate": 0.0001656327807103518, "loss": 0.599, "step": 845 },
    { "epoch": 3.4623217922606924, "grad_norm": 0.3228248059749603, "learning_rate": 0.0001650936469148681, "loss": 0.604, "step": 850 },
    { "epoch": 3.4826883910386965, "grad_norm": 0.3279968798160553, "learning_rate": 0.00016455120973331, "loss": 0.5953, "step": 855 },
    { "epoch": 3.5030549898167007, "grad_norm": 0.3526194393634796, "learning_rate": 0.0001640054966933935, "loss": 0.6029, "step": 860 },
    { "epoch": 3.5234215885947044, "grad_norm": 0.37160396575927734, "learning_rate": 0.00016345653548907873, "loss": 0.6005, "step": 865 },
    { "epoch": 3.5437881873727086, "grad_norm": 0.33020690083503723, "learning_rate": 0.00016290435397916424, "loss": 0.6049, "step": 870 },
    { "epoch": 3.564154786150713, "grad_norm": 0.33525606989860535, "learning_rate": 0.00016234898018587337, "loss": 0.5942, "step": 875 },
    { "epoch": 3.584521384928717, "grad_norm": 0.33215492963790894, "learning_rate": 0.00016179044229343206, "loss": 0.6, "step": 880 },
    { "epoch": 3.604887983706721, "grad_norm": 0.3076721429824829, "learning_rate": 0.00016122876864663868, "loss": 0.5984, "step": 885 },
    { "epoch": 3.6252545824847253, "grad_norm": 0.379398375749588, "learning_rate": 0.00016066398774942554, "loss": 0.6005, "step": 890 },
    { "epoch": 3.645621181262729, "grad_norm": 0.3342379629611969, "learning_rate": 0.00016009612826341227, "loss": 0.5987, "step": 895 },
    { "epoch": 3.6659877800407332, "grad_norm": 0.3370605707168579, "learning_rate": 0.00015952521900645144, "loss": 0.6013, "step": 900 },
    { "epoch": 3.6863543788187374, "grad_norm": 0.3159192204475403, "learning_rate": 0.000158951288951166, "loss": 0.5992, "step": 905 },
    { "epoch": 3.706720977596741, "grad_norm": 0.3914301097393036, "learning_rate": 0.000158374367223479, "loss": 0.5994, "step": 910 },
    { "epoch": 3.7270875763747453, "grad_norm": 0.33679434657096863, "learning_rate": 0.00015779448310113554, "loss": 0.5949, "step": 915 },
    { "epoch": 3.7474541751527495, "grad_norm": 0.3156144618988037, "learning_rate": 0.00015721166601221698, "loss": 0.595, "step": 920 },
    { "epoch": 3.7678207739307537, "grad_norm": 0.3130475878715515, "learning_rate": 0.0001566259455336474, "loss": 0.596, "step": 925 },
    { "epoch": 3.788187372708758, "grad_norm": 0.303627610206604, "learning_rate": 0.00015603735138969272, "loss": 0.6036, "step": 930 },
    { "epoch": 3.8085539714867616, "grad_norm": 0.32147976756095886, "learning_rate": 0.0001554459134504523, "loss": 0.5971, "step": 935 },
    { "epoch": 3.8289205702647657, "grad_norm": 0.33086270093917847, "learning_rate": 0.000154851661730343, "loss": 0.5979, "step": 940 },
    { "epoch": 3.84928716904277, "grad_norm": 0.3000107407569885, "learning_rate": 0.00015425462638657595, "loss": 0.5914, "step": 945 },
    { "epoch": 3.869653767820774, "grad_norm": 0.292559415102005, "learning_rate": 0.0001536548377176263, "loss": 0.5941, "step": 950 },
    { "epoch": 3.890020366598778, "grad_norm": 0.30751997232437134, "learning_rate": 0.00015305232616169548, "loss": 0.6094, "step": 955 },
    { "epoch": 3.910386965376782, "grad_norm": 0.3731157183647156, "learning_rate": 0.00015244712229516656, "loss": 0.5955, "step": 960 },
    { "epoch": 3.930753564154786, "grad_norm": 0.3387170732021332, "learning_rate": 0.00015183925683105254, "loss": 0.5972, "step": 965 },
    { "epoch": 3.9511201629327903, "grad_norm": 0.3645428717136383, "learning_rate": 0.0001512287606174377, "loss": 0.5998, "step": 970 },
    { "epoch": 3.9714867617107945, "grad_norm": 0.29603078961372375, "learning_rate": 0.0001506156646359123, "loss": 0.596, "step": 975 },
    { "epoch": 3.9918533604887982, "grad_norm": 0.30991217494010925, "learning_rate": 0.00015000000000000001, "loss": 0.6011, "step": 980 },
    { "epoch": 4.0, "eval_loss": 1.870729923248291, "eval_runtime": 0.3769, "eval_samples_per_second": 31.837, "eval_steps_per_second": 2.653, "step": 982 },
    { "epoch": 4.012219959266803, "grad_norm": 0.33605799078941345, "learning_rate": 0.00014938179795357916, "loss": 0.5858, "step": 985 },
    { "epoch": 4.032586558044806, "grad_norm": 0.3564818203449249, "learning_rate": 0.00014876108986929717, "loss": 0.5735, "step": 990 },
    { "epoch": 4.05295315682281, "grad_norm": 0.31967151165008545, "learning_rate": 0.00014813790724697832, "loss": 0.579, "step": 995 },
    { "epoch": 4.0733197556008145, "grad_norm": 0.33131125569343567, "learning_rate": 0.0001475122817120253, "loss": 0.57, "step": 1000 },
    { "epoch": 4.093686354378819, "grad_norm": 0.3302120864391327, "learning_rate": 0.00014688424501381424, "loss": 0.5658, "step": 1005 },
    { "epoch": 4.114052953156823, "grad_norm": 0.356545627117157, "learning_rate": 0.00014625382902408356, "loss": 0.5722, "step": 1010 },
    { "epoch": 4.134419551934827, "grad_norm": 0.30831289291381836, "learning_rate": 0.0001456210657353163, "loss": 0.5775, "step": 1015 },
    { "epoch": 4.154786150712831, "grad_norm": 0.32479575276374817, "learning_rate": 0.00014498598725911691, "loss": 0.5762, "step": 1020 },
    { "epoch": 4.175152749490835, "grad_norm": 0.3275298476219177, "learning_rate": 0.00014434862582458135, "loss": 0.5742, "step": 1025 },
    { "epoch": 4.195519348268839, "grad_norm": 0.32563847303390503, "learning_rate": 0.00014370901377666167, "loss": 0.5696, "step": 1030 },
    { "epoch": 4.215885947046843, "grad_norm": 0.3277311325073242, "learning_rate": 0.00014306718357452446, "loss": 0.5683, "step": 1035 },
    { "epoch": 4.236252545824847, "grad_norm": 0.3310577869415283, "learning_rate": 0.00014242316778990372, "loss": 0.5724, "step": 1040 },
    { "epoch": 4.256619144602851, "grad_norm": 0.3116869330406189, "learning_rate": 0.00014177699910544793, "loss": 0.5748, "step": 1045 },
    { "epoch": 4.276985743380855, "grad_norm": 0.4322071969509125, "learning_rate": 0.00014112871031306119, "loss": 0.5848, "step": 1050 },
    { "epoch": 4.2973523421588595, "grad_norm": 0.3503554165363312, "learning_rate": 0.00014047833431223938, "loss": 0.5732, "step": 1055 },
    { "epoch": 4.317718940936864, "grad_norm": 0.31962850689888, "learning_rate": 0.00013982590410840056, "loss": 0.5774, "step": 1060 },
    { "epoch": 4.338085539714868, "grad_norm": 0.32895031571388245, "learning_rate": 0.00013917145281120983, "loss": 0.5765, "step": 1065 },
    { "epoch": 4.358452138492872, "grad_norm": 0.3837604522705078, "learning_rate": 0.00013851501363289906, "loss": 0.5817, "step": 1070 },
    { "epoch": 4.378818737270876, "grad_norm": 0.35425829887390137, "learning_rate": 0.0001378566198865818, "loss": 0.58, "step": 1075 },
    { "epoch": 4.3991853360488795, "grad_norm": 0.33126357197761536, "learning_rate": 0.00013719630498456212, "loss": 0.5827, "step": 1080 },
    { "epoch": 4.419551934826884, "grad_norm": 0.3265593945980072, "learning_rate": 0.00013653410243663952, "loss": 0.5826, "step": 1085 },
    { "epoch": 4.439918533604888, "grad_norm": 0.4005591869354248, "learning_rate": 0.00013587004584840804, "loss": 0.5795, "step": 1090 },
    { "epoch": 4.460285132382892, "grad_norm": 0.36212101578712463, "learning_rate": 0.00013520416891955102, "loss": 0.5711, "step": 1095 },
    { "epoch": 4.480651731160896, "grad_norm": 0.340218186378479, "learning_rate": 0.00013453650544213076, "loss": 0.5821, "step": 1100 },
    { "epoch": 4.5010183299389, "grad_norm": 0.31296971440315247, "learning_rate": 0.00013386708929887377, "loss": 0.5798, "step": 1105 },
    { "epoch": 4.521384928716905, "grad_norm": 0.31503763794898987, "learning_rate": 0.00013319595446145116, "loss": 0.5757, "step": 1110 },
    { "epoch": 4.541751527494909, "grad_norm": 0.3179926574230194, "learning_rate": 0.00013252313498875472, "loss": 0.5822, "step": 1115 },
    { "epoch": 4.562118126272912, "grad_norm": 0.3273943066596985, "learning_rate": 0.00013184866502516845, "loss": 0.5796, "step": 1120 },
    { "epoch": 4.582484725050916, "grad_norm": 0.3253306448459625, "learning_rate": 0.00013117257879883583, "loss": 0.5789, "step": 1125 },
    { "epoch": 4.60285132382892, "grad_norm": 0.34257039427757263, "learning_rate": 0.00013049491061992274, "loss": 0.5798, "step": 1130 },
    { "epoch": 4.6232179226069245, "grad_norm": 0.32451459765434265, "learning_rate": 0.00012981569487887637, "loss": 0.5766, "step": 1135 },
    { "epoch": 4.643584521384929, "grad_norm": 0.31603989005088806, "learning_rate": 0.0001291349660446799, "loss": 0.5765, "step": 1140 },
    { "epoch": 4.663951120162933, "grad_norm": 0.3706742525100708, "learning_rate": 0.00012845275866310324, "loss": 0.5809, "step": 1145 },
    { "epoch": 4.684317718940937, "grad_norm": 0.32328835129737854, "learning_rate": 0.00012776910735495003, "loss": 0.5784, "step": 1150 },
    { "epoch": 4.704684317718941, "grad_norm": 0.3305988907814026, "learning_rate": 0.00012708404681430053, "loss": 0.5678, "step": 1155 },
    { "epoch": 4.725050916496945, "grad_norm": 0.33260881900787354, "learning_rate": 0.00012639761180675098, "loss": 0.5895, "step": 1160 },
    { "epoch": 4.745417515274949, "grad_norm": 0.34639254212379456, "learning_rate": 0.0001257098371676495, "loss": 0.5823, "step": 1165 },
    { "epoch": 4.765784114052953, "grad_norm": 0.3303966224193573, "learning_rate": 0.0001250207578003279, "loss": 0.5716, "step": 1170 },
    { "epoch": 4.786150712830957, "grad_norm": 0.3739718496799469, "learning_rate": 0.0001243304086743309, "loss": 0.5811, "step": 1175 },
    { "epoch": 4.806517311608961, "grad_norm": 0.36287903785705566, "learning_rate": 0.0001236388248236409, "loss": 0.5791, "step": 1180 },
    { "epoch": 4.826883910386965, "grad_norm": 0.3199651837348938, "learning_rate": 0.00012294604134490056, "loss": 0.5769, "step": 1185 },
    { "epoch": 4.84725050916497, "grad_norm": 0.3307904899120331, "learning_rate": 0.00012225209339563145, "loss": 0.5781, "step": 1190 },
    { "epoch": 4.867617107942974, "grad_norm": 0.3095554709434509, "learning_rate": 0.00012155701619244997, "loss": 0.5776, "step": 1195 },
    { "epoch": 4.887983706720978, "grad_norm": 0.318012535572052, "learning_rate": 0.0001208608450092801, "loss": 0.5816, "step": 1200 },
    { "epoch": 4.908350305498981, "grad_norm": 0.31566402316093445, "learning_rate": 0.00012016361517556334, "loss": 0.578, "step": 1205 },
    { "epoch": 4.928716904276985, "grad_norm": 0.32222023606300354, "learning_rate": 0.00011946536207446586, "loss": 0.5776, "step": 1210 },
    { "epoch": 4.94908350305499, "grad_norm": 0.31605854630470276, "learning_rate": 0.00011876612114108277, "loss": 0.5793, "step": 1215 },
    { "epoch": 4.969450101832994, "grad_norm": 0.3147432804107666, "learning_rate": 0.0001180659278606399, "loss": 0.5753, "step": 1220 },
    { "epoch": 4.989816700610998, "grad_norm": 0.3298138380050659, "learning_rate": 0.00011736481776669306, "loss": 0.5805, "step": 1225 },
    { "epoch": 4.997963340122199, "eval_loss": 1.9009366035461426, "eval_runtime": 0.3793, "eval_samples_per_second": 31.637, "eval_steps_per_second": 2.636, "step": 1227 },
    { "epoch": 5.010183299389002, "grad_norm": 0.3077872395515442, "learning_rate": 0.00011666282643932458, "loss": 0.5659, "step": 1230 },
    { "epoch": 5.030549898167006, "grad_norm": 0.3151206374168396, "learning_rate": 0.00011595998950333793, "loss": 0.5451, "step": 1235 },
    { "epoch": 5.05091649694501, "grad_norm": 0.31535080075263977, "learning_rate": 0.00011525634262644964, "loss": 0.5478, "step": 1240 },
    { "epoch": 5.071283095723015, "grad_norm": 0.3317250907421112, "learning_rate": 0.00011455192151747932, "loss": 0.5568, "step": 1245 },
    { "epoch": 5.091649694501018, "grad_norm": 0.34287726879119873, "learning_rate": 0.0001138467619245374, "loss": 0.5529, "step": 1250 },
    { "epoch": 5.112016293279022, "grad_norm": 0.34103691577911377, "learning_rate": 0.00011314089963321119, "loss": 0.5546, "step": 1255 },
    { "epoch": 5.132382892057026, "grad_norm": 0.34115347266197205, "learning_rate": 0.00011243437046474853, "loss": 0.5571, "step": 1260 },
    { "epoch": 5.15274949083503, "grad_norm": 0.350697785615921, "learning_rate": 0.0001117272102742402, "loss": 0.5534, "step": 1265 },
    { "epoch": 5.173116089613035, "grad_norm": 0.34019774198532104, "learning_rate": 0.00011101945494880012, "loss": 0.5536, "step": 1270 },
    { "epoch": 5.193482688391039, "grad_norm": 0.34322109818458557, "learning_rate": 0.00011031114040574437, "loss": 0.5577, "step": 1275 },
    { "epoch": 5.213849287169043, "grad_norm": 0.33681756258010864, "learning_rate": 0.00010960230259076818, "loss": 0.5555, "step": 1280 },
    { "epoch": 5.234215885947047, "grad_norm": 0.335644394159317, "learning_rate": 0.00010889297747612202, "loss": 0.5559, "step": 1285 },
    { "epoch": 5.254582484725051, "grad_norm": 0.33518001437187195, "learning_rate": 0.00010818320105878584, "loss": 0.5547, "step": 1290 },
    { "epoch": 5.274949083503055, "grad_norm": 0.32509303092956543, "learning_rate": 0.00010747300935864243, "loss": 0.5539, "step": 1295 },
    { "epoch": 5.295315682281059, "grad_norm": 0.33121034502983093, "learning_rate": 0.0001067624384166495, "loss": 0.5609, "step": 1300 },
    { "epoch": 5.315682281059063, "grad_norm": 0.34210404753685, "learning_rate": 0.00010605152429301055, "loss": 0.5629, "step": 1305 },
    { "epoch": 5.336048879837067, "grad_norm": 0.3202488422393799, "learning_rate": 0.0001053403030653449, "loss": 0.5616, "step": 1310 },
    { "epoch": 5.356415478615071, "grad_norm": 0.33203092217445374, "learning_rate": 0.00010462881082685691, "loss": 0.5625, "step": 1315 },
    { "epoch": 5.3767820773930755, "grad_norm": 0.37128540873527527, "learning_rate": 0.00010391708368450427, "loss": 0.5531, "step": 1320 },
    { "epoch": 5.39714867617108, "grad_norm": 0.32188987731933594, "learning_rate": 0.00010320515775716555, "loss": 0.5559, "step": 1325 },
    { "epoch": 5.417515274949084, "grad_norm": 0.33244988322257996, "learning_rate": 0.0001024930691738073, "loss": 0.5579, "step": 1330 },
    { "epoch": 5.437881873727088, "grad_norm": 0.3311491310596466, "learning_rate": 0.00010178085407165066, "loss": 0.5545, "step": 1335 },
    { "epoch": 5.458248472505091, "grad_norm": 0.3205713629722595, "learning_rate": 0.00010106854859433734, "loss": 0.5596, "step": 1340 },
    { "epoch": 5.478615071283095, "grad_norm": 0.3317239284515381, "learning_rate": 0.00010035618889009535, "loss": 0.5669, "step": 1345 },
    { "epoch": 5.4989816700611, "grad_norm": 0.3288976848125458, "learning_rate": 9.96438111099047e-05, "loss": 0.5633, "step": 1350 },
    { "epoch": 5.519348268839104, "grad_norm": 0.33682990074157715, "learning_rate": 9.893145140566269e-05, "loss": 0.5603, "step": 1355 },
    { "epoch": 5.539714867617108, "grad_norm": 0.3263765871524811, "learning_rate": 9.821914592834935e-05, "loss": 0.5604, "step": 1360 },
    { "epoch": 5.560081466395112, "grad_norm": 0.3341580033302307, "learning_rate": 9.750693082619273e-05, "loss": 0.5627, "step": 1365 },
    { "epoch": 5.580448065173116, "grad_norm": 0.3362567126750946, "learning_rate": 9.679484224283449e-05, "loss": 0.5595, "step": 1370 },
    { "epoch": 5.6008146639511205, "grad_norm": 0.33394739031791687, "learning_rate": 9.608291631549574e-05, "loss": 0.5548, "step": 1375 },
    { "epoch": 5.621181262729124, "grad_norm": 0.3413245975971222, "learning_rate": 9.537118917314311e-05, "loss": 0.5509, "step": 1380 },
    { "epoch": 5.641547861507128, "grad_norm": 0.32499754428863525, "learning_rate": 9.46596969346551e-05, "loss": 0.5582, "step": 1385 },
    { "epoch": 5.661914460285132, "grad_norm": 0.3440452218055725, "learning_rate": 9.39484757069895e-05, "loss": 0.5638, "step": 1390 },
    { "epoch": 5.682281059063136, "grad_norm": 0.3295697271823883, "learning_rate": 9.323756158335053e-05, "loss": 0.5592, "step": 1395 },
    { "epoch": 5.7026476578411405, "grad_norm": 0.32334938645362854, "learning_rate": 9.252699064135758e-05, "loss": 0.5609, "step": 1400 },
    { "epoch": 5.723014256619145, "grad_norm": 0.33664849400520325, "learning_rate": 9.181679894121421e-05, "loss": 0.5584, "step": 1405 },
    { "epoch": 5.743380855397149, "grad_norm": 0.3422396779060364, "learning_rate": 9.1107022523878e-05, "loss": 0.5553, "step": 1410 },
    { "epoch": 5.763747454175153, "grad_norm": 0.3184050917625427, "learning_rate": 9.039769740923183e-05, "loss": 0.5604, "step": 1415 },
    { "epoch": 5.784114052953157, "grad_norm": 0.326419860124588, "learning_rate": 8.968885959425567e-05, "loss": 0.5645, "step": 1420 },
    { "epoch": 5.804480651731161, "grad_norm": 0.33074718713760376, "learning_rate": 8.898054505119989e-05, "loss": 0.5654, "step": 1425 },
    { "epoch": 5.824847250509165, "grad_norm": 0.3398876488208771, "learning_rate": 8.827278972575983e-05, "loss": 0.5635, "step": 1430 },
    { "epoch": 5.845213849287169, "grad_norm": 0.33468544483184814, "learning_rate": 8.756562953525152e-05, "loss": 0.5557, "step": 1435 },
    { "epoch": 5.865580448065173, "grad_norm": 0.3315589427947998, "learning_rate": 8.685910036678883e-05, "loss": 0.5626, "step": 1440 },
    { "epoch": 5.885947046843177, "grad_norm": 0.3360842764377594, "learning_rate": 8.615323807546258e-05, "loss": 0.5588, "step": 1445 },
    { "epoch": 5.906313645621181, "grad_norm": 0.3325459063053131, "learning_rate": 8.54480784825207e-05, "loss": 0.5569, "step": 1450 },
    { "epoch": 5.9266802443991855, "grad_norm": 0.3266001343727112, "learning_rate": 8.474365737355038e-05, "loss": 0.5617, "step": 1455 },
    { "epoch": 5.94704684317719, "grad_norm": 0.3289891481399536, "learning_rate": 8.404001049666211e-05, "loss": 0.5572, "step": 1460 },
    { "epoch": 5.967413441955194, "grad_norm": 0.32171157002449036, "learning_rate": 8.333717356067543e-05, "loss": 0.5571, "step": 1465 },
    { "epoch": 5.987780040733197, "grad_norm": 0.3251187205314636, "learning_rate": 8.263518223330697e-05, "loss": 0.5585, "step": 1470 },
    { "epoch": 6.0, "eval_loss": 1.9298146963119507, "eval_runtime": 0.3781, "eval_samples_per_second": 31.738, "eval_steps_per_second": 2.645, "step": 1473 },
    { "epoch": 6.008146639511201, "grad_norm": 0.32297611236572266, "learning_rate": 8.193407213936012e-05, "loss": 0.5502, "step": 1475 },
    { "epoch": 6.0285132382892055, "grad_norm": 0.32624468207359314, "learning_rate": 8.123387885891725e-05, "loss": 0.5355, "step": 1480 },
    { "epoch": 6.04887983706721, "grad_norm": 0.34197378158569336, "learning_rate": 8.053463792553416e-05, "loss": 0.5415, "step": 1485 },
    { "epoch": 6.069246435845214, "grad_norm": 0.3497610092163086, "learning_rate": 7.98363848244367e-05, "loss": 0.5418, "step": 1490 },
    { "epoch": 6.089613034623218, "grad_norm": 0.35511311888694763, "learning_rate": 7.913915499071993e-05, "loss": 0.5353, "step": 1495 },
    { "epoch": 6.109979633401222, "grad_norm": 0.33935195207595825, "learning_rate": 7.844298380755003e-05, "loss": 0.5371, "step": 1500 },
    { "epoch": 6.130346232179226, "grad_norm": 0.33899983763694763, "learning_rate": 7.774790660436858e-05, "loss": 0.5379, "step": 1505 },
    { "epoch": 6.1507128309572305, "grad_norm": 0.35454773902893066, "learning_rate": 7.705395865509947e-05, "loss": 0.5353, "step": 1510 },
    { "epoch": 6.171079429735234, "grad_norm": 0.3619644045829773, "learning_rate": 7.636117517635912e-05, "loss": 0.5402, "step": 1515 },
    { "epoch": 6.191446028513238, "grad_norm": 0.34515708684921265, "learning_rate": 7.566959132566915e-05, "loss": 0.5377, "step": 1520 },
    { "epoch": 6.211812627291242, "grad_norm": 0.347515344619751, "learning_rate": 7.497924219967209e-05, "loss": 0.535, "step": 1525 },
    { "epoch": 6.232179226069246, "grad_norm": 0.34020859003067017, "learning_rate": 7.429016283235053e-05, "loss": 0.5406, "step": 1530 },
    { "epoch": 6.2525458248472505, "grad_norm": 0.34483760595321655, "learning_rate": 7.360238819324903e-05, "loss": 0.5377, "step": 1535 },
    { "epoch": 6.272912423625255, "grad_norm": 0.34881579875946045, "learning_rate": 7.291595318569951e-05, "loss": 0.5372, "step": 1540 },
    { "epoch": 6.293279022403259, "grad_norm": 0.37821272015571594, "learning_rate": 7.223089264505e-05, "loss": 0.5383, "step": 1545 },
    { "epoch": 6.313645621181263, "grad_norm": 0.35509517788887024, "learning_rate": 7.154724133689677e-05, "loss": 0.5464, "step": 1550 },
    { "epoch": 6.334012219959266, "grad_norm": 0.34138378500938416, "learning_rate": 7.086503395532012e-05, "loss": 0.5366, "step": 1555 },
    { "epoch": 6.3543788187372705, "grad_norm": 0.3617904484272003, "learning_rate": 7.018430512112366e-05, "loss": 0.541, "step": 1560 },
    { "epoch": 6.374745417515275, "grad_norm": 0.3442498743534088, "learning_rate": 6.950508938007729e-05, "loss": 0.5388, "step": 1565 },
    { "epoch": 6.395112016293279, "grad_norm": 0.3558500111103058, "learning_rate": 6.88274212011642e-05, "loss": 0.5363, "step": 1570 },
    { "epoch": 6.415478615071283, "grad_norm": 0.35669848322868347, "learning_rate": 6.815133497483157e-05, "loss": 0.5462, "step": 1575 },
    { "epoch": 6.435845213849287, "grad_norm": 0.3383665084838867, "learning_rate": 6.74768650112453e-05, "loss": 0.5389, "step": 1580 },
    { "epoch": 6.456211812627291, "grad_norm": 0.3450273275375366, "learning_rate": 6.680404553854885e-05, "loss": 0.5378, "step": 1585 },
    { "epoch": 6.4765784114052956, "grad_norm": 0.34580737352371216, "learning_rate": 6.613291070112624e-05, "loss": 0.5401, "step": 1590 },
    { "epoch": 6.4969450101833, "grad_norm": 0.34363240003585815, "learning_rate": 6.546349455786926e-05, "loss": 0.5378, "step": 1595 },
    { "epoch": 6.517311608961304, "grad_norm": 0.35278892517089844, "learning_rate": 6.479583108044899e-05, "loss": 0.5387, "step": 1600 },
    { "epoch": 6.537678207739307, "grad_norm": 0.36023351550102234, "learning_rate": 6.412995415159197e-05, "loss": 0.5421, "step": 1605 },
    { "epoch": 6.558044806517311, "grad_norm": 0.3527635633945465, "learning_rate": 6.34658975633605e-05, "loss": 0.5469, "step": 1610 },
    { "epoch": 6.5784114052953155, "grad_norm": 0.3474122881889343, "learning_rate": 6.28036950154379e-05, "loss": 0.5468, "step": 1615 },
    { "epoch": 6.59877800407332, "grad_norm": 0.336265504360199, "learning_rate": 6.214338011341824e-05, "loss": 0.5459, "step": 1620 },
    { "epoch": 6.619144602851324, "grad_norm": 0.3502029776573181, "learning_rate": 6.148498636710092e-05, "loss": 0.5399, "step": 1625 },
    { "epoch": 6.639511201629328, "grad_norm": 0.3454211950302124, "learning_rate": 6.082854718879021e-05, "loss": 0.5376, "step": 1630 },
    { "epoch": 6.659877800407332, "grad_norm": 0.3441862165927887, "learning_rate": 6.017409589159946e-05, "loss": 0.543, "step": 1635 },
    { "epoch": 6.680244399185336, "grad_norm": 0.33739784359931946, "learning_rate": 5.952166568776062e-05, "loss": 0.5441, "step": 1640 },
    { "epoch": 6.70061099796334, "grad_norm": 0.3396100103855133, "learning_rate": 5.887128968693887e-05, "loss": 0.5471, "step": 1645 },
    { "epoch": 6.720977596741344, "grad_norm": 0.35324475169181824, "learning_rate": 5.822300089455211e-05, "loss": 0.5408, "step": 1650 },
    { "epoch": 6.741344195519348, "grad_norm": 0.3377906084060669, "learning_rate": 5.7576832210096245e-05, "loss": 0.5416, "step": 1655 },
    { "epoch": 6.761710794297352, "grad_norm": 0.3413607180118561, "learning_rate": 5.6932816425475554e-05, "loss": 0.5425, "step": 1660 },
    { "epoch": 6.782077393075356, "grad_norm": 0.3485510051250458, "learning_rate": 5.629098622333837e-05, "loss": 0.5459, "step": 1665 },
    { "epoch": 6.802443991853361, "grad_norm": 0.3555828332901001, "learning_rate": 5.5651374175418656e-05, "loss": 0.5473, "step": 1670 },
    { "epoch": 6.822810590631365, "grad_norm": 0.34203040599823, "learning_rate": 5.5014012740883115e-05, "loss": 0.539, "step": 1675 },
    { "epoch": 6.843177189409369, "grad_norm": 0.34898054599761963, "learning_rate": 5.43789342646837e-05, "loss": 0.5388, "step": 1680 },
    { "epoch": 6.863543788187373, "grad_norm": 0.3571433126926422, "learning_rate": 5.37461709759165e-05, "loss": 0.5486, "step": 1685 },
    { "epoch": 6.883910386965377, "grad_norm": 0.34400326013565063, "learning_rate": 5.3115754986185774e-05, "loss": 0.545, "step": 1690 },
    { "epoch": 6.904276985743381, "grad_norm": 0.3509522080421448, "learning_rate": 5.248771828797474e-05, "loss": 0.5438, "step": 1695 },
    { "epoch": 6.924643584521385, "grad_norm": 0.33699506521224976, "learning_rate": 5.1862092753021754e-05, "loss": 0.5396, "step": 1700 },
    { "epoch": 6.945010183299389, "grad_norm": 0.3432750105857849, "learning_rate": 5.123891013070288e-05, "loss": 0.5468, "step": 1705 },
    { "epoch": 6.965376782077393, "grad_norm": 0.3491397500038147, "learning_rate": 5.061820204642085e-05, "loss": 0.5467, "step": 1710 },
    { "epoch": 6.985743380855397, "grad_norm": 0.34977835416793823, "learning_rate": 5.000000000000002e-05, "loss": 0.5413, "step": 1715 },
    { "epoch": 6.997963340122199, "eval_loss": 1.9540338516235352, "eval_runtime": 0.3818, "eval_samples_per_second": 31.43, "eval_steps_per_second": 2.619, "step": 1718 },
    { "epoch": 7.006109979633401, "grad_norm": 0.35306668281555176, "learning_rate": 4.938433536408771e-05, "loss": 0.5349, "step": 1720 },
    { "epoch": 7.026476578411406, "grad_norm": 0.3659164011478424, "learning_rate": 4.8771239382562287e-05, "loss": 0.5277, "step": 1725 },
    { "epoch": 7.04684317718941, "grad_norm": 0.35312947630882263, "learning_rate": 4.8160743168947496e-05, "loss": 0.5283, "step": 1730 },
    { "epoch": 7.067209775967413, "grad_norm": 0.37476396560668945, "learning_rate": 4.755287770483349e-05, "loss": 0.5254, "step": 1735 },
    { "epoch": 7.087576374745417, "grad_norm": 0.37969648838043213, "learning_rate": 4.694767383830453e-05, "loss": 0.5267, "step": 1740 },
    { "epoch": 7.107942973523421, "grad_norm": 0.3799395263195038, "learning_rate": 4.634516228237372e-05, "loss": 0.5191, "step": 1745 },
    { "epoch": 7.128309572301426, "grad_norm": 0.3764006495475769, "learning_rate": 4.574537361342407e-05, "loss": 0.5243, "step": 1750 },
    { "epoch": 7.14867617107943, "grad_norm": 0.34774523973464966, "learning_rate": 4.514833826965705e-05, "loss": 0.5261, "step": 1755 },
    { "epoch": 7.169042769857434, "grad_norm": 0.3704371750354767, "learning_rate": 4.4554086549547715e-05, "loss": 0.5313, "step": 1760 },
    { "epoch": 7.189409368635438, "grad_norm": 0.3685317039489746, "learning_rate": 4.3962648610307286e-05, "loss": 0.5301, "step": 1765 },
    { "epoch": 7.209775967413442, "grad_norm": 0.35810336470603943, "learning_rate": 4.337405446635264e-05, "loss": 0.5253, "step": 1770 },
    { "epoch": 7.2301425661914465, "grad_norm": 0.3616923987865448, "learning_rate": 4.278833398778306e-05, "loss": 0.5272, "step": 1775 },
    { "epoch": 7.25050916496945, "grad_norm": 0.3660266399383545, "learning_rate": 4.2205516898864463e-05, "loss": 0.5304, "step": 1780 },
    { "epoch": 7.270875763747454, "grad_norm": 0.36601680517196655, "learning_rate": 4.1625632776521037e-05, "loss": 0.5327, "step": 1785 },
    { "epoch": 7.291242362525458, "grad_norm": 0.36126160621643066, "learning_rate": 4.1048711048834033e-05, "loss": 0.5256, "step": 1790 },
    { "epoch": 7.311608961303462, "grad_norm": 0.3732444941997528, "learning_rate": 4.0474780993548566e-05, "loss": 0.5261, "step": 1795
|
}, |
|
{ |
|
"epoch": 7.3319755600814664, |
|
"grad_norm": 0.36716046929359436, |
|
"learning_rate": 3.990387173658774e-05, |
|
"loss": 0.53, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 7.352342158859471, |
|
"grad_norm": 0.3701815903186798, |
|
"learning_rate": 3.933601225057446e-05, |
|
"loss": 0.532, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 7.372708757637475, |
|
"grad_norm": 0.3797333836555481, |
|
"learning_rate": 3.8771231353361326e-05, |
|
"loss": 0.524, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 7.393075356415479, |
|
"grad_norm": 0.364581435918808, |
|
"learning_rate": 3.820955770656798e-05, |
|
"loss": 0.524, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 7.413441955193482, |
|
"grad_norm": 0.36119377613067627, |
|
"learning_rate": 3.7651019814126654e-05, |
|
"loss": 0.5268, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 7.433808553971486, |
|
"grad_norm": 0.35705694556236267, |
|
"learning_rate": 3.7095646020835754e-05, |
|
"loss": 0.524, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 7.454175152749491, |
|
"grad_norm": 0.3591172993183136, |
|
"learning_rate": 3.654346451092129e-05, |
|
"loss": 0.5306, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 7.474541751527495, |
|
"grad_norm": 0.3496874272823334, |
|
"learning_rate": 3.5994503306606497e-05, |
|
"loss": 0.5292, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 7.494908350305499, |
|
"grad_norm": 0.3553576171398163, |
|
"learning_rate": 3.544879026669005e-05, |
|
"loss": 0.5273, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 7.515274949083503, |
|
"grad_norm": 0.3603392541408539, |
|
"learning_rate": 3.4906353085131914e-05, |
|
"loss": 0.5367, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 7.535641547861507, |
|
"grad_norm": 0.3681369721889496, |
|
"learning_rate": 3.436721928964819e-05, |
|
"loss": 0.5243, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 7.5560081466395115, |
|
"grad_norm": 0.36058658361434937, |
|
"learning_rate": 3.383141624031408e-05, |
|
"loss": 0.5177, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 7.576374745417516, |
|
"grad_norm": 0.3690408170223236, |
|
"learning_rate": 3.329897112817529e-05, |
|
"loss": 0.5168, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 7.59674134419552, |
|
"grad_norm": 0.3683675229549408, |
|
"learning_rate": 3.276991097386831e-05, |
|
"loss": 0.5259, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 7.617107942973523, |
|
"grad_norm": 0.3538174033164978, |
|
"learning_rate": 3.2244262626249075e-05, |
|
"loss": 0.5241, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 7.637474541751527, |
|
"grad_norm": 0.357802152633667, |
|
"learning_rate": 3.172205276103033e-05, |
|
"loss": 0.5203, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 7.6578411405295315, |
|
"grad_norm": 0.3668569028377533, |
|
"learning_rate": 3.120330787942815e-05, |
|
"loss": 0.5295, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 7.678207739307536, |
|
"grad_norm": 0.3501560389995575, |
|
"learning_rate": 3.068805430681675e-05, |
|
"loss": 0.534, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 7.69857433808554, |
|
"grad_norm": 0.35364505648612976, |
|
"learning_rate": 3.0176318191392726e-05, |
|
"loss": 0.5311, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 7.718940936863544, |
|
"grad_norm": 0.3579385578632355, |
|
"learning_rate": 2.966812550284803e-05, |
|
"loss": 0.5261, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 7.739307535641548, |
|
"grad_norm": 0.3566991090774536, |
|
"learning_rate": 2.916350203105207e-05, |
|
"loss": 0.5238, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 7.7596741344195515, |
|
"grad_norm": 0.3725852966308594, |
|
"learning_rate": 2.8662473384742773e-05, |
|
"loss": 0.5279, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 7.780040733197556, |
|
"grad_norm": 0.3531646132469177, |
|
"learning_rate": 2.8165064990227252e-05, |
|
"loss": 0.5266, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 7.80040733197556, |
|
"grad_norm": 0.36464017629623413, |
|
"learning_rate": 2.76713020900912e-05, |
|
"loss": 0.5345, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 7.820773930753564, |
|
"grad_norm": 0.36327338218688965, |
|
"learning_rate": 2.718120974191809e-05, |
|
"loss": 0.5211, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 7.841140529531568, |
|
"grad_norm": 0.3583681583404541, |
|
"learning_rate": 2.669481281701739e-05, |
|
"loss": 0.531, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 7.861507128309572, |
|
"grad_norm": 0.36573490500450134, |
|
"learning_rate": 2.6212135999162445e-05, |
|
"loss": 0.5236, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 7.8818737270875765, |
|
"grad_norm": 0.3562600910663605, |
|
"learning_rate": 2.573320378333789e-05, |
|
"loss": 0.5273, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 7.902240325865581, |
|
"grad_norm": 0.3540510833263397, |
|
"learning_rate": 2.525804047449648e-05, |
|
"loss": 0.53, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 7.922606924643585, |
|
"grad_norm": 0.35902467370033264, |
|
"learning_rate": 2.478667018632562e-05, |
|
"loss": 0.5273, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 7.942973523421589, |
|
"grad_norm": 0.37109845876693726, |
|
"learning_rate": 2.4319116840023813e-05, |
|
"loss": 0.5305, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 7.963340122199592, |
|
"grad_norm": 0.36434054374694824, |
|
"learning_rate": 2.3855404163086558e-05, |
|
"loss": 0.5287, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 7.9837067209775965, |
|
"grad_norm": 0.35995838046073914, |
|
"learning_rate": 2.339555568810221e-05, |
|
"loss": 0.5295, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_loss": 1.981424331665039, |
|
"eval_runtime": 0.3785, |
|
"eval_samples_per_second": 31.705, |
|
"eval_steps_per_second": 2.642, |
|
"step": 1964 |
|
}, |
|
{ |
|
"epoch": 8.004073319755602, |
|
"grad_norm": 0.3557198941707611, |
|
"learning_rate": 2.2939594751557802e-05, |
|
"loss": 0.5225, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 8.024439918533606, |
|
"grad_norm": 0.362589955329895, |
|
"learning_rate": 2.248754449265483e-05, |
|
"loss": 0.5222, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 8.044806517311608, |
|
"grad_norm": 0.36456504464149475, |
|
"learning_rate": 2.2039427852134788e-05, |
|
"loss": 0.5174, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 8.065173116089612, |
|
"grad_norm": 0.35784122347831726, |
|
"learning_rate": 2.1595267571115163e-05, |
|
"loss": 0.5102, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 8.085539714867616, |
|
"grad_norm": 0.3696603775024414, |
|
"learning_rate": 2.1155086189935224e-05, |
|
"loss": 0.5194, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 8.10590631364562, |
|
"grad_norm": 0.3742619454860687, |
|
"learning_rate": 2.0718906047012242e-05, |
|
"loss": 0.5176, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 8.126272912423625, |
|
"grad_norm": 0.368327260017395, |
|
"learning_rate": 2.0286749277707782e-05, |
|
"loss": 0.5175, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 8.146639511201629, |
|
"grad_norm": 0.3673515319824219, |
|
"learning_rate": 1.985863781320435e-05, |
|
"loss": 0.5188, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 8.167006109979633, |
|
"grad_norm": 0.3596293330192566, |
|
"learning_rate": 1.9434593379392562e-05, |
|
"loss": 0.5179, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 8.187372708757637, |
|
"grad_norm": 0.36756983399391174, |
|
"learning_rate": 1.9014637495768483e-05, |
|
"loss": 0.5137, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 8.207739307535642, |
|
"grad_norm": 0.3634319603443146, |
|
"learning_rate": 1.8598791474341514e-05, |
|
"loss": 0.5173, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 8.228105906313646, |
|
"grad_norm": 0.37804871797561646, |
|
"learning_rate": 1.8187076418552974e-05, |
|
"loss": 0.5177, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 8.24847250509165, |
|
"grad_norm": 0.3717120885848999, |
|
"learning_rate": 1.777951322220508e-05, |
|
"loss": 0.5151, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 8.268839103869654, |
|
"grad_norm": 0.3710237443447113, |
|
"learning_rate": 1.7376122568400532e-05, |
|
"loss": 0.5186, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 8.289205702647658, |
|
"grad_norm": 0.369852215051651, |
|
"learning_rate": 1.697692492849299e-05, |
|
"loss": 0.5146, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 8.309572301425662, |
|
"grad_norm": 0.3662493824958801, |
|
"learning_rate": 1.658194056104825e-05, |
|
"loss": 0.5163, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 8.329938900203667, |
|
"grad_norm": 0.3654170334339142, |
|
"learning_rate": 1.619118951081594e-05, |
|
"loss": 0.5182, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 8.35030549898167, |
|
"grad_norm": 0.3634709119796753, |
|
"learning_rate": 1.580469160771253e-05, |
|
"loss": 0.5182, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 8.370672097759675, |
|
"grad_norm": 0.3728494942188263, |
|
"learning_rate": 1.54224664658148e-05, |
|
"loss": 0.5193, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 8.391038696537677, |
|
"grad_norm": 0.3711146116256714, |
|
"learning_rate": 1.504453348236461e-05, |
|
"loss": 0.5183, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 8.411405295315681, |
|
"grad_norm": 0.36539244651794434, |
|
"learning_rate": 1.467091183678444e-05, |
|
"loss": 0.5167, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 8.431771894093686, |
|
"grad_norm": 0.367108017206192, |
|
"learning_rate": 1.430162048970407e-05, |
|
"loss": 0.525, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 8.45213849287169, |
|
"grad_norm": 0.3688015341758728, |
|
"learning_rate": 1.3936678181998374e-05, |
|
"loss": 0.5202, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 8.472505091649694, |
|
"grad_norm": 0.36969494819641113, |
|
"learning_rate": 1.357610343383634e-05, |
|
"loss": 0.5214, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 8.492871690427698, |
|
"grad_norm": 0.36953508853912354, |
|
"learning_rate": 1.3219914543741008e-05, |
|
"loss": 0.5217, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 8.513238289205702, |
|
"grad_norm": 0.3687889575958252, |
|
"learning_rate": 1.286812958766106e-05, |
|
"loss": 0.5153, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 8.533604887983707, |
|
"grad_norm": 0.36163973808288574, |
|
"learning_rate": 1.2520766418053408e-05, |
|
"loss": 0.5167, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 8.55397148676171, |
|
"grad_norm": 0.37320175766944885, |
|
"learning_rate": 1.2177842662977135e-05, |
|
"loss": 0.5144, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 8.574338085539715, |
|
"grad_norm": 0.36868005990982056, |
|
"learning_rate": 1.1839375725199098e-05, |
|
"loss": 0.5227, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 8.594704684317719, |
|
"grad_norm": 0.36514589190483093, |
|
"learning_rate": 1.1505382781310559e-05, |
|
"loss": 0.5157, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 8.615071283095723, |
|
"grad_norm": 0.3700886070728302, |
|
"learning_rate": 1.1175880780855608e-05, |
|
"loss": 0.5187, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 8.635437881873727, |
|
"grad_norm": 0.3674730956554413, |
|
"learning_rate": 1.0850886445471054e-05, |
|
"loss": 0.5153, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 8.655804480651732, |
|
"grad_norm": 0.37101948261260986, |
|
"learning_rate": 1.0530416268037702e-05, |
|
"loss": 0.5149, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 8.676171079429736, |
|
"grad_norm": 0.36893007159233093, |
|
"learning_rate": 1.021448651184349e-05, |
|
"loss": 0.5183, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 8.69653767820774, |
|
"grad_norm": 0.3680102527141571, |
|
"learning_rate": 9.903113209758096e-06, |
|
"loss": 0.5163, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 8.716904276985744, |
|
"grad_norm": 0.36099961400032043, |
|
"learning_rate": 9.596312163419274e-06, |
|
"loss": 0.5188, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 8.737270875763748, |
|
"grad_norm": 0.36749574542045593, |
|
"learning_rate": 9.294098942430996e-06, |
|
"loss": 0.5181, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 8.757637474541752, |
|
"grad_norm": 0.36886829137802124, |
|
"learning_rate": 8.99648888357335e-06, |
|
"loss": 0.5171, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 8.778004073319755, |
|
"grad_norm": 0.3683699071407318, |
|
"learning_rate": 8.703497090024116e-06, |
|
"loss": 0.5118, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 8.798370672097759, |
|
"grad_norm": 0.3667095899581909, |
|
"learning_rate": 8.415138430592428e-06, |
|
"loss": 0.5157, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 8.818737270875763, |
|
"grad_norm": 0.36200326681137085, |
|
"learning_rate": 8.131427538964164e-06, |
|
"loss": 0.5167, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 8.839103869653767, |
|
"grad_norm": 0.3657187521457672, |
|
"learning_rate": 7.852378812959227e-06, |
|
"loss": 0.5155, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 8.859470468431772, |
|
"grad_norm": 0.3640839457511902, |
|
"learning_rate": 7.578006413801075e-06, |
|
"loss": 0.5179, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 8.879837067209776, |
|
"grad_norm": 0.3747439980506897, |
|
"learning_rate": 7.308324265397836e-06, |
|
"loss": 0.5154, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 8.90020366598778, |
|
"grad_norm": 0.366439551115036, |
|
"learning_rate": 7.0433460536358685e-06, |
|
"loss": 0.5147, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 8.920570264765784, |
|
"grad_norm": 0.37617239356040955, |
|
"learning_rate": 6.783085225685148e-06, |
|
"loss": 0.52, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 8.940936863543788, |
|
"grad_norm": 0.3725389540195465, |
|
"learning_rate": 6.527554989316897e-06, |
|
"loss": 0.517, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 8.961303462321792, |
|
"grad_norm": 0.3703247606754303, |
|
"learning_rate": 6.276768312233228e-06, |
|
"loss": 0.52, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 8.981670061099797, |
|
"grad_norm": 0.35923415422439575, |
|
"learning_rate": 6.030737921409169e-06, |
|
"loss": 0.5154, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 8.9979633401222, |
|
"eval_loss": 1.9978961944580078, |
|
"eval_runtime": 0.3817, |
|
"eval_samples_per_second": 31.441, |
|
"eval_steps_per_second": 2.62, |
|
"step": 2209 |
|
}, |
|
{ |
|
"epoch": 9.0020366598778, |
|
"grad_norm": 0.35924673080444336, |
|
"learning_rate": 5.789476302446662e-06, |
|
"loss": 0.5142, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 9.022403258655805, |
|
"grad_norm": 0.3611084818840027, |
|
"learning_rate": 5.552995698941088e-06, |
|
"loss": 0.5042, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 9.04276985743381, |
|
"grad_norm": 0.3653542101383209, |
|
"learning_rate": 5.321308111859791e-06, |
|
"loss": 0.514, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 9.063136456211813, |
|
"grad_norm": 0.36559605598449707, |
|
"learning_rate": 5.094425298933136e-06, |
|
"loss": 0.508, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 9.083503054989817, |
|
"grad_norm": 0.3634701669216156, |
|
"learning_rate": 4.872358774057806e-06, |
|
"loss": 0.5073, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 9.103869653767822, |
|
"grad_norm": 0.36651164293289185, |
|
"learning_rate": 4.655119806712482e-06, |
|
"loss": 0.5087, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 9.124236252545824, |
|
"grad_norm": 0.36734244227409363, |
|
"learning_rate": 4.442719421385922e-06, |
|
"loss": 0.5099, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 9.144602851323828, |
|
"grad_norm": 0.3621695339679718, |
|
"learning_rate": 4.235168397017541e-06, |
|
"loss": 0.5093, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 9.164969450101832, |
|
"grad_norm": 0.36267393827438354, |
|
"learning_rate": 4.0324772664503296e-06, |
|
"loss": 0.5087, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 9.185336048879837, |
|
"grad_norm": 0.36996185779571533, |
|
"learning_rate": 3.8346563158963785e-06, |
|
"loss": 0.5129, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 9.20570264765784, |
|
"grad_norm": 0.3650117814540863, |
|
"learning_rate": 3.641715584414862e-06, |
|
"loss": 0.5145, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 9.226069246435845, |
|
"grad_norm": 0.37506943941116333, |
|
"learning_rate": 3.453664863402595e-06, |
|
"loss": 0.5174, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 9.246435845213849, |
|
"grad_norm": 0.37266090512275696, |
|
"learning_rate": 3.270513696097055e-06, |
|
"loss": 0.5149, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 9.266802443991853, |
|
"grad_norm": 0.371489554643631, |
|
"learning_rate": 3.092271377092215e-06, |
|
"loss": 0.5155, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 9.287169042769857, |
|
"grad_norm": 0.36911362409591675, |
|
"learning_rate": 2.9189469518666967e-06, |
|
"loss": 0.5125, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 9.307535641547862, |
|
"grad_norm": 0.37086358666419983, |
|
"learning_rate": 2.7505492163248934e-06, |
|
"loss": 0.5116, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 9.327902240325866, |
|
"grad_norm": 0.37481385469436646, |
|
"learning_rate": 2.587086716350473e-06, |
|
"loss": 0.5127, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 9.34826883910387, |
|
"grad_norm": 0.3686206638813019, |
|
"learning_rate": 2.4285677473727118e-06, |
|
"loss": 0.5168, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 9.368635437881874, |
|
"grad_norm": 0.37265482544898987, |
|
"learning_rate": 2.2750003539455998e-06, |
|
"loss": 0.5067, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 9.389002036659878, |
|
"grad_norm": 0.36506715416908264, |
|
"learning_rate": 2.1263923293394774e-06, |
|
"loss": 0.5135, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 9.409368635437882, |
|
"grad_norm": 0.36821919679641724, |
|
"learning_rate": 1.9827512151456173e-06, |
|
"loss": 0.514, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 9.429735234215887, |
|
"grad_norm": 0.3649906814098358, |
|
"learning_rate": 1.8440843008934561e-06, |
|
"loss": 0.5127, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 9.45010183299389, |
|
"grad_norm": 0.36437344551086426, |
|
"learning_rate": 1.7103986236807313e-06, |
|
"loss": 0.5087, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 9.470468431771893, |
|
"grad_norm": 0.36268892884254456, |
|
"learning_rate": 1.5817009678162685e-06, |
|
"loss": 0.5157, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 9.490835030549897, |
|
"grad_norm": 0.37203824520111084, |
|
"learning_rate": 1.4579978644757464e-06, |
|
"loss": 0.5097, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 9.511201629327902, |
|
"grad_norm": 0.3669569492340088, |
|
"learning_rate": 1.339295591370271e-06, |
|
"loss": 0.5134, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 9.531568228105906, |
|
"grad_norm": 0.37304890155792236, |
|
"learning_rate": 1.2256001724277321e-06, |
|
"loss": 0.5177, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 9.55193482688391, |
|
"grad_norm": 0.36801040172576904, |
|
"learning_rate": 1.1169173774871478e-06, |
|
"loss": 0.5135, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 9.572301425661914, |
|
"grad_norm": 0.3649773895740509, |
|
"learning_rate": 1.013252722005842e-06, |
|
"loss": 0.5096, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 9.592668024439918, |
|
"grad_norm": 0.3708580732345581, |
|
"learning_rate": 9.146114667795358e-07, |
|
"loss": 0.5084, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 9.613034623217922, |
|
"grad_norm": 0.3649328351020813, |
|
"learning_rate": 8.209986176753948e-07, |
|
"loss": 0.5143, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 9.633401221995927, |
|
"grad_norm": 0.3666220009326935, |
|
"learning_rate": 7.324189253779312e-07, |
|
"loss": 0.508, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 9.65376782077393, |
|
"grad_norm": 0.36750587821006775, |
|
"learning_rate": 6.488768851480087e-07, |
|
"loss": 0.51, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 9.674134419551935, |
|
"grad_norm": 0.36811190843582153, |
|
"learning_rate": 5.703767365946466e-07, |
|
"loss": 0.512, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 9.69450101832994, |
|
"grad_norm": 0.37304404377937317, |
|
"learning_rate": 4.969224634598591e-07, |
|
"loss": 0.5131, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 9.714867617107943, |
|
"grad_norm": 0.37467247247695923, |
|
"learning_rate": 4.2851779341654964e-07, |
|
"loss": 0.5167, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 9.735234215885948, |
|
"grad_norm": 0.37464529275894165, |
|
"learning_rate": 3.651661978793075e-07, |
|
"loss": 0.5116, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 9.755600814663952, |
|
"grad_norm": 0.36434635519981384, |
|
"learning_rate": 3.068708918281926e-07, |
|
"loss": 0.5088, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 9.775967413441956, |
|
"grad_norm": 0.3670872747898102, |
|
"learning_rate": 2.536348336456551e-07, |
|
"loss": 0.5172, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 9.79633401221996, |
|
"grad_norm": 0.36443930864334106, |
|
"learning_rate": 2.054607249663665e-07, |
|
"loss": 0.5135, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 9.816700610997962, |
|
"grad_norm": 0.3700142204761505, |
|
"learning_rate": 1.6235101054011824e-07, |
|
"loss": 0.5085, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 9.837067209775967, |
|
"grad_norm": 0.3709539473056793, |
|
"learning_rate": 1.2430787810776555e-07, |
|
"loss": 0.5164, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 9.85743380855397, |
|
"grad_norm": 0.36750784516334534, |
|
"learning_rate": 9.133325829017158e-08, |
|
"loss": 0.5163, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 9.877800407331975, |
|
"grad_norm": 0.3683048188686371, |
|
"learning_rate": 6.342882449029696e-08, |
|
"loss": 0.5113, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 9.89816700610998, |
|
"grad_norm": 0.37231889367103577, |
|
"learning_rate": 4.059599280819004e-08, |
|
"loss": 0.5136, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 9.918533604887983, |
|
"grad_norm": 0.36660727858543396, |
|
"learning_rate": 2.2835921969210917e-08, |
|
"loss": 0.5183, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 9.938900203665987, |
|
"grad_norm": 0.3696270287036896, |
|
"learning_rate": 1.0149513265145238e-08, |
|
"loss": 0.5079, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 9.959266802443992, |
|
"grad_norm": 0.36083751916885376, |
|
"learning_rate": 2.5374105085518295e-09, |
|
"loss": 0.5151, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 9.979633401221996, |
|
"grad_norm": 0.3677830100059509, |
|
"learning_rate": 0.0, |
|
"loss": 0.508, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 9.979633401221996, |
|
"eval_loss": 2.0012366771698, |
|
"eval_runtime": 0.3701, |
|
"eval_samples_per_second": 32.425, |
|
"eval_steps_per_second": 2.702, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 9.979633401221996, |
|
"step": 2450, |
|
"total_flos": 6.866432485157241e+18, |
|
"train_loss": 0.5986210168137842, |
|
"train_runtime": 9994.3917, |
|
"train_samples_per_second": 15.694, |
|
"train_steps_per_second": 0.245 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 2450, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 10, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 6.866432485157241e+18, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|