{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.999793431109275, |
|
"eval_steps": 500, |
|
"global_step": 1210, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0008262755629002272, |
|
"grad_norm": 23.554044233313164, |
|
"learning_rate": 8.264462809917357e-08, |
|
"loss": 1.4094, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.004131377814501136, |
|
"grad_norm": 23.487881173685214, |
|
"learning_rate": 4.132231404958678e-07, |
|
"loss": 1.44, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.008262755629002272, |
|
"grad_norm": 8.324804089201631, |
|
"learning_rate": 8.264462809917356e-07, |
|
"loss": 1.3352, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01239413344350341, |
|
"grad_norm": 10.482872108095675, |
|
"learning_rate": 1.2396694214876035e-06, |
|
"loss": 1.1699, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.016525511258004544, |
|
"grad_norm": 3.0017247032336667, |
|
"learning_rate": 1.6528925619834712e-06, |
|
"loss": 1.0235, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.02065688907250568, |
|
"grad_norm": 2.760976256425166, |
|
"learning_rate": 2.066115702479339e-06, |
|
"loss": 0.9675, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.02478826688700682, |
|
"grad_norm": 2.376505582612684, |
|
"learning_rate": 2.479338842975207e-06, |
|
"loss": 0.9406, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.028919644701507954, |
|
"grad_norm": 2.2154227436349765, |
|
"learning_rate": 2.8925619834710743e-06, |
|
"loss": 0.9302, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.03305102251600909, |
|
"grad_norm": 2.3739150674432445, |
|
"learning_rate": 3.3057851239669424e-06, |
|
"loss": 0.8994, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.03718240033051023, |
|
"grad_norm": 2.2109809527940554, |
|
"learning_rate": 3.71900826446281e-06, |
|
"loss": 0.8982, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.04131377814501136, |
|
"grad_norm": 2.28822301545997, |
|
"learning_rate": 4.132231404958678e-06, |
|
"loss": 0.8837, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.0454451559595125, |
|
"grad_norm": 2.2559023546514343, |
|
"learning_rate": 4.5454545454545455e-06, |
|
"loss": 0.8754, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.04957653377401364, |
|
"grad_norm": 2.2137771037984684, |
|
"learning_rate": 4.958677685950414e-06, |
|
"loss": 0.8582, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.05370791158851477, |
|
"grad_norm": 2.373934998656087, |
|
"learning_rate": 5.371900826446281e-06, |
|
"loss": 0.8593, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.05783928940301591, |
|
"grad_norm": 2.467630003052232, |
|
"learning_rate": 5.785123966942149e-06, |
|
"loss": 0.856, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.06197066721751704, |
|
"grad_norm": 2.424099124838778, |
|
"learning_rate": 6.198347107438017e-06, |
|
"loss": 0.853, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.06610204503201818, |
|
"grad_norm": 2.362990499630114, |
|
"learning_rate": 6.611570247933885e-06, |
|
"loss": 0.8383, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07023342284651932, |
|
"grad_norm": 2.399473277862941, |
|
"learning_rate": 7.0247933884297525e-06, |
|
"loss": 0.8318, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.07436480066102046, |
|
"grad_norm": 2.5700793481674755, |
|
"learning_rate": 7.43801652892562e-06, |
|
"loss": 0.8303, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.07849617847552158, |
|
"grad_norm": 2.3911560791916466, |
|
"learning_rate": 7.851239669421489e-06, |
|
"loss": 0.8283, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.08262755629002272, |
|
"grad_norm": 2.5970628827400057, |
|
"learning_rate": 8.264462809917356e-06, |
|
"loss": 0.8249, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.08675893410452386, |
|
"grad_norm": 2.3382293960099143, |
|
"learning_rate": 8.677685950413224e-06, |
|
"loss": 0.8186, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.090890311919025, |
|
"grad_norm": 2.423365953954326, |
|
"learning_rate": 9.090909090909091e-06, |
|
"loss": 0.8202, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.09502168973352614, |
|
"grad_norm": 2.5282725418752694, |
|
"learning_rate": 9.50413223140496e-06, |
|
"loss": 0.8122, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.09915306754802727, |
|
"grad_norm": 2.3639158309512522, |
|
"learning_rate": 9.917355371900828e-06, |
|
"loss": 0.8127, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.1032844453625284, |
|
"grad_norm": 2.844448120421469, |
|
"learning_rate": 9.999667111219573e-06, |
|
"loss": 0.7962, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.10741582317702954, |
|
"grad_norm": 3.443216907128371, |
|
"learning_rate": 9.998314826517564e-06, |
|
"loss": 0.8031, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.11154720099153068, |
|
"grad_norm": 2.681450835682187, |
|
"learning_rate": 9.995922621477252e-06, |
|
"loss": 0.805, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.11567857880603181, |
|
"grad_norm": 2.41859239043235, |
|
"learning_rate": 9.99249099380692e-06, |
|
"loss": 0.7891, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.11980995662053295, |
|
"grad_norm": 2.2320583844472375, |
|
"learning_rate": 9.988020657471078e-06, |
|
"loss": 0.7945, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.12394133443503408, |
|
"grad_norm": 2.6404700133557064, |
|
"learning_rate": 9.98251254254193e-06, |
|
"loss": 0.7789, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.12807271224953523, |
|
"grad_norm": 2.4700012252546855, |
|
"learning_rate": 9.97596779500586e-06, |
|
"loss": 0.7776, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.13220409006403636, |
|
"grad_norm": 2.534567680863635, |
|
"learning_rate": 9.968387776525009e-06, |
|
"loss": 0.7665, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.13633546787853748, |
|
"grad_norm": 2.3195785880081274, |
|
"learning_rate": 9.959774064153977e-06, |
|
"loss": 0.7633, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.14046684569303863, |
|
"grad_norm": 2.6303462252799124, |
|
"learning_rate": 9.950128450011706e-06, |
|
"loss": 0.7587, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.14459822350753976, |
|
"grad_norm": 2.4452240563950687, |
|
"learning_rate": 9.939452940908627e-06, |
|
"loss": 0.7627, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.1487296013220409, |
|
"grad_norm": 2.3656117475844294, |
|
"learning_rate": 9.927749757929125e-06, |
|
"loss": 0.7546, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.15286097913654204, |
|
"grad_norm": 2.307155404533025, |
|
"learning_rate": 9.915021335969452e-06, |
|
"loss": 0.744, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.15699235695104316, |
|
"grad_norm": 2.3398594948394127, |
|
"learning_rate": 9.901270323231114e-06, |
|
"loss": 0.7423, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.1611237347655443, |
|
"grad_norm": 2.3156292065612214, |
|
"learning_rate": 9.886499580669917e-06, |
|
"loss": 0.7396, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.16525511258004544, |
|
"grad_norm": 2.574743453191487, |
|
"learning_rate": 9.870712181400726e-06, |
|
"loss": 0.7335, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.1693864903945466, |
|
"grad_norm": 2.339577898310028, |
|
"learning_rate": 9.853911410058097e-06, |
|
"loss": 0.7398, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.17351786820904772, |
|
"grad_norm": 2.489900169912843, |
|
"learning_rate": 9.836100762112887e-06, |
|
"loss": 0.7423, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.17764924602354884, |
|
"grad_norm": 2.5114661447059365, |
|
"learning_rate": 9.817283943145014e-06, |
|
"loss": 0.7273, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.18178062383805, |
|
"grad_norm": 2.4765147278897097, |
|
"learning_rate": 9.797464868072489e-06, |
|
"loss": 0.7171, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.18591200165255112, |
|
"grad_norm": 2.35952144225102, |
|
"learning_rate": 9.776647660336905e-06, |
|
"loss": 0.7165, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.19004337946705227, |
|
"grad_norm": 2.3928640553968115, |
|
"learning_rate": 9.754836651045538e-06, |
|
"loss": 0.7228, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.1941747572815534, |
|
"grad_norm": 2.359655168950183, |
|
"learning_rate": 9.732036378070243e-06, |
|
"loss": 0.6999, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.19830613509605455, |
|
"grad_norm": 2.7166804592398854, |
|
"learning_rate": 9.708251585103324e-06, |
|
"loss": 0.6924, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.20243751291055567, |
|
"grad_norm": 2.6360301220005415, |
|
"learning_rate": 9.683487220670595e-06, |
|
"loss": 0.7038, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.2065688907250568, |
|
"grad_norm": 2.358814454829314, |
|
"learning_rate": 9.657748437101819e-06, |
|
"loss": 0.7076, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.21070026853955795, |
|
"grad_norm": 2.250240362766412, |
|
"learning_rate": 9.631040589458742e-06, |
|
"loss": 0.6833, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.21483164635405907, |
|
"grad_norm": 2.4709639248779083, |
|
"learning_rate": 9.603369234420944e-06, |
|
"loss": 0.6751, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.21896302416856023, |
|
"grad_norm": 2.315474134768072, |
|
"learning_rate": 9.574740129129767e-06, |
|
"loss": 0.6988, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.22309440198306135, |
|
"grad_norm": 2.2090103807004975, |
|
"learning_rate": 9.545159229990493e-06, |
|
"loss": 0.6854, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.22722577979756248, |
|
"grad_norm": 2.366702578463947, |
|
"learning_rate": 9.514632691433108e-06, |
|
"loss": 0.6657, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.23135715761206363, |
|
"grad_norm": 2.298899028385364, |
|
"learning_rate": 9.483166864631837e-06, |
|
"loss": 0.663, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.23548853542656475, |
|
"grad_norm": 2.2714246758736394, |
|
"learning_rate": 9.450768296183764e-06, |
|
"loss": 0.6655, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.2396199132410659, |
|
"grad_norm": 2.1782463852503438, |
|
"learning_rate": 9.417443726746776e-06, |
|
"loss": 0.6678, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.24375129105556703, |
|
"grad_norm": 2.206125943355958, |
|
"learning_rate": 9.383200089637143e-06, |
|
"loss": 0.6579, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.24788266887006816, |
|
"grad_norm": 2.4822099384962133, |
|
"learning_rate": 9.348044509387022e-06, |
|
"loss": 0.6688, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.2520140466845693, |
|
"grad_norm": 2.6347462586981827, |
|
"learning_rate": 9.311984300262151e-06, |
|
"loss": 0.6723, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.25614542449907046, |
|
"grad_norm": 2.3969886300206915, |
|
"learning_rate": 9.275026964740101e-06, |
|
"loss": 0.6495, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.2602768023135716, |
|
"grad_norm": 2.2255060581251733, |
|
"learning_rate": 9.237180191949347e-06, |
|
"loss": 0.6548, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.2644081801280727, |
|
"grad_norm": 2.4470079917886585, |
|
"learning_rate": 9.198451856069514e-06, |
|
"loss": 0.6402, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.26853955794257384, |
|
"grad_norm": 2.232598978818489, |
|
"learning_rate": 9.158850014693123e-06, |
|
"loss": 0.6428, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.27267093575707496, |
|
"grad_norm": 2.2780042831807807, |
|
"learning_rate": 9.118382907149164e-06, |
|
"loss": 0.6449, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.27680231357157614, |
|
"grad_norm": 2.250070488730356, |
|
"learning_rate": 9.077058952788888e-06, |
|
"loss": 0.6248, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.28093369138607727, |
|
"grad_norm": 2.257817990210915, |
|
"learning_rate": 9.034886749234112e-06, |
|
"loss": 0.6287, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.2850650692005784, |
|
"grad_norm": 2.2753575010897373, |
|
"learning_rate": 8.991875070588449e-06, |
|
"loss": 0.631, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.2891964470150795, |
|
"grad_norm": 2.5652909583991534, |
|
"learning_rate": 8.948032865611823e-06, |
|
"loss": 0.6232, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.29332782482958064, |
|
"grad_norm": 2.3365773173303843, |
|
"learning_rate": 8.90336925585864e-06, |
|
"loss": 0.608, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.2974592026440818, |
|
"grad_norm": 2.27141916228481, |
|
"learning_rate": 8.857893533780015e-06, |
|
"loss": 0.6048, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.30159058045858295, |
|
"grad_norm": 2.121899092032811, |
|
"learning_rate": 8.811615160790428e-06, |
|
"loss": 0.6192, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.30572195827308407, |
|
"grad_norm": 2.210463830947181, |
|
"learning_rate": 8.764543765299245e-06, |
|
"loss": 0.611, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.3098533360875852, |
|
"grad_norm": 2.2581243380183835, |
|
"learning_rate": 8.716689140707488e-06, |
|
"loss": 0.6116, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.3139847139020863, |
|
"grad_norm": 2.291459985719934, |
|
"learning_rate": 8.668061243370273e-06, |
|
"loss": 0.6094, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.3181160917165875, |
|
"grad_norm": 2.242064952311342, |
|
"learning_rate": 8.61867019052535e-06, |
|
"loss": 0.6076, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.3222474695310886, |
|
"grad_norm": 2.3770181909891304, |
|
"learning_rate": 8.568526258188172e-06, |
|
"loss": 0.6047, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.32637884734558975, |
|
"grad_norm": 2.4304957747194593, |
|
"learning_rate": 8.517639879013918e-06, |
|
"loss": 0.6037, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.3305102251600909, |
|
"grad_norm": 2.159929931604691, |
|
"learning_rate": 8.466021640126946e-06, |
|
"loss": 0.5924, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.334641602974592, |
|
"grad_norm": 2.181588993237923, |
|
"learning_rate": 8.413682280918094e-06, |
|
"loss": 0.5859, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.3387729807890932, |
|
"grad_norm": 2.239848195768108, |
|
"learning_rate": 8.3606326908103e-06, |
|
"loss": 0.5808, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.3429043586035943, |
|
"grad_norm": 2.2395305817254165, |
|
"learning_rate": 8.306883906993022e-06, |
|
"loss": 0.5805, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.34703573641809543, |
|
"grad_norm": 2.276771540411031, |
|
"learning_rate": 8.25244711212589e-06, |
|
"loss": 0.5771, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.35116711423259656, |
|
"grad_norm": 2.2443733616231674, |
|
"learning_rate": 8.197333632012123e-06, |
|
"loss": 0.5724, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.3552984920470977, |
|
"grad_norm": 2.266086543452868, |
|
"learning_rate": 8.141554933242135e-06, |
|
"loss": 0.5766, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.35942986986159886, |
|
"grad_norm": 2.3440174185212426, |
|
"learning_rate": 8.08512262080787e-06, |
|
"loss": 0.5704, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.3635612476761, |
|
"grad_norm": 2.231383713024198, |
|
"learning_rate": 8.028048435688333e-06, |
|
"loss": 0.5672, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.3676926254906011, |
|
"grad_norm": 2.210345864732212, |
|
"learning_rate": 7.970344252406832e-06, |
|
"loss": 0.5654, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.37182400330510224, |
|
"grad_norm": 2.201487752141703, |
|
"learning_rate": 7.912022076560426e-06, |
|
"loss": 0.5608, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.37595538111960336, |
|
"grad_norm": 2.2126765922615017, |
|
"learning_rate": 7.853094042322121e-06, |
|
"loss": 0.5676, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.38008675893410454, |
|
"grad_norm": 2.368648057415258, |
|
"learning_rate": 7.7935724099163e-06, |
|
"loss": 0.5611, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.38421813674860567, |
|
"grad_norm": 2.249897659892324, |
|
"learning_rate": 7.733469563067928e-06, |
|
"loss": 0.5632, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.3883495145631068, |
|
"grad_norm": 2.257196050360634, |
|
"learning_rate": 7.67279800642607e-06, |
|
"loss": 0.5414, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.3924808923776079, |
|
"grad_norm": 2.1223073675520125, |
|
"learning_rate": 7.611570362962247e-06, |
|
"loss": 0.5421, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.3966122701921091, |
|
"grad_norm": 2.209118984554094, |
|
"learning_rate": 7.549799371344175e-06, |
|
"loss": 0.5475, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.4007436480066102, |
|
"grad_norm": 2.070603624522537, |
|
"learning_rate": 7.487497883285428e-06, |
|
"loss": 0.5477, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.40487502582111135, |
|
"grad_norm": 2.2718749779596097, |
|
"learning_rate": 7.424678860871584e-06, |
|
"loss": 0.5384, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.40900640363561247, |
|
"grad_norm": 2.347289860613089, |
|
"learning_rate": 7.361355373863415e-06, |
|
"loss": 0.5407, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.4131377814501136, |
|
"grad_norm": 2.491186132317311, |
|
"learning_rate": 7.297540596977663e-06, |
|
"loss": 0.5393, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.4172691592646148, |
|
"grad_norm": 2.3320063032579177, |
|
"learning_rate": 7.233247807145989e-06, |
|
"loss": 0.5311, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.4214005370791159, |
|
"grad_norm": 2.122601227745636, |
|
"learning_rate": 7.168490380752648e-06, |
|
"loss": 0.537, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.425531914893617, |
|
"grad_norm": 2.2214499757842865, |
|
"learning_rate": 7.103281790851483e-06, |
|
"loss": 0.5338, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.42966329270811815, |
|
"grad_norm": 2.1654002355826494, |
|
"learning_rate": 7.037635604362786e-06, |
|
"loss": 0.5188, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.4337946705226193, |
|
"grad_norm": 2.1599888645905847, |
|
"learning_rate": 6.971565479250659e-06, |
|
"loss": 0.5321, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.43792604833712045, |
|
"grad_norm": 2.4521064066537, |
|
"learning_rate": 6.905085161681408e-06, |
|
"loss": 0.5137, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.4420574261516216, |
|
"grad_norm": 2.0735269808191563, |
|
"learning_rate": 6.838208483163601e-06, |
|
"loss": 0.5181, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.4461888039661227, |
|
"grad_norm": 2.1964532327451307, |
|
"learning_rate": 6.770949357670358e-06, |
|
"loss": 0.5079, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.45032018178062383, |
|
"grad_norm": 2.1721038727423996, |
|
"learning_rate": 6.703321778744495e-06, |
|
"loss": 0.5169, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.45445155959512495, |
|
"grad_norm": 2.083001885275941, |
|
"learning_rate": 6.635339816587109e-06, |
|
"loss": 0.5189, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.45858293740962613, |
|
"grad_norm": 2.2406805358253523, |
|
"learning_rate": 6.5670176151302136e-06, |
|
"loss": 0.5056, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.46271431522412726, |
|
"grad_norm": 2.362515256614418, |
|
"learning_rate": 6.4983693890940335e-06, |
|
"loss": 0.5068, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.4668456930386284, |
|
"grad_norm": 2.073329405987291, |
|
"learning_rate": 6.4294094210295725e-06, |
|
"loss": 0.5146, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.4709770708531295, |
|
"grad_norm": 2.0802359451245276, |
|
"learning_rate": 6.360152058347068e-06, |
|
"loss": 0.4993, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.47510844866763063, |
|
"grad_norm": 2.205382348988315, |
|
"learning_rate": 6.290611710330957e-06, |
|
"loss": 0.5056, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.4792398264821318, |
|
"grad_norm": 2.106317834822583, |
|
"learning_rate": 6.2208028451419575e-06, |
|
"loss": 0.4939, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.48337120429663294, |
|
"grad_norm": 2.0997265512356953, |
|
"learning_rate": 6.150739986806911e-06, |
|
"loss": 0.4904, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.48750258211113406, |
|
"grad_norm": 2.087397537314153, |
|
"learning_rate": 6.0804377121969985e-06, |
|
"loss": 0.4903, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.4916339599256352, |
|
"grad_norm": 2.1772365057315834, |
|
"learning_rate": 6.009910647994956e-06, |
|
"loss": 0.5043, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.4957653377401363, |
|
"grad_norm": 2.0357765671221872, |
|
"learning_rate": 5.939173467651942e-06, |
|
"loss": 0.4916, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.4998967155546375, |
|
"grad_norm": 2.140953972933528, |
|
"learning_rate": 5.8682408883346535e-06, |
|
"loss": 0.4881, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.5040280933691386, |
|
"grad_norm": 2.133259953600996, |
|
"learning_rate": 5.7971276678633625e-06, |
|
"loss": 0.4925, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.5081594711836398, |
|
"grad_norm": 2.0897840178036753, |
|
"learning_rate": 5.725848601641492e-06, |
|
"loss": 0.4909, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.5122908489981409, |
|
"grad_norm": 2.1033344267926553, |
|
"learning_rate": 5.654418519577369e-06, |
|
"loss": 0.4808, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.516422226812642, |
|
"grad_norm": 2.0931483592499927, |
|
"learning_rate": 5.5828522829987965e-06, |
|
"loss": 0.4697, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.5205536046271432, |
|
"grad_norm": 2.0194919084584444, |
|
"learning_rate": 5.511164781561096e-06, |
|
"loss": 0.479, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.5246849824416443, |
|
"grad_norm": 2.070247481858063, |
|
"learning_rate": 5.439370930149252e-06, |
|
"loss": 0.4705, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.5288163602561454, |
|
"grad_norm": 2.060542153916691, |
|
"learning_rate": 5.367485665774802e-06, |
|
"loss": 0.4689, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.5329477380706465, |
|
"grad_norm": 2.171769190139748, |
|
"learning_rate": 5.295523944468137e-06, |
|
"loss": 0.4678, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.5370791158851477, |
|
"grad_norm": 2.437387629266701, |
|
"learning_rate": 5.223500738166837e-06, |
|
"loss": 0.4723, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.5412104936996488, |
|
"grad_norm": 2.139213174417897, |
|
"learning_rate": 5.1514310316006835e-06, |
|
"loss": 0.472, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.5453418715141499, |
|
"grad_norm": 2.083008247659044, |
|
"learning_rate": 5.07932981917404e-06, |
|
"loss": 0.4684, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.5494732493286512, |
|
"grad_norm": 2.161242770803335, |
|
"learning_rate": 5.007212101846194e-06, |
|
"loss": 0.4587, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.5536046271431523, |
|
"grad_norm": 2.0500783532272604, |
|
"learning_rate": 4.935092884010347e-06, |
|
"loss": 0.4611, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.5577360049576534, |
|
"grad_norm": 2.1439435869325116, |
|
"learning_rate": 4.8629871703718844e-06, |
|
"loss": 0.4622, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.5618673827721545, |
|
"grad_norm": 2.066743306657874, |
|
"learning_rate": 4.7909099628265946e-06, |
|
"loss": 0.4672, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.5659987605866557, |
|
"grad_norm": 2.0461947863660574, |
|
"learning_rate": 4.718876257339444e-06, |
|
"loss": 0.4512, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.5701301384011568, |
|
"grad_norm": 2.13397891500544, |
|
"learning_rate": 4.646901040824622e-06, |
|
"loss": 0.4568, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.5742615162156579, |
|
"grad_norm": 2.038909744378958, |
|
"learning_rate": 4.574999288027437e-06, |
|
"loss": 0.4505, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.578392894030159, |
|
"grad_norm": 2.061119461177199, |
|
"learning_rate": 4.503185958408767e-06, |
|
"loss": 0.4536, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.5825242718446602, |
|
"grad_norm": 2.147709730965502, |
|
"learning_rate": 4.431475993032673e-06, |
|
"loss": 0.4567, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.5866556496591613, |
|
"grad_norm": 2.0471799173895078, |
|
"learning_rate": 4.359884311457857e-06, |
|
"loss": 0.452, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.5907870274736625, |
|
"grad_norm": 2.096762090340259, |
|
"learning_rate": 4.2884258086335755e-06, |
|
"loss": 0.4407, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.5949184052881636, |
|
"grad_norm": 2.1776523990414436, |
|
"learning_rate": 4.217115351800693e-06, |
|
"loss": 0.4364, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.5990497831026648, |
|
"grad_norm": 2.148545496231866, |
|
"learning_rate": 4.145967777398481e-06, |
|
"loss": 0.4407, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.6031811609171659, |
|
"grad_norm": 2.0382917606214326, |
|
"learning_rate": 4.074997887977843e-06, |
|
"loss": 0.4339, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.607312538731667, |
|
"grad_norm": 2.096587532898138, |
|
"learning_rate": 4.004220449121574e-06, |
|
"loss": 0.4337, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.6114439165461681, |
|
"grad_norm": 2.0260734306608272, |
|
"learning_rate": 3.933650186372329e-06, |
|
"loss": 0.4297, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.6155752943606693, |
|
"grad_norm": 2.0747974830236116, |
|
"learning_rate": 3.863301782168896e-06, |
|
"loss": 0.4362, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.6197066721751704, |
|
"grad_norm": 2.1542278120116154, |
|
"learning_rate": 3.7931898727914723e-06, |
|
"loss": 0.4413, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.6238380499896715, |
|
"grad_norm": 1.9775710411271903, |
|
"learning_rate": 3.7233290453165127e-06, |
|
"loss": 0.4273, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.6279694278041726, |
|
"grad_norm": 2.034389521374345, |
|
"learning_rate": 3.6537338345818273e-06, |
|
"loss": 0.4312, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.6321008056186739, |
|
"grad_norm": 2.059316170572532, |
|
"learning_rate": 3.5844187201625567e-06, |
|
"loss": 0.4301, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.636232183433175, |
|
"grad_norm": 2.0806349809281235, |
|
"learning_rate": 3.5153981233586277e-06, |
|
"loss": 0.4234, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.6403635612476761, |
|
"grad_norm": 2.1474742343641933, |
|
"learning_rate": 3.446686404194337e-06, |
|
"loss": 0.4228, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.6444949390621773, |
|
"grad_norm": 2.141080032663376, |
|
"learning_rate": 3.3782978584307035e-06, |
|
"loss": 0.4179, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.6486263168766784, |
|
"grad_norm": 2.0266388142571876, |
|
"learning_rate": 3.310246714591162e-06, |
|
"loss": 0.4197, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.6527576946911795, |
|
"grad_norm": 2.1015063906736873, |
|
"learning_rate": 3.2425471310012645e-06, |
|
"loss": 0.4224, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.6568890725056806, |
|
"grad_norm": 2.0688211913933126, |
|
"learning_rate": 3.1752131928429787e-06, |
|
"loss": 0.4148, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.6610204503201818, |
|
"grad_norm": 2.020342446426012, |
|
"learning_rate": 3.1082589092242116e-06, |
|
"loss": 0.4209, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.6651518281346829, |
|
"grad_norm": 2.050265450999902, |
|
"learning_rate": 3.041698210264149e-06, |
|
"loss": 0.4086, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.669283205949184, |
|
"grad_norm": 2.0414704146915117, |
|
"learning_rate": 2.9755449441950434e-06, |
|
"loss": 0.424, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.6734145837636852, |
|
"grad_norm": 2.0309738852976924, |
|
"learning_rate": 2.9098128744810245e-06, |
|
"loss": 0.4136, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.6775459615781864, |
|
"grad_norm": 2.123876449787772, |
|
"learning_rate": 2.84451567695456e-06, |
|
"loss": 0.4213, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.6816773393926875, |
|
"grad_norm": 2.1356187469686043, |
|
"learning_rate": 2.7796669369711294e-06, |
|
"loss": 0.4216, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.6858087172071886, |
|
"grad_norm": 2.06757675819696, |
|
"learning_rate": 2.715280146582752e-06, |
|
"loss": 0.4192, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.6899400950216897, |
|
"grad_norm": 2.0954350411098916, |
|
"learning_rate": 2.651368701730889e-06, |
|
"loss": 0.4077, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.6940714728361909, |
|
"grad_norm": 1.9490264079119275, |
|
"learning_rate": 2.5879458994593786e-06, |
|
"loss": 0.4018, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.698202850650692, |
|
"grad_norm": 1.9862703131789983, |
|
"learning_rate": 2.5250249351479206e-06, |
|
"loss": 0.4127, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.7023342284651931, |
|
"grad_norm": 1.9907644914298972, |
|
"learning_rate": 2.4626188997667224e-06, |
|
"loss": 0.4057, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.7064656062796942, |
|
"grad_norm": 2.0178385478283, |
|
"learning_rate": 2.400740777152874e-06, |
|
"loss": 0.405, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.7105969840941954, |
|
"grad_norm": 2.0040936929718627, |
|
"learning_rate": 2.3394034413090015e-06, |
|
"loss": 0.4054, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.7147283619086966, |
|
"grad_norm": 2.0032870121182516, |
|
"learning_rate": 2.278619653724781e-06, |
|
"loss": 0.4025, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.7188597397231977, |
|
"grad_norm": 2.0315969629289867, |
|
"learning_rate": 2.218402060721845e-06, |
|
"loss": 0.3929, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.7229911175376988, |
|
"grad_norm": 2.0727282353845626, |
|
"learning_rate": 2.1587631908226812e-06, |
|
"loss": 0.3986, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.7271224953522, |
|
"grad_norm": 1.9152876057461066, |
|
"learning_rate": 2.09971545214401e-06, |
|
"loss": 0.3915, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.7312538731667011, |
|
"grad_norm": 2.006764866181322, |
|
"learning_rate": 2.04127112981522e-06, |
|
"loss": 0.3972, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.7353852509812022, |
|
"grad_norm": 2.009533139724546, |
|
"learning_rate": 1.9834423834224014e-06, |
|
"loss": 0.3952, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.7395166287957033, |
|
"grad_norm": 1.9136982154678623, |
|
"learning_rate": 1.926241244478496e-06, |
|
"loss": 0.3894, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.7436480066102045, |
|
"grad_norm": 1.9940424884511188, |
|
"learning_rate": 1.8696796139200852e-06, |
|
"loss": 0.3964, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.7477793844247056, |
|
"grad_norm": 1.9232507318610614, |
|
"learning_rate": 1.8137692596313528e-06, |
|
"loss": 0.39, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.7519107622392067, |
|
"grad_norm": 1.9669358048444339, |
|
"learning_rate": 1.7585218139957205e-06, |
|
"loss": 0.4089, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.756042140053708, |
|
"grad_norm": 2.0275135426829007, |
|
"learning_rate": 1.7039487714756953e-06, |
|
"loss": 0.3897, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.7601735178682091, |
|
"grad_norm": 1.9145558326260135, |
|
"learning_rate": 1.6500614862213866e-06, |
|
"loss": 0.3845, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.7643048956827102, |
|
"grad_norm": 1.946476089351215, |
|
"learning_rate": 1.596871169708235e-06, |
|
"loss": 0.3827, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.7684362734972113, |
|
"grad_norm": 2.061234713938133, |
|
"learning_rate": 1.54438888840441e-06, |
|
"loss": 0.3865, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.7725676513117125, |
|
"grad_norm": 2.0046087896210993, |
|
"learning_rate": 1.4926255614683931e-06, |
|
"loss": 0.3919, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.7766990291262136, |
|
"grad_norm": 2.013100573926538, |
|
"learning_rate": 1.4415919584771999e-06, |
|
"loss": 0.3791, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.7808304069407147, |
|
"grad_norm": 2.052665994161596, |
|
"learning_rate": 1.3912986971857168e-06, |
|
"loss": 0.3813, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.7849617847552158, |
|
"grad_norm": 1.9665758334089818, |
|
"learning_rate": 1.3417562413176405e-06, |
|
"loss": 0.3743, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.789093162569717, |
|
"grad_norm": 1.9653082426188921, |
|
"learning_rate": 1.292974898388456e-06, |
|
"loss": 0.3884, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.7932245403842182, |
|
"grad_norm": 2.1523771874487707, |
|
"learning_rate": 1.2449648175609115e-06, |
|
"loss": 0.383, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.7973559181987193, |
|
"grad_norm": 1.8532885315782601, |
|
"learning_rate": 1.1977359875334454e-06, |
|
"loss": 0.3812, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.8014872960132204, |
|
"grad_norm": 2.0616460511941477, |
|
"learning_rate": 1.1512982344619904e-06, |
|
"loss": 0.3742, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.8056186738277216, |
|
"grad_norm": 2.000103259130858, |
|
"learning_rate": 1.1056612199156093e-06, |
|
"loss": 0.3795, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.8097500516422227, |
|
"grad_norm": 2.0764477975903404, |
|
"learning_rate": 1.0608344388663576e-06, |
|
"loss": 0.3732, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.8138814294567238, |
|
"grad_norm": 2.0844020934084315, |
|
"learning_rate": 1.01682721771382e-06, |
|
"loss": 0.3738, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.8180128072712249, |
|
"grad_norm": 2.050710610881507, |
|
"learning_rate": 9.73648712344707e-07, |
|
"loss": 0.373, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.8221441850857261, |
|
"grad_norm": 2.068871529971891, |
|
"learning_rate": 9.313079062279429e-07, |
|
"loss": 0.3852, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.8262755629002272, |
|
"grad_norm": 1.9622479084933468, |
|
"learning_rate": 8.898136085456127e-07, |
|
"loss": 0.3839, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.8304069407147283, |
|
"grad_norm": 2.0489239754877913, |
|
"learning_rate": 8.491744523601741e-07, |
|
"loss": 0.3789, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.8345383185292296, |
|
"grad_norm": 1.97405744731925, |
|
"learning_rate": 8.093988928183127e-07, |
|
"loss": 0.3659, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.8386696963437307, |
|
"grad_norm": 2.0649700701955966, |
|
"learning_rate": 7.70495205391818e-07, |
|
"loss": 0.3684, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.8428010741582318, |
|
"grad_norm": 1.9711762707705944, |
|
"learning_rate": 7.324714841558322e-07, |
|
"loss": 0.3728, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.8469324519727329, |
|
"grad_norm": 2.089515078127623, |
|
"learning_rate": 6.953356401048466e-07, |
|
"loss": 0.361, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.851063829787234, |
|
"grad_norm": 2.0969140211573536, |
|
"learning_rate": 6.590953995067812e-07, |
|
"loss": 0.3621, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.8551952076017352, |
|
"grad_norm": 2.014897877209678, |
|
"learning_rate": 6.237583022955079e-07, |
|
"loss": 0.377, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.8593265854162363, |
|
"grad_norm": 2.0616530107992617, |
|
"learning_rate": 5.89331700502136e-07, |
|
"loss": 0.373, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.8634579632307374, |
|
"grad_norm": 2.0017552200191164, |
|
"learning_rate": 5.558227567253832e-07, |
|
"loss": 0.3738, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.8675893410452385, |
|
"grad_norm": 2.0884355733341415, |
|
"learning_rate": 5.232384426413772e-07, |
|
"loss": 0.3625, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.8717207188597397, |
|
"grad_norm": 2.111748293695631, |
|
"learning_rate": 4.915855375531642e-07, |
|
"loss": 0.3789, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.8758520966742409, |
|
"grad_norm": 2.0363025293506305, |
|
"learning_rate": 4.608706269802471e-07, |
|
"loss": 0.3615, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.879983474488742, |
|
"grad_norm": 2.0861363712497742, |
|
"learning_rate": 4.3110010128843747e-07, |
|
"loss": 0.3803, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.8841148523032432, |
|
"grad_norm": 1.9794223337927541, |
|
"learning_rate": 4.022801543603194e-07, |
|
"loss": 0.37, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.8882462301177443, |
|
"grad_norm": 1.976468279453303, |
|
"learning_rate": 3.744167823065814e-07, |
|
"loss": 0.3511, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.8923776079322454, |
|
"grad_norm": 2.0814002530931583, |
|
"learning_rate": 3.4751578221850377e-07, |
|
"loss": 0.3611, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.8965089857467465, |
|
"grad_norm": 2.0984819483954844, |
|
"learning_rate": 3.2158275096184744e-07, |
|
"loss": 0.3562, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.9006403635612477, |
|
"grad_norm": 2.144295712901401, |
|
"learning_rate": 2.966230840124007e-07, |
|
"loss": 0.3674, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.9047717413757488, |
|
"grad_norm": 2.118969417696939, |
|
"learning_rate": 2.72641974333433e-07, |
|
"loss": 0.3622, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.9089031191902499, |
|
"grad_norm": 2.0003217874891694, |
|
"learning_rate": 2.4964441129527337e-07, |
|
"loss": 0.3697, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.913034497004751, |
|
"grad_norm": 2.052789066548848, |
|
"learning_rate": 2.2763517963725169e-07, |
|
"loss": 0.3709, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.9171658748192523, |
|
"grad_norm": 1.9424275069967558, |
|
"learning_rate": 2.0661885847221398e-07, |
|
"loss": 0.3672, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.9212972526337534, |
|
"grad_norm": 2.030858945131136, |
|
"learning_rate": 1.8659982033381928e-07, |
|
"loss": 0.3675, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.9254286304482545, |
|
"grad_norm": 2.0600092854113132, |
|
"learning_rate": 1.6758223026681507e-07, |
|
"loss": 0.3636, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.9295600082627556, |
|
"grad_norm": 2.0750384756960374, |
|
"learning_rate": 1.4957004496048256e-07, |
|
"loss": 0.3656, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.9336913860772568, |
|
"grad_norm": 2.006693676746082, |
|
"learning_rate": 1.3256701192542853e-07, |
|
"loss": 0.3676, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.9378227638917579, |
|
"grad_norm": 2.114218265869402, |
|
"learning_rate": 1.1657666871390471e-07, |
|
"loss": 0.3658, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.941954141706259, |
|
"grad_norm": 2.0597089907860537, |
|
"learning_rate": 1.0160234218380305e-07, |
|
"loss": 0.3573, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.9460855195207601, |
|
"grad_norm": 2.0898079361017334, |
|
"learning_rate": 8.76471478064872e-08, |
|
"loss": 0.3724, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.9502168973352613, |
|
"grad_norm": 2.0371025770189632, |
|
"learning_rate": 7.471398901860772e-08, |
|
"loss": 0.3625, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.9543482751497624, |
|
"grad_norm": 1.938944230136909, |
|
"learning_rate": 6.280555661802857e-08, |
|
"loss": 0.3634, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.9584796529642636, |
|
"grad_norm": 2.1345661558712967, |
|
"learning_rate": 5.192432820399718e-08, |
|
"loss": 0.3674, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.9626110307787648, |
|
"grad_norm": 1.9759349002447268, |
|
"learning_rate": 4.207256766166845e-08, |
|
"loss": 0.3621, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.9667424085932659, |
|
"grad_norm": 2.0037876312026213, |
|
"learning_rate": 3.3252324691093185e-08, |
|
"loss": 0.3645, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.970873786407767, |
|
"grad_norm": 2.059268789116945, |
|
"learning_rate": 2.546543438077087e-08, |
|
"loss": 0.3569, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.9750051642222681, |
|
"grad_norm": 2.1019547322975916, |
|
"learning_rate": 1.8713516825851207e-08, |
|
"loss": 0.3604, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.9791365420367693, |
|
"grad_norm": 2.067476136669, |
|
"learning_rate": 1.2997976791065403e-08, |
|
"loss": 0.3611, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.9832679198512704, |
|
"grad_norm": 1.970947575751658, |
|
"learning_rate": 8.32000341846162e-09, |
|
"loss": 0.3626, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.9873992976657715, |
|
"grad_norm": 2.0213770050103816, |
|
"learning_rate": 4.6805699799967744e-09, |
|
"loss": 0.3577, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.9915306754802726, |
|
"grad_norm": 2.0031463180853915, |
|
"learning_rate": 2.0804336750429588e-09, |
|
"loss": 0.3594, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.9956620532947738, |
|
"grad_norm": 2.0391505951802165, |
|
"learning_rate": 5.201354728517905e-10, |
|
"loss": 0.3624, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.999793431109275, |
|
"grad_norm": 2.0674337207917235, |
|
"learning_rate": 0.0, |
|
"loss": 0.3717, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.999793431109275, |
|
"eval_loss": 0.3491726517677307, |
|
"eval_runtime": 125.1809, |
|
"eval_samples_per_second": 3.115, |
|
"eval_steps_per_second": 0.783, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.999793431109275, |
|
"step": 1210, |
|
"total_flos": 253297038458880.0, |
|
"train_loss": 0.5434080246066259, |
|
"train_runtime": 27449.6475, |
|
"train_samples_per_second": 1.411, |
|
"train_steps_per_second": 0.044 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 1210, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 253297038458880.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |