{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 520,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 0.14224752705278207,
      "learning_rate": 3.846153846153847e-06,
      "loss": 1.2184,
      "step": 1
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.13625734874024836,
      "learning_rate": 1.923076923076923e-05,
      "loss": 1.2229,
      "step": 5
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.2183581721176969,
      "learning_rate": 3.846153846153846e-05,
      "loss": 1.1951,
      "step": 10
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.2732295826792407,
      "learning_rate": 5.769230769230769e-05,
      "loss": 1.1787,
      "step": 15
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.3421919479653908,
      "learning_rate": 7.692307692307693e-05,
      "loss": 1.137,
      "step": 20
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.2521087325682044,
      "learning_rate": 9.615384615384617e-05,
      "loss": 1.0638,
      "step": 25
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.2450437686862467,
      "learning_rate": 0.00011538461538461538,
      "loss": 0.9419,
      "step": 30
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.15724697110363808,
      "learning_rate": 0.00013461538461538464,
      "loss": 0.882,
      "step": 35
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.11027689524889575,
      "learning_rate": 0.00015384615384615385,
      "loss": 0.8141,
      "step": 40
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.1548880123404284,
      "learning_rate": 0.0001730769230769231,
      "loss": 0.8662,
      "step": 45
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.06952349939260559,
      "learning_rate": 0.00019230769230769233,
      "loss": 0.8216,
      "step": 50
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.08081541900240145,
      "learning_rate": 0.00019997972289848503,
      "loss": 0.8365,
      "step": 55
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.07647251365893909,
      "learning_rate": 0.00019985583705641418,
      "loss": 0.778,
      "step": 60
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.06873720351393725,
      "learning_rate": 0.00019961946980917456,
      "loss": 0.8149,
      "step": 65
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.07284335067514863,
      "learning_rate": 0.0001992708874098054,
      "loss": 0.7723,
      "step": 70
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.061816388971901864,
      "learning_rate": 0.0001988104825147528,
      "loss": 0.7903,
      "step": 75
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.06880609874097408,
      "learning_rate": 0.00019823877374156647,
      "loss": 0.8192,
      "step": 80
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.07276187547013381,
      "learning_rate": 0.00019755640508470942,
      "loss": 0.7538,
      "step": 85
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.06902019244104873,
      "learning_rate": 0.00019676414519013781,
      "loss": 0.8287,
      "step": 90
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.06796147789503805,
      "learning_rate": 0.00019586288648946947,
      "loss": 0.7512,
      "step": 95
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.09206670203503213,
      "learning_rate": 0.00019485364419471454,
      "loss": 0.7683,
      "step": 100
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.07628333351242977,
      "learning_rate": 0.00019373755515470254,
      "loss": 0.7772,
      "step": 105
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.06859896953869309,
      "learning_rate": 0.00019251587657449236,
      "loss": 0.7842,
      "step": 110
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.07423756650259968,
      "learning_rate": 0.00019118998459920902,
      "loss": 0.784,
      "step": 115
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.07101524662585225,
      "learning_rate": 0.0001897613727639014,
      "loss": 0.775,
      "step": 120
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.08383308492194629,
      "learning_rate": 0.0001882316503111678,
      "loss": 0.7923,
      "step": 125
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.07609154917414301,
      "learning_rate": 0.00018660254037844388,
      "loss": 0.7796,
      "step": 130
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.10263340282880631,
      "learning_rate": 0.00018487587805699526,
      "loss": 0.8031,
      "step": 135
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.1260927633470495,
      "learning_rate": 0.00018305360832480117,
      "loss": 0.7883,
      "step": 140
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.08281016699121946,
      "learning_rate": 0.00018113778385565733,
      "loss": 0.7806,
      "step": 145
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.08678769805476597,
      "learning_rate": 0.0001791305627069662,
      "loss": 0.7857,
      "step": 150
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.07952474488167242,
      "learning_rate": 0.00017703420588881946,
      "loss": 0.7745,
      "step": 155
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.08413575439868433,
      "learning_rate": 0.00017485107481711012,
      "loss": 0.7548,
      "step": 160
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.088001770504953,
      "learning_rate": 0.00017258362865354426,
      "loss": 0.7759,
      "step": 165
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.08460156949153247,
      "learning_rate": 0.00017023442153554777,
      "loss": 0.7509,
      "step": 170
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.08605861218817173,
      "learning_rate": 0.0001678060996991891,
      "loss": 0.7698,
      "step": 175
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.0815892324710325,
      "learning_rate": 0.0001653013984983585,
      "loss": 0.7776,
      "step": 180
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.08642613673653848,
      "learning_rate": 0.00016272313932356162,
      "loss": 0.7827,
      "step": 185
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.08052717676550014,
      "learning_rate": 0.0001600742264237979,
      "loss": 0.7724,
      "step": 190
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.0909751271495165,
      "learning_rate": 0.0001573576436351046,
      "loss": 0.7513,
      "step": 195
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.1145574292852593,
      "learning_rate": 0.00015457645101945046,
      "loss": 0.786,
      "step": 200
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.09300036341511027,
      "learning_rate": 0.00015173378141776568,
      "loss": 0.7451,
      "step": 205
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.09599866375671466,
      "learning_rate": 0.00014883283692099112,
      "loss": 0.7867,
      "step": 210
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.08611862144654625,
      "learning_rate": 0.00014587688526312143,
      "loss": 0.7474,
      "step": 215
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.09903566292656622,
      "learning_rate": 0.00014286925614030542,
      "loss": 0.7631,
      "step": 220
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.08236801719167289,
      "learning_rate": 0.0001398133374601501,
      "loss": 0.8019,
      "step": 225
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.08570507340507183,
      "learning_rate": 0.00013671257152545277,
      "loss": 0.7815,
      "step": 230
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.08811438275565384,
      "learning_rate": 0.0001335704511566605,
      "loss": 0.7555,
      "step": 235
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.09594113587533556,
      "learning_rate": 0.0001303905157574247,
      "loss": 0.805,
      "step": 240
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.10311873339145382,
      "learning_rate": 0.00012717634732768243,
      "loss": 0.7528,
      "step": 245
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.09761497403725995,
      "learning_rate": 0.0001239315664287558,
      "loss": 0.7704,
      "step": 250
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.09326338914693559,
      "learning_rate": 0.00012065982810501404,
      "loss": 0.7799,
      "step": 255
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.09690037829569594,
      "learning_rate": 0.00011736481776669306,
      "loss": 0.8005,
      "step": 260
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.10422234796374458,
      "learning_rate": 0.00011405024703850929,
      "loss": 0.7847,
      "step": 265
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.09604529470493206,
      "learning_rate": 0.00011071984957874479,
      "loss": 0.751,
      "step": 270
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.09715199179357369,
      "learning_rate": 0.00010737737687351284,
      "loss": 0.7549,
      "step": 275
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.10421523612800408,
      "learning_rate": 0.00010402659401094152,
      "loss": 0.7525,
      "step": 280
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.08873252434222194,
      "learning_rate": 0.00010067127544003563,
      "loss": 0.7788,
      "step": 285
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.10501554131200493,
      "learning_rate": 9.73152007189939e-05,
      "loss": 0.7582,
      "step": 290
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.09989556637513738,
      "learning_rate": 9.396215025777139e-05,
      "loss": 0.6995,
      "step": 295
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.10589049972382096,
      "learning_rate": 9.061590105968208e-05,
      "loss": 0.7244,
      "step": 300
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.10948934725584464,
      "learning_rate": 8.728022246683894e-05,
      "loss": 0.7736,
      "step": 305
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.10028721673980243,
      "learning_rate": 8.395887191422397e-05,
      "loss": 0.7198,
      "step": 310
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.09739335000058916,
      "learning_rate": 8.065559069717088e-05,
      "loss": 0.7567,
      "step": 315
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.10277209169490305,
      "learning_rate": 7.73740997570278e-05,
      "loss": 0.7568,
      "step": 320
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.10524797824162033,
      "learning_rate": 7.411809548974792e-05,
      "loss": 0.7456,
      "step": 325
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.1051395107724441,
      "learning_rate": 7.089124558212871e-05,
      "loss": 0.7403,
      "step": 330
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.10747180401281722,
      "learning_rate": 6.769718488039023e-05,
      "loss": 0.7767,
      "step": 335
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.09745583890868459,
      "learning_rate": 6.453951129574644e-05,
      "loss": 0.7535,
      "step": 340
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.09772137711647556,
      "learning_rate": 6.142178175158149e-05,
      "loss": 0.7376,
      "step": 345
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.10842494901973967,
      "learning_rate": 5.834750817679606e-05,
      "loss": 0.7453,
      "step": 350
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.09803460989737925,
      "learning_rate": 5.5320153549837415e-05,
      "loss": 0.7393,
      "step": 355
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.09966073392521739,
      "learning_rate": 5.234312799786921e-05,
      "loss": 0.7683,
      "step": 360
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.11988315536422008,
      "learning_rate": 4.9419784955474524e-05,
      "loss": 0.7639,
      "step": 365
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.10150718577374027,
      "learning_rate": 4.6553417387219886e-05,
      "loss": 0.7485,
      "step": 370
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.10411916867887729,
      "learning_rate": 4.374725407833532e-05,
      "loss": 0.7666,
      "step": 375
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.10826007605053514,
      "learning_rate": 4.100445599768774e-05,
      "loss": 0.7658,
      "step": 380
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.10048879684970075,
      "learning_rate": 3.832811273714569e-05,
      "loss": 0.7551,
      "step": 385
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.11333121070404563,
      "learning_rate": 3.5721239031346066e-05,
      "loss": 0.7403,
      "step": 390
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.10604375075112575,
      "learning_rate": 3.318677136178228e-05,
      "loss": 0.7582,
      "step": 395
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.1096204221876785,
      "learning_rate": 3.072756464904006e-05,
      "loss": 0.7764,
      "step": 400
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.11335151514771624,
      "learning_rate": 2.8346389036906828e-05,
      "loss": 0.7211,
      "step": 405
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.11145444617637609,
      "learning_rate": 2.6045926771976303e-05,
      "loss": 0.7503,
      "step": 410
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.11571252661286062,
      "learning_rate": 2.382876918226409e-05,
      "loss": 0.7736,
      "step": 415
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.09566191511055883,
      "learning_rate": 2.1697413758237784e-05,
      "loss": 0.7053,
      "step": 420
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.102301183425705,
      "learning_rate": 1.965426133954854e-05,
      "loss": 0.7242,
      "step": 425
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.10272181778923359,
      "learning_rate": 1.7701613410634365e-05,
      "loss": 0.7435,
      "step": 430
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.10470325817832943,
      "learning_rate": 1.584166950824061e-05,
      "loss": 0.7788,
      "step": 435
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.1056106844203712,
      "learning_rate": 1.4076524743778319e-05,
      "loss": 0.7414,
      "step": 440
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.11363112777190297,
      "learning_rate": 1.2408167443311214e-05,
      "loss": 0.7591,
      "step": 445
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.10888244685834139,
      "learning_rate": 1.083847690782972e-05,
      "loss": 0.7441,
      "step": 450
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.10487039779016745,
      "learning_rate": 9.369221296335006e-06,
      "loss": 0.7431,
      "step": 455
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.10805312111545334,
      "learning_rate": 8.002055634117578e-06,
      "loss": 0.7762,
      "step": 460
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.1240671004842348,
      "learning_rate": 6.738519948473976e-06,
      "loss": 0.7716,
      "step": 465
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.11529942857503232,
      "learning_rate": 5.580037533961546e-06,
      "loss": 0.7493,
      "step": 470
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.1141516615544035,
      "learning_rate": 4.527913349145441e-06,
      "loss": 0.7467,
      "step": 475
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.10997206278089339,
      "learning_rate": 3.5833325466437694e-06,
      "loss": 0.7703,
      "step": 480
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.10883189752644193,
      "learning_rate": 2.7473591381266708e-06,
      "loss": 0.7482,
      "step": 485
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.10933292874827852,
      "learning_rate": 2.0209347957732328e-06,
      "loss": 0.7275,
      "step": 490
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.1188872123396054,
      "learning_rate": 1.404877791536141e-06,
      "loss": 0.7586,
      "step": 495
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.10543745434408874,
      "learning_rate": 8.998820754091531e-07,
      "loss": 0.7219,
      "step": 500
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.11637792211917665,
      "learning_rate": 5.065164937354428e-07,
      "loss": 0.7667,
      "step": 505
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.11076875024964886,
      "learning_rate": 2.2522414843748618e-07,
      "loss": 0.7679,
      "step": 510
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.1102628163050046,
      "learning_rate": 5.632189789027687e-08,
      "loss": 0.7657,
      "step": 515
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.1060248448764366,
      "learning_rate": 0.0,
      "loss": 0.7273,
      "step": 520
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.9492523670196533,
      "eval_runtime": 489.5465,
      "eval_samples_per_second": 4.719,
      "eval_steps_per_second": 0.296,
      "step": 520
    },
    {
      "epoch": 1.0,
      "step": 520,
      "total_flos": 6008504340971520.0,
      "train_loss": 0.7879391065010658,
      "train_runtime": 18212.2408,
      "train_samples_per_second": 1.826,
      "train_steps_per_second": 0.029
    }
  ],
  "logging_steps": 5,
  "max_steps": 520,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "total_flos": 6008504340971520.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}