|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 39924,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04,
      "learning_rate": 4.9373810239454966e-05,
      "loss": 3.0783,
      "step": 500
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.874762047890993e-05,
      "loss": 2.5238,
      "step": 1000
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.8121430718364894e-05,
      "loss": 2.2994,
      "step": 1500
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.749524095781986e-05,
      "loss": 2.1655,
      "step": 2000
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.686905119727483e-05,
      "loss": 2.1007,
      "step": 2500
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.624286143672979e-05,
      "loss": 2.0447,
      "step": 3000
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.5616671676184756e-05,
      "loss": 1.9793,
      "step": 3500
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.499048191563971e-05,
      "loss": 1.928,
      "step": 4000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.436429215509468e-05,
      "loss": 1.8916,
      "step": 4500
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.373810239454965e-05,
      "loss": 1.8331,
      "step": 5000
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.311191263400461e-05,
      "loss": 1.8014,
      "step": 5500
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.2485722873459575e-05,
      "loss": 1.7924,
      "step": 6000
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.185953311291454e-05,
      "loss": 1.7729,
      "step": 6500
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.12333433523695e-05,
      "loss": 1.7446,
      "step": 7000
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.060715359182447e-05,
      "loss": 1.7415,
      "step": 7500
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.9980963831279437e-05,
      "loss": 1.7042,
      "step": 8000
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.9354774070734394e-05,
      "loss": 1.678,
      "step": 8500
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.8728584310189364e-05,
      "loss": 1.6576,
      "step": 9000
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.810239454964433e-05,
      "loss": 1.6765,
      "step": 9500
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.747620478909929e-05,
      "loss": 1.6109,
      "step": 10000
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.6850015028554255e-05,
      "loss": 1.6197,
      "step": 10500
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.622382526800922e-05,
      "loss": 1.6011,
      "step": 11000
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.559763550746418e-05,
      "loss": 1.5907,
      "step": 11500
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.497144574691915e-05,
      "loss": 1.5875,
      "step": 12000
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.434525598637412e-05,
      "loss": 1.5752,
      "step": 12500
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.3719066225829074e-05,
      "loss": 1.5237,
      "step": 13000
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.309287646528404e-05,
      "loss": 1.4724,
      "step": 13500
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.246668670473901e-05,
      "loss": 1.3053,
      "step": 14000
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.184049694419397e-05,
      "loss": 1.3191,
      "step": 14500
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.1214307183648936e-05,
      "loss": 1.2972,
      "step": 15000
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.05881174231039e-05,
      "loss": 1.2705,
      "step": 15500
    },
    {
      "epoch": 1.2,
      "learning_rate": 2.9961927662558864e-05,
      "loss": 1.3129,
      "step": 16000
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.9335737902013828e-05,
      "loss": 1.3122,
      "step": 16500
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.8709548141468788e-05,
      "loss": 1.2842,
      "step": 17000
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.808335838092376e-05,
      "loss": 1.2766,
      "step": 17500
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.745716862037872e-05,
      "loss": 1.2869,
      "step": 18000
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.6830978859833683e-05,
      "loss": 1.2822,
      "step": 18500
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.620478909928865e-05,
      "loss": 1.2513,
      "step": 19000
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.5578599338743614e-05,
      "loss": 1.2716,
      "step": 19500
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.495240957819858e-05,
      "loss": 1.2569,
      "step": 20000
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.432621981765354e-05,
      "loss": 1.2537,
      "step": 20500
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.370003005710851e-05,
      "loss": 1.26,
      "step": 21000
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.307384029656347e-05,
      "loss": 1.2559,
      "step": 21500
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.2447650536018436e-05,
      "loss": 1.2259,
      "step": 22000
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.18214607754734e-05,
      "loss": 1.2254,
      "step": 22500
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.1195271014928364e-05,
      "loss": 1.2075,
      "step": 23000
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.056908125438333e-05,
      "loss": 1.2135,
      "step": 23500
    },
    {
      "epoch": 1.8,
      "learning_rate": 1.9942891493838295e-05,
      "loss": 1.2082,
      "step": 24000
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.931670173329326e-05,
      "loss": 1.1962,
      "step": 24500
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.8690511972748222e-05,
      "loss": 1.2139,
      "step": 25000
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.8064322212203186e-05,
      "loss": 1.1852,
      "step": 25500
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.743813245165815e-05,
      "loss": 1.1937,
      "step": 26000
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.6811942691113117e-05,
      "loss": 1.1962,
      "step": 26500
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.618575293056808e-05,
      "loss": 1.0053,
      "step": 27000
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.5559563170023044e-05,
      "loss": 0.9642,
      "step": 27500
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.4933373409478008e-05,
      "loss": 0.9592,
      "step": 28000
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.4307183648932974e-05,
      "loss": 0.9827,
      "step": 28500
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.368099388838794e-05,
      "loss": 0.9665,
      "step": 29000
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.3054804127842901e-05,
      "loss": 0.9826,
      "step": 29500
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.2428614367297867e-05,
      "loss": 0.9679,
      "step": 30000
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.180242460675283e-05,
      "loss": 0.9684,
      "step": 30500
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.1176234846207794e-05,
      "loss": 0.9709,
      "step": 31000
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.055004508566276e-05,
      "loss": 0.9489,
      "step": 31500
    },
    {
      "epoch": 2.4,
      "learning_rate": 9.923855325117724e-06,
      "loss": 0.9516,
      "step": 32000
    },
    {
      "epoch": 2.44,
      "learning_rate": 9.297665564572689e-06,
      "loss": 0.9691,
      "step": 32500
    },
    {
      "epoch": 2.48,
      "learning_rate": 8.671475804027653e-06,
      "loss": 0.9417,
      "step": 33000
    },
    {
      "epoch": 2.52,
      "learning_rate": 8.045286043482618e-06,
      "loss": 0.9505,
      "step": 33500
    },
    {
      "epoch": 2.55,
      "learning_rate": 7.419096282937582e-06,
      "loss": 0.9594,
      "step": 34000
    },
    {
      "epoch": 2.59,
      "learning_rate": 6.792906522392546e-06,
      "loss": 0.9334,
      "step": 34500
    },
    {
      "epoch": 2.63,
      "learning_rate": 6.1667167618475106e-06,
      "loss": 0.9486,
      "step": 35000
    },
    {
      "epoch": 2.67,
      "learning_rate": 5.540527001302475e-06,
      "loss": 0.9476,
      "step": 35500
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.914337240757439e-06,
      "loss": 0.9366,
      "step": 36000
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.288147480212404e-06,
      "loss": 0.9253,
      "step": 36500
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.661957719667368e-06,
      "loss": 0.9279,
      "step": 37000
    },
    {
      "epoch": 2.82,
      "learning_rate": 3.035767959122333e-06,
      "loss": 0.9246,
      "step": 37500
    },
    {
      "epoch": 2.86,
      "learning_rate": 2.409578198577297e-06,
      "loss": 0.923,
      "step": 38000
    },
    {
      "epoch": 2.89,
      "learning_rate": 1.7833884380322613e-06,
      "loss": 0.9241,
      "step": 38500
    },
    {
      "epoch": 2.93,
      "learning_rate": 1.157198677487226e-06,
      "loss": 0.9359,
      "step": 39000
    },
    {
      "epoch": 2.97,
      "learning_rate": 5.310089169421901e-07,
      "loss": 0.9117,
      "step": 39500
    },
    {
      "epoch": 3.0,
      "step": 39924,
      "total_flos": 5.321321329491149e+16,
      "train_loss": 1.3535074337774975,
      "train_runtime": 29786.9631,
      "train_samples_per_second": 10.722,
      "train_steps_per_second": 1.34
    }
  ],
  "logging_steps": 500,
  "max_steps": 39924,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 10000,
  "total_flos": 5.321321329491149e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}