|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 8.0,
  "global_step": 12336,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.32,
      "learning_rate": 1.9189364461738006e-05,
      "loss": 2.3643,
      "step": 500
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.8378728923476004e-05,
      "loss": 2.2958,
      "step": 1000
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.756809338521401e-05,
      "loss": 2.2954,
      "step": 1500
    },
    {
      "epoch": 1.0,
      "eval_loss": 2.0372068881988525,
      "eval_runtime": 81.4002,
      "eval_samples_per_second": 63.796,
      "eval_steps_per_second": 7.985,
      "step": 1542
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.675745784695201e-05,
      "loss": 2.2408,
      "step": 2000
    },
    {
      "epoch": 1.62,
      "learning_rate": 1.5946822308690015e-05,
      "loss": 2.213,
      "step": 2500
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.5136186770428017e-05,
      "loss": 2.2015,
      "step": 3000
    },
    {
      "epoch": 2.0,
      "eval_loss": 2.0103893280029297,
      "eval_runtime": 81.4187,
      "eval_samples_per_second": 63.781,
      "eval_steps_per_second": 7.983,
      "step": 3084
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.432555123216602e-05,
      "loss": 2.1898,
      "step": 3500
    },
    {
      "epoch": 2.59,
      "learning_rate": 1.3514915693904023e-05,
      "loss": 2.1665,
      "step": 4000
    },
    {
      "epoch": 2.92,
      "learning_rate": 1.2704280155642024e-05,
      "loss": 2.1661,
      "step": 4500
    },
    {
      "epoch": 3.0,
      "eval_loss": 2.0372352600097656,
      "eval_runtime": 81.1974,
      "eval_samples_per_second": 63.955,
      "eval_steps_per_second": 8.005,
      "step": 4626
    },
    {
      "epoch": 3.24,
      "learning_rate": 1.1893644617380028e-05,
      "loss": 2.1286,
      "step": 5000
    },
    {
      "epoch": 3.57,
      "learning_rate": 1.108300907911803e-05,
      "loss": 2.1179,
      "step": 5500
    },
    {
      "epoch": 3.89,
      "learning_rate": 1.027237354085603e-05,
      "loss": 2.1186,
      "step": 6000
    },
    {
      "epoch": 4.0,
      "eval_loss": 1.9548556804656982,
      "eval_runtime": 81.2263,
      "eval_samples_per_second": 63.932,
      "eval_steps_per_second": 8.002,
      "step": 6168
    },
    {
      "epoch": 4.22,
      "learning_rate": 9.461738002594033e-06,
      "loss": 2.1206,
      "step": 6500
    },
    {
      "epoch": 4.54,
      "learning_rate": 8.651102464332037e-06,
      "loss": 2.1017,
      "step": 7000
    },
    {
      "epoch": 4.86,
      "learning_rate": 7.84046692607004e-06,
      "loss": 2.0939,
      "step": 7500
    },
    {
      "epoch": 5.0,
      "eval_loss": 1.9438201189041138,
      "eval_runtime": 81.1944,
      "eval_samples_per_second": 63.958,
      "eval_steps_per_second": 8.005,
      "step": 7710
    },
    {
      "epoch": 5.19,
      "learning_rate": 7.029831387808041e-06,
      "loss": 2.084,
      "step": 8000
    },
    {
      "epoch": 5.51,
      "learning_rate": 6.219195849546045e-06,
      "loss": 2.0629,
      "step": 8500
    },
    {
      "epoch": 5.84,
      "learning_rate": 5.4085603112840465e-06,
      "loss": 2.0867,
      "step": 9000
    },
    {
      "epoch": 6.0,
      "eval_loss": 1.9647932052612305,
      "eval_runtime": 81.2286,
      "eval_samples_per_second": 63.931,
      "eval_steps_per_second": 8.002,
      "step": 9252
    },
    {
      "epoch": 6.16,
      "learning_rate": 4.5979247730220496e-06,
      "loss": 2.0697,
      "step": 9500
    },
    {
      "epoch": 6.49,
      "learning_rate": 3.7872892347600522e-06,
      "loss": 2.0516,
      "step": 10000
    },
    {
      "epoch": 6.81,
      "learning_rate": 2.9766536964980545e-06,
      "loss": 2.0462,
      "step": 10500
    },
    {
      "epoch": 7.0,
      "eval_loss": 1.9464805126190186,
      "eval_runtime": 81.3559,
      "eval_samples_per_second": 63.831,
      "eval_steps_per_second": 7.99,
      "step": 10794
    },
    {
      "epoch": 7.13,
      "learning_rate": 2.166018158236057e-06,
      "loss": 2.0434,
      "step": 11000
    },
    {
      "epoch": 7.46,
      "learning_rate": 1.3553826199740596e-06,
      "loss": 2.0279,
      "step": 11500
    },
    {
      "epoch": 7.78,
      "learning_rate": 5.447470817120623e-07,
      "loss": 2.0315,
      "step": 12000
    },
    {
      "epoch": 8.0,
      "eval_loss": 1.9411877393722534,
      "eval_runtime": 81.3329,
      "eval_samples_per_second": 63.849,
      "eval_steps_per_second": 7.992,
      "step": 12336
    },
    {
      "epoch": 8.0,
      "step": 12336,
      "total_flos": 2.0776857912786125e+17,
      "train_loss": 2.136193385538579,
      "train_runtime": 46556.9069,
      "train_samples_per_second": 16.955,
      "train_steps_per_second": 0.265
    }
  ],
  "max_steps": 12336,
  "num_train_epochs": 8,
  "total_flos": 2.0776857912786125e+17,
  "trial_name": null,
  "trial_params": null
}