{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.0,
"eval_steps": 500,
"global_step": 32028,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.2,
"learning_rate": 1.997814548860443e-05,
"loss": 2.2558,
"step": 2135
},
{
"epoch": 0.4,
"learning_rate": 2.889158716392021e-05,
"loss": 1.8307,
"step": 4270
},
{
"epoch": 0.6,
"learning_rate": 2.6671639202081526e-05,
"loss": 1.6597,
"step": 6405
},
{
"epoch": 0.8,
"learning_rate": 2.445273200346921e-05,
"loss": 1.5658,
"step": 8540
},
{
"epoch": 1.0,
"learning_rate": 2.2230702515177798e-05,
"loss": 1.5067,
"step": 10675
},
{
"epoch": 1.2,
"learning_rate": 2.000971379011275e-05,
"loss": 1.454,
"step": 12810
},
{
"epoch": 1.4,
"learning_rate": 1.7787684301821338e-05,
"loss": 1.4184,
"step": 14945
},
{
"epoch": 1.6,
"learning_rate": 1.5565654813529924e-05,
"loss": 1.3922,
"step": 17080
},
{
"epoch": 1.8,
"learning_rate": 1.3343625325238507e-05,
"loss": 1.3727,
"step": 19215
},
{
"epoch": 2.0,
"learning_rate": 1.112263660017346e-05,
"loss": 1.3499,
"step": 21350
},
{
"epoch": 2.2,
"learning_rate": 8.900607111882046e-06,
"loss": 1.3314,
"step": 23485
},
{
"epoch": 2.4,
"learning_rate": 6.680659150043365e-06,
"loss": 1.3167,
"step": 25620
},
{
"epoch": 2.6,
"learning_rate": 4.458629661751952e-06,
"loss": 1.3074,
"step": 27755
},
{
"epoch": 2.8,
"learning_rate": 2.237640936686904e-06,
"loss": 1.304,
"step": 29890
},
{
"epoch": 3.0,
"learning_rate": 1.5611448395490027e-08,
"loss": 1.2955,
"step": 32025
}
],
"logging_steps": 2135,
"max_steps": 32028,
"num_train_epochs": 3,
"save_steps": 500,
"total_flos": 2.801023057794171e+18,
"trial_name": null,
"trial_params": null
}