{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.996875,
  "eval_steps": 500,
  "global_step": 1599,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.2,
      "learning_rate": 9.896907216494846e-05,
      "loss": 1.5529,
      "step": 64
    },
    {
      "epoch": 0.4,
      "learning_rate": 9.484536082474227e-05,
      "loss": 0.9276,
      "step": 128
    },
    {
      "epoch": 0.6,
      "learning_rate": 9.072164948453609e-05,
      "loss": 0.879,
      "step": 192
    },
    {
      "epoch": 0.8,
      "learning_rate": 8.65979381443299e-05,
      "loss": 0.8731,
      "step": 256
    },
    {
      "epoch": 1.0,
      "learning_rate": 8.247422680412371e-05,
      "loss": 0.8242,
      "step": 320
    },
    {
      "epoch": 1.2,
      "learning_rate": 7.835051546391753e-05,
      "loss": 0.7293,
      "step": 384
    },
    {
      "epoch": 1.4,
      "learning_rate": 7.422680412371135e-05,
      "loss": 0.7194,
      "step": 448
    },
    {
      "epoch": 1.6,
      "learning_rate": 7.010309278350515e-05,
      "loss": 0.6864,
      "step": 512
    },
    {
      "epoch": 1.8,
      "learning_rate": 6.597938144329897e-05,
      "loss": 0.7096,
      "step": 576
    },
    {
      "epoch": 2.0,
      "learning_rate": 6.185567010309279e-05,
      "loss": 0.6913,
      "step": 640
    },
    {
      "epoch": 2.2,
      "learning_rate": 5.7731958762886594e-05,
      "loss": 0.5724,
      "step": 704
    },
    {
      "epoch": 2.4,
      "learning_rate": 5.360824742268041e-05,
      "loss": 0.5465,
      "step": 768
    },
    {
      "epoch": 2.6,
      "learning_rate": 4.948453608247423e-05,
      "loss": 0.5307,
      "step": 832
    },
    {
      "epoch": 2.8,
      "learning_rate": 4.536082474226804e-05,
      "loss": 0.5446,
      "step": 896
    },
    {
      "epoch": 3.0,
      "learning_rate": 4.1237113402061855e-05,
      "loss": 0.5501,
      "step": 960
    },
    {
      "epoch": 3.2,
      "learning_rate": 3.7113402061855674e-05,
      "loss": 0.4308,
      "step": 1024
    },
    {
      "epoch": 3.4,
      "learning_rate": 3.2989690721649485e-05,
      "loss": 0.4082,
      "step": 1088
    },
    {
      "epoch": 3.6,
      "learning_rate": 2.8865979381443297e-05,
      "loss": 0.4158,
      "step": 1152
    },
    {
      "epoch": 3.8,
      "learning_rate": 2.4742268041237116e-05,
      "loss": 0.4281,
      "step": 1216
    },
    {
      "epoch": 4.0,
      "learning_rate": 2.0618556701030927e-05,
      "loss": 0.4065,
      "step": 1280
    },
    {
      "epoch": 4.2,
      "learning_rate": 1.6494845360824743e-05,
      "loss": 0.3311,
      "step": 1344
    },
    {
      "epoch": 4.4,
      "learning_rate": 1.2371134020618558e-05,
      "loss": 0.3239,
      "step": 1408
    },
    {
      "epoch": 4.6,
      "learning_rate": 8.247422680412371e-06,
      "loss": 0.322,
      "step": 1472
    },
    {
      "epoch": 4.8,
      "learning_rate": 4.123711340206186e-06,
      "loss": 0.3261,
      "step": 1536
    }
  ],
  "logging_steps": 64,
  "max_steps": 1600,
  "num_train_epochs": 5,
  "save_steps": 1599,
  "total_flos": 1.2482695900296806e+17,
  "trial_name": null,
  "trial_params": null
}
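
The JSON above follows the Hugging Face Trainer state format, where log_history records one entry per logging interval (every 64 steps here). A minimal sketch of reading and summarizing it is given below; it assumes the file is saved under its conventional name trainer_state.json and uses only the standard library, and the script itself is illustrative rather than part of the repository.

# A minimal sketch (not part of the original file): load the state file
# and print the logged training-loss curve.
import json

with open("trainer_state.json") as f:  # assumed file name
    state = json.load(f)

print(f"epochs: {state['num_train_epochs']}, "
      f"global step: {state['global_step']}/{state['max_steps']}")

# Each record in log_history carries the step, epoch, learning rate,
# and running training loss at that logging step.
for record in state["log_history"]:
    print(f"step {record['step']:>5}  epoch {record['epoch']:.1f}  "
          f"lr {record['learning_rate']:.3e}  loss {record['loss']:.4f}")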