{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "global_step": 27665,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.09,
      "learning_rate": 2.9457798662570034e-05,
      "loss": 1.8933,
      "step": 500
    },
    {
      "epoch": 0.18,
      "learning_rate": 2.891559732514007e-05,
      "loss": 1.2883,
      "step": 1000
    },
    {
      "epoch": 0.27,
      "learning_rate": 2.83733959877101e-05,
      "loss": 1.1257,
      "step": 1500
    },
    {
      "epoch": 0.36,
      "learning_rate": 2.783119465028014e-05,
      "loss": 1.0502,
      "step": 2000
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.7288993312850172e-05,
      "loss": 1.0699,
      "step": 2500
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.6746791975420206e-05,
      "loss": 0.9881,
      "step": 3000
    },
    {
      "epoch": 0.63,
      "learning_rate": 2.6204590637990243e-05,
      "loss": 0.9836,
      "step": 3500
    },
    {
      "epoch": 0.72,
      "learning_rate": 2.5662389300560273e-05,
      "loss": 0.9681,
      "step": 4000
    },
    {
      "epoch": 0.81,
      "learning_rate": 2.512018796313031e-05,
      "loss": 0.969,
      "step": 4500
    },
    {
      "epoch": 0.9,
      "learning_rate": 2.4577986625700344e-05,
      "loss": 0.9503,
      "step": 5000
    },
    {
      "epoch": 0.99,
      "learning_rate": 2.4035785288270377e-05,
      "loss": 0.9695,
      "step": 5500
    },
    {
      "epoch": 1.08,
      "learning_rate": 2.3493583950840414e-05,
      "loss": 0.636,
      "step": 6000
    },
    {
      "epoch": 1.17,
      "learning_rate": 2.2951382613410445e-05,
      "loss": 0.6444,
      "step": 6500
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.2409181275980482e-05,
      "loss": 0.6331,
      "step": 7000
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.1866979938550515e-05,
      "loss": 0.6488,
      "step": 7500
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.132477860112055e-05,
      "loss": 0.6312,
      "step": 8000
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.0782577263690586e-05,
      "loss": 0.6953,
      "step": 8500
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.024037592626062e-05,
      "loss": 0.5981,
      "step": 9000
    },
    {
      "epoch": 1.72,
      "learning_rate": 1.9698174588830653e-05,
      "loss": 0.6499,
      "step": 9500
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.9155973251400687e-05,
      "loss": 0.6395,
      "step": 10000
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.861377191397072e-05,
      "loss": 0.6079,
      "step": 10500
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.8071570576540758e-05,
      "loss": 0.6615,
      "step": 11000
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.752936923911079e-05,
      "loss": 0.3813,
      "step": 11500
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.6987167901680825e-05,
      "loss": 0.3715,
      "step": 12000
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.644496656425086e-05,
      "loss": 0.3573,
      "step": 12500
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.5902765226820892e-05,
      "loss": 0.3673,
      "step": 13000
    },
    {
      "epoch": 2.44,
      "learning_rate": 1.536056388939093e-05,
      "loss": 0.3802,
      "step": 13500
    },
    {
      "epoch": 2.53,
      "learning_rate": 1.4818362551960961e-05,
      "loss": 0.3911,
      "step": 14000
    },
    {
      "epoch": 2.62,
      "learning_rate": 1.4276161214530996e-05,
      "loss": 0.3691,
      "step": 14500
    },
    {
      "epoch": 2.71,
      "learning_rate": 1.373395987710103e-05,
      "loss": 0.3998,
      "step": 15000
    },
    {
      "epoch": 2.8,
      "learning_rate": 1.3191758539671065e-05,
      "loss": 0.3819,
      "step": 15500
    },
    {
      "epoch": 2.89,
      "learning_rate": 1.26495572022411e-05,
      "loss": 0.3748,
      "step": 16000
    },
    {
      "epoch": 2.98,
      "learning_rate": 1.2107355864811133e-05,
      "loss": 0.391,
      "step": 16500
    },
    {
      "epoch": 3.07,
      "learning_rate": 1.1565154527381168e-05,
      "loss": 0.2517,
      "step": 17000
    },
    {
      "epoch": 3.16,
      "learning_rate": 1.1022953189951202e-05,
      "loss": 0.2038,
      "step": 17500
    },
    {
      "epoch": 3.25,
      "learning_rate": 1.0480751852521237e-05,
      "loss": 0.1975,
      "step": 18000
    },
    {
      "epoch": 3.34,
      "learning_rate": 9.938550515091272e-06,
      "loss": 0.21,
      "step": 18500
    },
    {
      "epoch": 3.43,
      "learning_rate": 9.396349177661304e-06,
      "loss": 0.2048,
      "step": 19000
    },
    {
      "epoch": 3.52,
      "learning_rate": 8.85414784023134e-06,
      "loss": 0.212,
      "step": 19500
    },
    {
      "epoch": 3.61,
      "learning_rate": 8.311946502801373e-06,
      "loss": 0.2126,
      "step": 20000
    },
    {
      "epoch": 3.71,
      "learning_rate": 7.769745165371408e-06,
      "loss": 0.2084,
      "step": 20500
    },
    {
      "epoch": 3.8,
      "learning_rate": 7.227543827941442e-06,
      "loss": 0.2097,
      "step": 21000
    },
    {
      "epoch": 3.89,
      "learning_rate": 6.685342490511477e-06,
      "loss": 0.2295,
      "step": 21500
    },
    {
      "epoch": 3.98,
      "learning_rate": 6.143141153081511e-06,
      "loss": 0.2157,
      "step": 22000
    },
    {
      "epoch": 4.07,
      "learning_rate": 5.6009398156515455e-06,
      "loss": 0.157,
      "step": 22500
    },
    {
      "epoch": 4.16,
      "learning_rate": 5.058738478221579e-06,
      "loss": 0.0847,
      "step": 23000
    },
    {
      "epoch": 4.25,
      "learning_rate": 4.516537140791614e-06,
      "loss": 0.0945,
      "step": 23500
    },
    {
      "epoch": 4.34,
      "learning_rate": 3.974335803361649e-06,
      "loss": 0.1156,
      "step": 24000
    },
    {
      "epoch": 4.43,
      "learning_rate": 3.4321344659316826e-06,
      "loss": 0.0965,
      "step": 24500
    },
    {
      "epoch": 4.52,
      "learning_rate": 2.889933128501717e-06,
      "loss": 0.1191,
      "step": 25000
    },
    {
      "epoch": 4.61,
      "learning_rate": 2.3477317910717515e-06,
      "loss": 0.0982,
      "step": 25500
    },
    {
      "epoch": 4.7,
      "learning_rate": 1.8055304536417856e-06,
      "loss": 0.1145,
      "step": 26000
    },
    {
      "epoch": 4.79,
      "learning_rate": 1.26332911621182e-06,
      "loss": 0.0894,
      "step": 26500
    },
    {
      "epoch": 4.88,
      "learning_rate": 7.211277787818543e-07,
      "loss": 0.0939,
      "step": 27000
    },
    {
      "epoch": 4.97,
      "learning_rate": 1.7892644135188868e-07,
      "loss": 0.0916,
      "step": 27500
    },
    {
      "epoch": 5.0,
      "step": 27665,
      "total_flos": 3.407082354156503e+17,
      "train_runtime": 30959.6058,
      "train_samples_per_second": 14.297,
      "train_steps_per_second": 0.894
    }
  ],
  "max_steps": 27665,
  "num_train_epochs": 5,
  "total_flos": 3.407082354156503e+17,
  "trial_name": null,
  "trial_params": null
}