|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 35,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.14285714285714285,
      "grad_norm": 0.0,
      "learning_rate": 0,
      "loss": 3.1568,
      "step": 1
    },
    {
      "epoch": 0.2857142857142857,
      "grad_norm": 0.0,
      "learning_rate": 0,
      "loss": 2.0929,
      "step": 2
    },
    {
      "epoch": 0.42857142857142855,
      "grad_norm": 0.0,
      "learning_rate": 0,
      "loss": 1.8548,
      "step": 3
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 0.0,
      "learning_rate": 0,
      "loss": 1.9561,
      "step": 4
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 31.57492020476853,
      "learning_rate": 0.0,
      "loss": 2.158,
      "step": 5
    },
    {
      "epoch": 0.8571428571428571,
      "grad_norm": 31.153859925878365,
      "learning_rate": 2e-05,
      "loss": 2.1095,
      "step": 6
    },
    {
      "epoch": 1.0,
      "grad_norm": 27.11565016529603,
      "learning_rate": 2e-05,
      "loss": 1.9969,
      "step": 7
    },
    {
      "epoch": 1.1428571428571428,
      "grad_norm": 11.963172987739094,
      "learning_rate": 1.9393939393939395e-05,
      "loss": 1.8882,
      "step": 8
    },
    {
      "epoch": 1.2857142857142856,
      "grad_norm": 12.547776129299391,
      "learning_rate": 1.8787878787878792e-05,
      "loss": 2.2821,
      "step": 9
    },
    {
      "epoch": 1.4285714285714286,
      "grad_norm": 13.995949840524457,
      "learning_rate": 1.8181818181818182e-05,
      "loss": 1.9093,
      "step": 10
    },
    {
      "epoch": 1.5714285714285714,
      "grad_norm": 32.10247036468728,
      "learning_rate": 1.7575757575757576e-05,
      "loss": 1.7738,
      "step": 11
    },
    {
      "epoch": 1.7142857142857144,
      "grad_norm": 12.920575409822058,
      "learning_rate": 1.6969696969696972e-05,
      "loss": 1.9714,
      "step": 12
    },
    {
      "epoch": 1.8571428571428572,
      "grad_norm": 12.06286747031755,
      "learning_rate": 1.6363636363636366e-05,
      "loss": 1.9296,
      "step": 13
    },
    {
      "epoch": 2.0,
      "grad_norm": 8.907029074340802,
      "learning_rate": 1.575757575757576e-05,
      "loss": 1.8614,
      "step": 14
    },
    {
      "epoch": 2.142857142857143,
      "grad_norm": 6.73971706849257,
      "learning_rate": 1.5151515151515153e-05,
      "loss": 1.6771,
      "step": 15
    },
    {
      "epoch": 2.2857142857142856,
      "grad_norm": 17.60219359166014,
      "learning_rate": 1.4545454545454546e-05,
      "loss": 1.8053,
      "step": 16
    },
    {
      "epoch": 2.4285714285714284,
      "grad_norm": 16.964715579903814,
      "learning_rate": 1.3939393939393942e-05,
      "loss": 1.6802,
      "step": 17
    },
    {
      "epoch": 2.571428571428571,
      "grad_norm": 17.69250115636477,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 1.7371,
      "step": 18
    },
    {
      "epoch": 2.7142857142857144,
      "grad_norm": 15.007342258217257,
      "learning_rate": 1.2727272727272728e-05,
      "loss": 1.8895,
      "step": 19
    },
    {
      "epoch": 2.857142857142857,
      "grad_norm": 12.014421697915866,
      "learning_rate": 1.2121212121212122e-05,
      "loss": 1.74,
      "step": 20
    },
    {
      "epoch": 3.0,
      "grad_norm": 6.420645268892007,
      "learning_rate": 1.1515151515151517e-05,
      "loss": 1.646,
      "step": 21
    },
    {
      "epoch": 3.142857142857143,
      "grad_norm": 9.579171202930794,
      "learning_rate": 1.0909090909090909e-05,
      "loss": 1.783,
      "step": 22
    },
    {
      "epoch": 3.2857142857142856,
      "grad_norm": 9.480870311077393,
      "learning_rate": 1.0303030303030304e-05,
      "loss": 1.6975,
      "step": 23
    },
    {
      "epoch": 3.4285714285714284,
      "grad_norm": 11.085052433397502,
      "learning_rate": 9.696969696969698e-06,
      "loss": 1.5964,
      "step": 24
    },
    {
      "epoch": 3.571428571428571,
      "grad_norm": 10.405891481014418,
      "learning_rate": 9.090909090909091e-06,
      "loss": 1.8499,
      "step": 25
    },
    {
      "epoch": 3.7142857142857144,
      "grad_norm": 7.19377512459299,
      "learning_rate": 8.484848484848486e-06,
      "loss": 1.9397,
      "step": 26
    },
    {
      "epoch": 3.857142857142857,
      "grad_norm": 20.100305416386867,
      "learning_rate": 7.87878787878788e-06,
      "loss": 1.6575,
      "step": 27
    },
    {
      "epoch": 4.0,
      "grad_norm": 6.598534461792029,
      "learning_rate": 7.272727272727273e-06,
      "loss": 1.8201,
      "step": 28
    },
    {
      "epoch": 4.142857142857143,
      "grad_norm": 7.697231181333111,
      "learning_rate": 6.666666666666667e-06,
      "loss": 1.6547,
      "step": 29
    },
    {
      "epoch": 4.285714285714286,
      "grad_norm": 5.6666359994095865,
      "learning_rate": 6.060606060606061e-06,
      "loss": 1.6578,
      "step": 30
    },
    {
      "epoch": 4.428571428571429,
      "grad_norm": 9.64546505010074,
      "learning_rate": 5.4545454545454545e-06,
      "loss": 1.5213,
      "step": 31
    },
    {
      "epoch": 4.571428571428571,
      "grad_norm": 7.511399316368516,
      "learning_rate": 4.848484848484849e-06,
      "loss": 1.7394,
      "step": 32
    },
    {
      "epoch": 4.714285714285714,
      "grad_norm": 11.799130579067045,
      "learning_rate": 4.242424242424243e-06,
      "loss": 1.7876,
      "step": 33
    },
    {
      "epoch": 4.857142857142857,
      "grad_norm": 8.101541311297026,
      "learning_rate": 3.6363636363636366e-06,
      "loss": 1.7558,
      "step": 34
    },
    {
      "epoch": 5.0,
      "grad_norm": 11.45990563904692,
      "learning_rate": 3.0303030303030305e-06,
      "loss": 1.8384,
      "step": 35
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 35,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 0.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}
|
|