{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 50.0,
  "eval_steps": 10000,
  "global_step": 126850,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 1.9999842333464724e-05,
      "loss": 0.2456,
      "step": 1
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9842333464722114e-05,
      "loss": 0.1742,
      "step": 1000
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.9685297595585336e-05,
      "loss": 0.1345,
      "step": 2000
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.952763106030745e-05,
      "loss": 0.0825,
      "step": 3000
    },
    {
      "epoch": 1.58,
      "learning_rate": 1.9370122191564842e-05,
      "loss": 0.0691,
      "step": 4000
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.9212455656286955e-05,
      "loss": 0.0593,
      "step": 5000
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.9054789121009067e-05,
      "loss": 0.0559,
      "step": 6000
    },
    {
      "epoch": 2.76,
      "learning_rate": 1.889712258573118e-05,
      "loss": 0.0505,
      "step": 7000
    },
    {
      "epoch": 3.15,
      "learning_rate": 1.8739456050453293e-05,
      "loss": 0.0585,
      "step": 8000
    },
    {
      "epoch": 3.55,
      "learning_rate": 1.8581789515175405e-05,
      "loss": 0.0496,
      "step": 9000
    },
    {
      "epoch": 3.94,
      "learning_rate": 1.84242806464328e-05,
      "loss": 0.0504,
      "step": 10000
    },
    {
      "epoch": 3.94,
      "eval_accuracy": 0.9750283768444948,
      "eval_f1": 0.9112903225806451,
      "eval_loss": 0.02506309188902378,
      "eval_precision": 0.8692307692307693,
      "eval_recall": 0.9576271186440678,
      "eval_runtime": 6.3761,
      "eval_samples_per_second": 276.344,
      "eval_steps_per_second": 34.661,
      "step": 10000
    },
    {
      "epoch": 4.34,
      "learning_rate": 1.8266614111154908e-05,
      "loss": 0.0442,
      "step": 11000
    },
    {
      "epoch": 4.73,
      "learning_rate": 1.8109105242412298e-05,
      "loss": 0.0467,
      "step": 12000
    },
    {
      "epoch": 5.12,
      "learning_rate": 1.795159637366969e-05,
      "loss": 0.0448,
      "step": 13000
    },
    {
      "epoch": 5.52,
      "learning_rate": 1.7793929838391804e-05,
      "loss": 0.0418,
      "step": 14000
    },
    {
      "epoch": 5.91,
      "learning_rate": 1.7636263303113916e-05,
      "loss": 0.0339,
      "step": 15000
    },
    {
      "epoch": 6.31,
      "learning_rate": 1.747859676783603e-05,
      "loss": 0.0385,
      "step": 16000
    },
    {
      "epoch": 6.7,
      "learning_rate": 1.732093023255814e-05,
      "loss": 0.0425,
      "step": 17000
    },
    {
      "epoch": 7.09,
      "learning_rate": 1.716342136381553e-05,
      "loss": 0.0413,
      "step": 18000
    },
    {
      "epoch": 7.49,
      "learning_rate": 1.7005754828537644e-05,
      "loss": 0.039,
      "step": 19000
    },
    {
      "epoch": 7.88,
      "learning_rate": 1.6848088293259757e-05,
      "loss": 0.0398,
      "step": 20000
    },
    {
      "epoch": 7.88,
      "eval_accuracy": 0.9750283768444948,
      "eval_f1": 0.9126984126984127,
      "eval_loss": 0.02478128857910633,
      "eval_precision": 0.8582089552238806,
      "eval_recall": 0.9745762711864406,
      "eval_runtime": 6.0449,
      "eval_samples_per_second": 291.487,
      "eval_steps_per_second": 36.56,
      "step": 20000
    },
    {
      "epoch": 8.28,
      "learning_rate": 1.6690737091052424e-05,
      "loss": 0.0347,
      "step": 21000
    },
    {
      "epoch": 8.67,
      "learning_rate": 1.6533070555774536e-05,
      "loss": 0.0428,
      "step": 22000
    },
    {
      "epoch": 9.07,
      "learning_rate": 1.637540402049665e-05,
      "loss": 0.0391,
      "step": 23000
    },
    {
      "epoch": 9.46,
      "learning_rate": 1.6217895151754042e-05,
      "loss": 0.0347,
      "step": 24000
    },
    {
      "epoch": 9.85,
      "learning_rate": 1.6060228616476155e-05,
      "loss": 0.0336,
      "step": 25000
    },
    {
      "epoch": 10.25,
      "learning_rate": 1.5902562081198268e-05,
      "loss": 0.033,
      "step": 26000
    },
    {
      "epoch": 10.64,
      "learning_rate": 1.574489554592038e-05,
      "loss": 0.0318,
      "step": 27000
    },
    {
      "epoch": 11.04,
      "learning_rate": 1.5587229010642493e-05,
      "loss": 0.0338,
      "step": 28000
    },
    {
      "epoch": 11.43,
      "learning_rate": 1.5429562475364605e-05,
      "loss": 0.0315,
      "step": 29000
    },
    {
      "epoch": 11.82,
      "learning_rate": 1.5272053606621995e-05,
      "loss": 0.0271,
      "step": 30000
    },
    {
      "epoch": 11.82,
      "eval_accuracy": 0.9903518728717366,
      "eval_f1": 0.9649484536082474,
      "eval_loss": 0.010149164125323296,
      "eval_precision": 0.9397590361445783,
      "eval_recall": 0.9915254237288136,
      "eval_runtime": 6.3243,
      "eval_samples_per_second": 278.608,
      "eval_steps_per_second": 34.945,
      "step": 30000
    },
    {
      "epoch": 12.22,
      "learning_rate": 1.5114387071344108e-05,
      "loss": 0.0343,
      "step": 31000
    },
    {
      "epoch": 12.61,
      "learning_rate": 1.49568782026015e-05,
      "loss": 0.0311,
      "step": 32000
    },
    {
      "epoch": 13.01,
      "learning_rate": 1.4799369333858891e-05,
      "loss": 0.0278,
      "step": 33000
    },
    {
      "epoch": 13.4,
      "learning_rate": 1.4641702798581004e-05,
      "loss": 0.027,
      "step": 34000
    },
    {
      "epoch": 13.8,
      "learning_rate": 1.4484036263303116e-05,
      "loss": 0.0285,
      "step": 35000
    },
    {
      "epoch": 14.19,
      "learning_rate": 1.4326527394560506e-05,
      "loss": 0.0303,
      "step": 36000
    },
    {
      "epoch": 14.58,
      "learning_rate": 1.4168860859282619e-05,
      "loss": 0.0253,
      "step": 37000
    },
    {
      "epoch": 14.98,
      "learning_rate": 1.401135199054001e-05,
      "loss": 0.0252,
      "step": 38000
    },
    {
      "epoch": 15.37,
      "learning_rate": 1.3853685455262123e-05,
      "loss": 0.027,
      "step": 39000
    },
    {
      "epoch": 15.77,
      "learning_rate": 1.3696018919984236e-05,
      "loss": 0.0257,
      "step": 40000
    },
    {
      "epoch": 15.77,
      "eval_accuracy": 0.9841089670828603,
      "eval_f1": 0.940677966101695,
      "eval_loss": 0.031208263710141182,
      "eval_precision": 0.940677966101695,
      "eval_recall": 0.940677966101695,
      "eval_runtime": 6.3572,
      "eval_samples_per_second": 277.168,
      "eval_steps_per_second": 34.764,
      "step": 40000
    },
    {
      "epoch": 16.16,
      "learning_rate": 1.3538352384706348e-05,
      "loss": 0.0252,
      "step": 41000
    },
    {
      "epoch": 16.55,
      "learning_rate": 1.3381001182499016e-05,
      "loss": 0.0272,
      "step": 42000
    },
    {
      "epoch": 16.95,
      "learning_rate": 1.3223334647221128e-05,
      "loss": 0.023,
      "step": 43000
    },
    {
      "epoch": 17.34,
      "learning_rate": 1.306566811194324e-05,
      "loss": 0.0254,
      "step": 44000
    },
    {
      "epoch": 17.74,
      "learning_rate": 1.290815924320063e-05,
      "loss": 0.0229,
      "step": 45000
    },
    {
      "epoch": 18.13,
      "learning_rate": 1.2750492707922743e-05,
      "loss": 0.0241,
      "step": 46000
    },
    {
      "epoch": 18.53,
      "learning_rate": 1.2592826172644856e-05,
      "loss": 0.0234,
      "step": 47000
    },
    {
      "epoch": 18.92,
      "learning_rate": 1.2435317303902248e-05,
      "loss": 0.0237,
      "step": 48000
    },
    {
      "epoch": 19.31,
      "learning_rate": 1.227765076862436e-05,
      "loss": 0.023,
      "step": 49000
    },
    {
      "epoch": 19.71,
      "learning_rate": 1.2119984233346473e-05,
      "loss": 0.0269,
      "step": 50000
    },
    {
      "epoch": 19.71,
      "eval_accuracy": 0.9948921679909194,
      "eval_f1": 0.9809725158562368,
      "eval_loss": 0.00856984406709671,
      "eval_precision": 0.9789029535864979,
      "eval_recall": 0.9830508474576272,
      "eval_runtime": 6.0459,
      "eval_samples_per_second": 291.435,
      "eval_steps_per_second": 36.553,
      "step": 50000
    },
    {
      "epoch": 20.1,
      "learning_rate": 1.1962317698068587e-05,
      "loss": 0.0213,
      "step": 51000
    },
    {
      "epoch": 20.5,
      "learning_rate": 1.18046511627907e-05,
      "loss": 0.0223,
      "step": 52000
    },
    {
      "epoch": 20.89,
      "learning_rate": 1.1647142294048088e-05,
      "loss": 0.0247,
      "step": 53000
    },
    {
      "epoch": 21.28,
      "learning_rate": 1.148963342530548e-05,
      "loss": 0.0181,
      "step": 54000
    },
    {
      "epoch": 21.68,
      "learning_rate": 1.1331966890027592e-05,
      "loss": 0.0198,
      "step": 55000
    },
    {
      "epoch": 22.07,
      "learning_rate": 1.1174300354749705e-05,
      "loss": 0.0216,
      "step": 56000
    },
    {
      "epoch": 22.47,
      "learning_rate": 1.1016791486007096e-05,
      "loss": 0.0214,
      "step": 57000
    },
    {
      "epoch": 22.86,
      "learning_rate": 1.0859124950729209e-05,
      "loss": 0.0208,
      "step": 58000
    },
    {
      "epoch": 23.26,
      "learning_rate": 1.0701773748521878e-05,
      "loss": 0.0174,
      "step": 59000
    },
    {
      "epoch": 23.65,
      "learning_rate": 1.054410721324399e-05,
      "loss": 0.0193,
      "step": 60000
    },
    {
      "epoch": 23.65,
      "eval_accuracy": 0.9943246311010215,
      "eval_f1": 0.9789029535864979,
      "eval_loss": 0.010104711167514324,
      "eval_precision": 0.9747899159663865,
      "eval_recall": 0.9830508474576272,
      "eval_runtime": 6.1461,
      "eval_samples_per_second": 286.688,
      "eval_steps_per_second": 35.958,
      "step": 60000
    },
    {
      "epoch": 24.04,
      "learning_rate": 1.0386440677966103e-05,
      "loss": 0.0204,
      "step": 61000
    },
    {
      "epoch": 24.44,
      "learning_rate": 1.0228774142688216e-05,
      "loss": 0.0189,
      "step": 62000
    },
    {
      "epoch": 24.83,
      "learning_rate": 1.0071107607410328e-05,
      "loss": 0.0158,
      "step": 63000
    },
    {
      "epoch": 25.23,
      "learning_rate": 9.913598738667718e-06,
      "loss": 0.022,
      "step": 64000
    },
    {
      "epoch": 25.62,
      "learning_rate": 9.755932203389833e-06,
      "loss": 0.0176,
      "step": 65000
    },
    {
      "epoch": 26.01,
      "learning_rate": 9.598265668111943e-06,
      "loss": 0.0176,
      "step": 66000
    },
    {
      "epoch": 26.41,
      "learning_rate": 9.440599132834056e-06,
      "loss": 0.0164,
      "step": 67000
    },
    {
      "epoch": 26.8,
      "learning_rate": 9.283090264091448e-06,
      "loss": 0.0196,
      "step": 68000
    },
    {
      "epoch": 27.2,
      "learning_rate": 9.125581395348838e-06,
      "loss": 0.0168,
      "step": 69000
    },
    {
      "epoch": 27.59,
      "learning_rate": 8.96791486007095e-06,
      "loss": 0.0167,
      "step": 70000
    },
    {
      "epoch": 27.59,
      "eval_accuracy": 0.9960272417707151,
      "eval_f1": 0.9849462365591398,
      "eval_loss": 0.015500849112868309,
      "eval_precision": 1.0,
      "eval_recall": 0.9703389830508474,
      "eval_runtime": 6.0236,
      "eval_samples_per_second": 292.516,
      "eval_steps_per_second": 36.689,
      "step": 70000
    },
    {
      "epoch": 27.99,
      "learning_rate": 8.810248324793063e-06,
      "loss": 0.018,
      "step": 71000
    },
    {
      "epoch": 28.38,
      "learning_rate": 8.652739456050455e-06,
      "loss": 0.0133,
      "step": 72000
    },
    {
      "epoch": 28.77,
      "learning_rate": 8.495388253843122e-06,
      "loss": 0.0187,
      "step": 73000
    },
    {
      "epoch": 29.17,
      "learning_rate": 8.337721718565234e-06,
      "loss": 0.0167,
      "step": 74000
    },
    {
      "epoch": 29.56,
      "learning_rate": 8.180055183287347e-06,
      "loss": 0.0151,
      "step": 75000
    },
    {
      "epoch": 29.96,
      "learning_rate": 8.02238864800946e-06,
      "loss": 0.0127,
      "step": 76000
    },
    {
      "epoch": 30.35,
      "learning_rate": 7.864722112731574e-06,
      "loss": 0.017,
      "step": 77000
    },
    {
      "epoch": 30.74,
      "learning_rate": 7.707213243988964e-06,
      "loss": 0.0148,
      "step": 78000
    },
    {
      "epoch": 31.14,
      "learning_rate": 7.5495467087110764e-06,
      "loss": 0.0136,
      "step": 79000
    },
    {
      "epoch": 31.53,
      "learning_rate": 7.391880173433189e-06,
      "loss": 0.0141,
      "step": 80000
    },
    {
      "epoch": 31.53,
      "eval_accuracy": 0.9954597048808173,
      "eval_f1": 0.9829059829059829,
      "eval_loss": 0.012631294317543507,
      "eval_precision": 0.9913793103448276,
      "eval_recall": 0.9745762711864406,
      "eval_runtime": 6.0398,
      "eval_samples_per_second": 291.729,
      "eval_steps_per_second": 36.59,
      "step": 80000
    },
    {
      "epoch": 31.93,
      "learning_rate": 7.23437130469058e-06,
      "loss": 0.0156,
      "step": 81000
    },
    {
      "epoch": 32.32,
      "learning_rate": 7.0767047694126924e-06,
      "loss": 0.0136,
      "step": 82000
    },
    {
      "epoch": 32.72,
      "learning_rate": 6.919038234134805e-06,
      "loss": 0.013,
      "step": 83000
    },
    {
      "epoch": 33.11,
      "learning_rate": 6.761371698856918e-06,
      "loss": 0.0093,
      "step": 84000
    },
    {
      "epoch": 33.5,
      "learning_rate": 6.603862830114309e-06,
      "loss": 0.0121,
      "step": 85000
    },
    {
      "epoch": 33.9,
      "learning_rate": 6.4463539613717e-06,
      "loss": 0.0136,
      "step": 86000
    },
    {
      "epoch": 34.29,
      "learning_rate": 6.288687426093813e-06,
      "loss": 0.013,
      "step": 87000
    },
    {
      "epoch": 34.69,
      "learning_rate": 6.131178557351203e-06,
      "loss": 0.0135,
      "step": 88000
    },
    {
      "epoch": 35.08,
      "learning_rate": 5.973512022073315e-06,
      "loss": 0.016,
      "step": 89000
    },
    {
      "epoch": 35.47,
      "learning_rate": 5.815845486795429e-06,
      "loss": 0.0144,
      "step": 90000
    },
    {
      "epoch": 35.47,
      "eval_accuracy": 0.996594778660613,
      "eval_f1": 0.9872340425531915,
      "eval_loss": 0.009375466965138912,
      "eval_precision": 0.9914529914529915,
      "eval_recall": 0.9830508474576272,
      "eval_runtime": 6.361,
      "eval_samples_per_second": 276.999,
      "eval_steps_per_second": 34.743,
      "step": 90000
    },
    {
      "epoch": 35.87,
      "learning_rate": 5.658336618052819e-06,
      "loss": 0.0155,
      "step": 91000
    },
    {
      "epoch": 36.26,
      "learning_rate": 5.500670082774931e-06,
      "loss": 0.0126,
      "step": 92000
    },
    {
      "epoch": 36.66,
      "learning_rate": 5.343161214032322e-06,
      "loss": 0.0094,
      "step": 93000
    },
    {
      "epoch": 37.05,
      "learning_rate": 5.185494678754435e-06,
      "loss": 0.0106,
      "step": 94000
    },
    {
      "epoch": 37.45,
      "learning_rate": 5.027828143476547e-06,
      "loss": 0.0115,
      "step": 95000
    },
    {
      "epoch": 37.84,
      "learning_rate": 4.870161608198661e-06,
      "loss": 0.0087,
      "step": 96000
    },
    {
      "epoch": 38.23,
      "learning_rate": 4.712495072920773e-06,
      "loss": 0.0126,
      "step": 97000
    },
    {
      "epoch": 38.63,
      "learning_rate": 4.554986204178163e-06,
      "loss": 0.009,
      "step": 98000
    },
    {
      "epoch": 39.02,
      "learning_rate": 4.397319668900277e-06,
      "loss": 0.0129,
      "step": 99000
    },
    {
      "epoch": 39.42,
      "learning_rate": 4.239653133622389e-06,
      "loss": 0.009,
      "step": 100000
    },
    {
      "epoch": 39.42,
      "eval_accuracy": 0.996594778660613,
      "eval_f1": 0.9872881355932204,
      "eval_loss": 0.008661070838570595,
      "eval_precision": 0.9872881355932204,
      "eval_recall": 0.9872881355932204,
      "eval_runtime": 6.0434,
      "eval_samples_per_second": 291.559,
      "eval_steps_per_second": 36.569,
      "step": 100000
    },
    {
      "epoch": 39.81,
      "learning_rate": 4.081986598344502e-06,
      "loss": 0.0066,
      "step": 101000
    },
    {
      "epoch": 40.2,
      "learning_rate": 3.924477729601892e-06,
      "loss": 0.0116,
      "step": 102000
    },
    {
      "epoch": 40.6,
      "learning_rate": 3.7668111943240047e-06,
      "loss": 0.0099,
      "step": 103000
    },
    {
      "epoch": 40.99,
      "learning_rate": 3.6091446590461178e-06,
      "loss": 0.0087,
      "step": 104000
    },
    {
      "epoch": 41.39,
      "learning_rate": 3.4516357903035086e-06,
      "loss": 0.0092,
      "step": 105000
    },
    {
      "epoch": 41.78,
      "learning_rate": 3.2939692550256207e-06,
      "loss": 0.0098,
      "step": 106000
    },
    {
      "epoch": 42.18,
      "learning_rate": 3.1363027197477337e-06,
      "loss": 0.0092,
      "step": 107000
    },
    {
      "epoch": 42.57,
      "learning_rate": 2.9787938510051245e-06,
      "loss": 0.0111,
      "step": 108000
    },
    {
      "epoch": 42.96,
      "learning_rate": 2.821127315727237e-06,
      "loss": 0.0077,
      "step": 109000
    },
    {
      "epoch": 43.36,
      "learning_rate": 2.66346078044935e-06,
      "loss": 0.0084,
      "step": 110000
    },
    {
      "epoch": 43.36,
      "eval_accuracy": 0.9954597048808173,
      "eval_f1": 0.9828326180257511,
      "eval_loss": 0.018768297508358955,
      "eval_precision": 0.9956521739130435,
      "eval_recall": 0.9703389830508474,
      "eval_runtime": 5.9996,
      "eval_samples_per_second": 293.685,
      "eval_steps_per_second": 36.836,
      "step": 110000
    },
    {
      "epoch": 43.75,
      "learning_rate": 2.5059519117067405e-06,
      "loss": 0.0086,
      "step": 111000
    },
    {
      "epoch": 44.15,
      "learning_rate": 2.348285376428853e-06,
      "loss": 0.0092,
      "step": 112000
    },
    {
      "epoch": 44.54,
      "learning_rate": 2.190618841150966e-06,
      "loss": 0.0099,
      "step": 113000
    },
    {
      "epoch": 44.93,
      "learning_rate": 2.0331099724083565e-06,
      "loss": 0.0101,
      "step": 114000
    },
    {
      "epoch": 45.33,
      "learning_rate": 1.8754434371304691e-06,
      "loss": 0.0079,
      "step": 115000
    },
    {
      "epoch": 45.72,
      "learning_rate": 1.71793456838786e-06,
      "loss": 0.0102,
      "step": 116000
    },
    {
      "epoch": 46.12,
      "learning_rate": 1.5602680331099725e-06,
      "loss": 0.0086,
      "step": 117000
    },
    {
      "epoch": 46.51,
      "learning_rate": 1.4026014978320853e-06,
      "loss": 0.0097,
      "step": 118000
    },
    {
      "epoch": 46.91,
      "learning_rate": 1.2449349625541981e-06,
      "loss": 0.0048,
      "step": 119000
    },
    {
      "epoch": 47.3,
      "learning_rate": 1.0872684272763107e-06,
      "loss": 0.0112,
      "step": 120000
    },
    {
      "epoch": 47.3,
      "eval_accuracy": 0.996594778660613,
      "eval_f1": 0.9872881355932204,
      "eval_loss": 0.008344221860170364,
      "eval_precision": 0.9872881355932204,
      "eval_recall": 0.9872881355932204,
      "eval_runtime": 6.0461,
      "eval_samples_per_second": 291.429,
      "eval_steps_per_second": 36.553,
      "step": 120000
    },
    {
      "epoch": 47.69,
      "learning_rate": 9.297595585337013e-07,
      "loss": 0.0069,
      "step": 121000
    },
    {
      "epoch": 48.09,
      "learning_rate": 7.72250689791092e-07,
      "loss": 0.0103,
      "step": 122000
    },
    {
      "epoch": 48.48,
      "learning_rate": 6.145841545132046e-07,
      "loss": 0.0079,
      "step": 123000
    },
    {
      "epoch": 48.88,
      "learning_rate": 4.5691761923531733e-07,
      "loss": 0.0061,
      "step": 124000
    },
    {
      "epoch": 49.27,
      "learning_rate": 2.994087504927079e-07,
      "loss": 0.0081,
      "step": 125000
    },
    {
      "epoch": 49.66,
      "learning_rate": 1.4174221521482067e-07,
      "loss": 0.0101,
      "step": 126000
    }
  ],
  "logging_steps": 1000,
  "max_steps": 126850,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 50,
  "save_steps": 500,
  "total_flos": 1.0572380196462e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}