{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 94.3039502215518, |
|
"global_step": 250000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 8.2074, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 7.333, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6e-06, |
|
"loss": 6.7829, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 6.3812, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1e-05, |
|
"loss": 6.1122, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.2e-05, |
|
"loss": 5.8855, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.4000000000000001e-05, |
|
"loss": 5.6587, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 5.4578, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.8e-05, |
|
"loss": 5.2672, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 2e-05, |
|
"loss": 5.0883, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 2.2000000000000003e-05, |
|
"loss": 4.9455, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 2.4e-05, |
|
"loss": 4.7942, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 2.6000000000000002e-05, |
|
"loss": 4.6692, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 2.8000000000000003e-05, |
|
"loss": 4.5511, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 3e-05, |
|
"loss": 4.4469, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 3.2000000000000005e-05, |
|
"loss": 4.3566, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 3.4000000000000007e-05, |
|
"loss": 4.2571, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 3.6e-05, |
|
"loss": 4.1748, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 3.8e-05, |
|
"loss": 4.1022, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 4e-05, |
|
"loss": 4.027, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 4.2e-05, |
|
"loss": 3.9626, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 4.4000000000000006e-05, |
|
"loss": 3.9079, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 4.600000000000001e-05, |
|
"loss": 3.844, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 4.8e-05, |
|
"loss": 3.7907, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 5e-05, |
|
"loss": 3.7452, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"eval_loss": 3.5597941875457764, |
|
"eval_runtime": 298.4056, |
|
"eval_samples_per_second": 461.962, |
|
"eval_steps_per_second": 3.609, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 5.2000000000000004e-05, |
|
"loss": 3.7008, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 5.4000000000000005e-05, |
|
"loss": 3.6604, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 5.6000000000000006e-05, |
|
"loss": 3.6173, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"learning_rate": 5.8e-05, |
|
"loss": 3.5777, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 6e-05, |
|
"loss": 3.5396, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 6.2e-05, |
|
"loss": 3.5062, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 6.04, |
|
"learning_rate": 6.400000000000001e-05, |
|
"loss": 3.4766, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"learning_rate": 6.6e-05, |
|
"loss": 3.4386, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"learning_rate": 6.800000000000001e-05, |
|
"loss": 3.4131, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 7e-05, |
|
"loss": 3.388, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"learning_rate": 7.2e-05, |
|
"loss": 3.3536, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"learning_rate": 7.4e-05, |
|
"loss": 3.3314, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 7.17, |
|
"learning_rate": 7.6e-05, |
|
"loss": 3.3095, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 7.36, |
|
"learning_rate": 7.800000000000001e-05, |
|
"loss": 3.2802, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"learning_rate": 8e-05, |
|
"loss": 3.2583, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 7.73, |
|
"learning_rate": 8.2e-05, |
|
"loss": 3.2374, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 7.92, |
|
"learning_rate": 8.4e-05, |
|
"loss": 3.2223, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 8.11, |
|
"learning_rate": 8.6e-05, |
|
"loss": 3.1986, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 8.3, |
|
"learning_rate": 8.800000000000001e-05, |
|
"loss": 3.1782, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 8.49, |
|
"learning_rate": 9e-05, |
|
"loss": 3.1585, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 8.68, |
|
"learning_rate": 9.200000000000001e-05, |
|
"loss": 3.1438, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 8.86, |
|
"learning_rate": 9.4e-05, |
|
"loss": 3.1313, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 9.05, |
|
"learning_rate": 9.6e-05, |
|
"loss": 3.1168, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 9.24, |
|
"learning_rate": 9.8e-05, |
|
"loss": 3.092, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 9.43, |
|
"learning_rate": 0.0001, |
|
"loss": 3.0788, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 9.43, |
|
"eval_loss": 2.930635690689087, |
|
"eval_runtime": 283.6019, |
|
"eval_samples_per_second": 486.076, |
|
"eval_steps_per_second": 3.798, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 9.62, |
|
"learning_rate": 9.977777777777779e-05, |
|
"loss": 3.0672, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 9.81, |
|
"learning_rate": 9.955555555555556e-05, |
|
"loss": 3.0534, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"learning_rate": 9.933333333333334e-05, |
|
"loss": 3.0395, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 10.18, |
|
"learning_rate": 9.911111111111112e-05, |
|
"loss": 3.0276, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 10.37, |
|
"learning_rate": 9.888888888888889e-05, |
|
"loss": 3.0069, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 10.56, |
|
"learning_rate": 9.866666666666668e-05, |
|
"loss": 2.9995, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 10.75, |
|
"learning_rate": 9.844444444444444e-05, |
|
"loss": 2.9893, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 10.94, |
|
"learning_rate": 9.822222222222223e-05, |
|
"loss": 2.9776, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 11.13, |
|
"learning_rate": 9.8e-05, |
|
"loss": 2.9693, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 11.32, |
|
"learning_rate": 9.777777777777778e-05, |
|
"loss": 2.9528, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 11.5, |
|
"learning_rate": 9.755555555555555e-05, |
|
"loss": 2.9467, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 11.69, |
|
"learning_rate": 9.733333333333335e-05, |
|
"loss": 2.94, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 11.88, |
|
"learning_rate": 9.711111111111111e-05, |
|
"loss": 2.9276, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 12.07, |
|
"learning_rate": 9.68888888888889e-05, |
|
"loss": 2.9234, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 12.26, |
|
"learning_rate": 9.666666666666667e-05, |
|
"loss": 2.9092, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 12.45, |
|
"learning_rate": 9.644444444444445e-05, |
|
"loss": 2.9046, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 12.64, |
|
"learning_rate": 9.622222222222222e-05, |
|
"loss": 2.8948, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 12.83, |
|
"learning_rate": 9.6e-05, |
|
"loss": 2.8869, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 13.01, |
|
"learning_rate": 9.577777777777777e-05, |
|
"loss": 2.884, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 13.2, |
|
"learning_rate": 9.555555555555557e-05, |
|
"loss": 2.8717, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 13.39, |
|
"learning_rate": 9.533333333333334e-05, |
|
"loss": 2.8653, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 13.58, |
|
"learning_rate": 9.511111111111112e-05, |
|
"loss": 2.8565, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 13.77, |
|
"learning_rate": 9.488888888888889e-05, |
|
"loss": 2.8524, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 13.96, |
|
"learning_rate": 9.466666666666667e-05, |
|
"loss": 2.8449, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 14.15, |
|
"learning_rate": 9.444444444444444e-05, |
|
"loss": 2.8406, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 14.15, |
|
"eval_loss": 2.706800937652588, |
|
"eval_runtime": 283.8148, |
|
"eval_samples_per_second": 485.711, |
|
"eval_steps_per_second": 3.795, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 14.33, |
|
"learning_rate": 9.422222222222223e-05, |
|
"loss": 2.835, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 14.52, |
|
"learning_rate": 9.4e-05, |
|
"loss": 2.8281, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 14.71, |
|
"learning_rate": 9.377777777777779e-05, |
|
"loss": 2.8208, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 14.9, |
|
"learning_rate": 9.355555555555556e-05, |
|
"loss": 2.8225, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 15.09, |
|
"learning_rate": 9.333333333333334e-05, |
|
"loss": 2.8122, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 15.28, |
|
"learning_rate": 9.311111111111111e-05, |
|
"loss": 2.8051, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 15.47, |
|
"learning_rate": 9.28888888888889e-05, |
|
"loss": 2.8047, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 15.65, |
|
"learning_rate": 9.266666666666666e-05, |
|
"loss": 2.7949, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 15.84, |
|
"learning_rate": 9.244444444444445e-05, |
|
"loss": 2.7948, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 16.03, |
|
"learning_rate": 9.222222222222223e-05, |
|
"loss": 2.7895, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 16.22, |
|
"learning_rate": 9.200000000000001e-05, |
|
"loss": 2.784, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 16.41, |
|
"learning_rate": 9.177777777777778e-05, |
|
"loss": 2.7806, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 16.6, |
|
"learning_rate": 9.155555555555557e-05, |
|
"loss": 2.7774, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 16.79, |
|
"learning_rate": 9.133333333333334e-05, |
|
"loss": 2.769, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 16.97, |
|
"learning_rate": 9.111111111111112e-05, |
|
"loss": 2.7634, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 17.16, |
|
"learning_rate": 9.088888888888889e-05, |
|
"loss": 2.7645, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 17.35, |
|
"learning_rate": 9.066666666666667e-05, |
|
"loss": 2.7576, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 17.54, |
|
"learning_rate": 9.044444444444445e-05, |
|
"loss": 2.7533, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 17.73, |
|
"learning_rate": 9.022222222222224e-05, |
|
"loss": 2.7536, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 17.92, |
|
"learning_rate": 9e-05, |
|
"loss": 2.7447, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 18.11, |
|
"learning_rate": 8.977777777777779e-05, |
|
"loss": 2.7401, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 18.29, |
|
"learning_rate": 8.955555555555556e-05, |
|
"loss": 2.7365, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 18.48, |
|
"learning_rate": 8.933333333333334e-05, |
|
"loss": 2.7349, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 18.67, |
|
"learning_rate": 8.911111111111111e-05, |
|
"loss": 2.7308, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 18.86, |
|
"learning_rate": 8.888888888888889e-05, |
|
"loss": 2.7305, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 18.86, |
|
"eval_loss": 2.608755111694336, |
|
"eval_runtime": 283.7638, |
|
"eval_samples_per_second": 485.798, |
|
"eval_steps_per_second": 3.795, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 19.05, |
|
"learning_rate": 8.866666666666668e-05, |
|
"loss": 2.7327, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 19.24, |
|
"learning_rate": 8.844444444444445e-05, |
|
"loss": 2.722, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 19.43, |
|
"learning_rate": 8.822222222222223e-05, |
|
"loss": 2.7219, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 19.62, |
|
"learning_rate": 8.800000000000001e-05, |
|
"loss": 2.71, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 19.8, |
|
"learning_rate": 8.777777777777778e-05, |
|
"loss": 2.7107, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 19.99, |
|
"learning_rate": 8.755555555555556e-05, |
|
"loss": 2.7127, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 20.18, |
|
"learning_rate": 8.733333333333333e-05, |
|
"loss": 2.7044, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 20.37, |
|
"learning_rate": 8.711111111111112e-05, |
|
"loss": 2.7036, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 20.56, |
|
"learning_rate": 8.68888888888889e-05, |
|
"loss": 2.7002, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 20.75, |
|
"learning_rate": 8.666666666666667e-05, |
|
"loss": 2.6973, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 20.94, |
|
"learning_rate": 8.644444444444445e-05, |
|
"loss": 2.6931, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 21.12, |
|
"learning_rate": 8.622222222222222e-05, |
|
"loss": 2.6938, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 21.31, |
|
"learning_rate": 8.6e-05, |
|
"loss": 2.6837, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 21.5, |
|
"learning_rate": 8.577777777777777e-05, |
|
"loss": 2.6851, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 21.69, |
|
"learning_rate": 8.555555555555556e-05, |
|
"loss": 2.6848, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 21.88, |
|
"learning_rate": 8.533333333333334e-05, |
|
"loss": 2.6845, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 22.07, |
|
"learning_rate": 8.511111111111112e-05, |
|
"loss": 2.6839, |
|
"step": 58500 |
|
}, |
|
{ |
|
"epoch": 22.26, |
|
"learning_rate": 8.488888888888889e-05, |
|
"loss": 2.6746, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 22.44, |
|
"learning_rate": 8.466666666666667e-05, |
|
"loss": 2.6736, |
|
"step": 59500 |
|
}, |
|
{ |
|
"epoch": 22.63, |
|
"learning_rate": 8.444444444444444e-05, |
|
"loss": 2.6688, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 22.82, |
|
"learning_rate": 8.422222222222223e-05, |
|
"loss": 2.6699, |
|
"step": 60500 |
|
}, |
|
{ |
|
"epoch": 23.01, |
|
"learning_rate": 8.4e-05, |
|
"loss": 2.673, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 23.2, |
|
"learning_rate": 8.377777777777778e-05, |
|
"loss": 2.6606, |
|
"step": 61500 |
|
}, |
|
{ |
|
"epoch": 23.39, |
|
"learning_rate": 8.355555555555556e-05, |
|
"loss": 2.6595, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 23.58, |
|
"learning_rate": 8.333333333333334e-05, |
|
"loss": 2.6634, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 23.58, |
|
"eval_loss": 2.541694164276123, |
|
"eval_runtime": 283.9564, |
|
"eval_samples_per_second": 485.469, |
|
"eval_steps_per_second": 3.793, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 23.76, |
|
"learning_rate": 8.311111111111111e-05, |
|
"loss": 2.6599, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 23.95, |
|
"learning_rate": 8.28888888888889e-05, |
|
"loss": 2.6509, |
|
"step": 63500 |
|
}, |
|
{ |
|
"epoch": 24.14, |
|
"learning_rate": 8.266666666666667e-05, |
|
"loss": 2.6573, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 24.33, |
|
"learning_rate": 8.244444444444445e-05, |
|
"loss": 2.6497, |
|
"step": 64500 |
|
}, |
|
{ |
|
"epoch": 24.52, |
|
"learning_rate": 8.222222222222222e-05, |
|
"loss": 2.6492, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 24.71, |
|
"learning_rate": 8.2e-05, |
|
"loss": 2.646, |
|
"step": 65500 |
|
}, |
|
{ |
|
"epoch": 24.9, |
|
"learning_rate": 8.177777777777778e-05, |
|
"loss": 2.6444, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 25.08, |
|
"learning_rate": 8.155555555555557e-05, |
|
"loss": 2.6465, |
|
"step": 66500 |
|
}, |
|
{ |
|
"epoch": 25.27, |
|
"learning_rate": 8.133333333333334e-05, |
|
"loss": 2.6412, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 25.46, |
|
"learning_rate": 8.111111111111112e-05, |
|
"loss": 2.6364, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 25.65, |
|
"learning_rate": 8.088888888888889e-05, |
|
"loss": 2.6377, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 25.84, |
|
"learning_rate": 8.066666666666667e-05, |
|
"loss": 2.6331, |
|
"step": 68500 |
|
}, |
|
{ |
|
"epoch": 26.03, |
|
"learning_rate": 8.044444444444444e-05, |
|
"loss": 2.6374, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 26.22, |
|
"learning_rate": 8.022222222222222e-05, |
|
"loss": 2.6269, |
|
"step": 69500 |
|
}, |
|
{ |
|
"epoch": 26.41, |
|
"learning_rate": 8e-05, |
|
"loss": 2.625, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 26.59, |
|
"learning_rate": 7.977777777777779e-05, |
|
"loss": 2.6279, |
|
"step": 70500 |
|
}, |
|
{ |
|
"epoch": 26.78, |
|
"learning_rate": 7.955555555555556e-05, |
|
"loss": 2.6278, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 26.97, |
|
"learning_rate": 7.933333333333334e-05, |
|
"loss": 2.6267, |
|
"step": 71500 |
|
}, |
|
{ |
|
"epoch": 27.16, |
|
"learning_rate": 7.911111111111111e-05, |
|
"loss": 2.6273, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 27.35, |
|
"learning_rate": 7.88888888888889e-05, |
|
"loss": 2.6175, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 27.54, |
|
"learning_rate": 7.866666666666666e-05, |
|
"loss": 2.6149, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 27.73, |
|
"learning_rate": 7.844444444444446e-05, |
|
"loss": 2.6183, |
|
"step": 73500 |
|
}, |
|
{ |
|
"epoch": 27.91, |
|
"learning_rate": 7.822222222222223e-05, |
|
"loss": 2.6107, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 28.1, |
|
"learning_rate": 7.800000000000001e-05, |
|
"loss": 2.6181, |
|
"step": 74500 |
|
}, |
|
{ |
|
"epoch": 28.29, |
|
"learning_rate": 7.777777777777778e-05, |
|
"loss": 2.6134, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 28.29, |
|
"eval_loss": 2.4989469051361084, |
|
"eval_runtime": 283.5232, |
|
"eval_samples_per_second": 486.211, |
|
"eval_steps_per_second": 3.799, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 28.48, |
|
"learning_rate": 7.755555555555556e-05, |
|
"loss": 2.6136, |
|
"step": 75500 |
|
}, |
|
{ |
|
"epoch": 28.67, |
|
"learning_rate": 7.733333333333333e-05, |
|
"loss": 2.6061, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 28.86, |
|
"learning_rate": 7.711111111111112e-05, |
|
"loss": 2.6038, |
|
"step": 76500 |
|
}, |
|
{ |
|
"epoch": 29.05, |
|
"learning_rate": 7.688888888888889e-05, |
|
"loss": 2.6056, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 29.23, |
|
"learning_rate": 7.666666666666667e-05, |
|
"loss": 2.6044, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 29.42, |
|
"learning_rate": 7.644444444444445e-05, |
|
"loss": 2.6012, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 29.61, |
|
"learning_rate": 7.622222222222223e-05, |
|
"loss": 2.5976, |
|
"step": 78500 |
|
}, |
|
{ |
|
"epoch": 29.8, |
|
"learning_rate": 7.6e-05, |
|
"loss": 2.5993, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 29.99, |
|
"learning_rate": 7.577777777777779e-05, |
|
"loss": 2.5964, |
|
"step": 79500 |
|
}, |
|
{ |
|
"epoch": 30.18, |
|
"learning_rate": 7.555555555555556e-05, |
|
"loss": 2.5953, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 30.37, |
|
"learning_rate": 7.533333333333334e-05, |
|
"loss": 2.5916, |
|
"step": 80500 |
|
}, |
|
{ |
|
"epoch": 30.55, |
|
"learning_rate": 7.511111111111111e-05, |
|
"loss": 2.6004, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 30.74, |
|
"learning_rate": 7.488888888888889e-05, |
|
"loss": 2.5902, |
|
"step": 81500 |
|
}, |
|
{ |
|
"epoch": 30.93, |
|
"learning_rate": 7.466666666666667e-05, |
|
"loss": 2.5936, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 31.12, |
|
"learning_rate": 7.444444444444444e-05, |
|
"loss": 2.5939, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 31.31, |
|
"learning_rate": 7.422222222222223e-05, |
|
"loss": 2.5893, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 31.5, |
|
"learning_rate": 7.4e-05, |
|
"loss": 2.5834, |
|
"step": 83500 |
|
}, |
|
{ |
|
"epoch": 31.69, |
|
"learning_rate": 7.377777777777778e-05, |
|
"loss": 2.5853, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 31.87, |
|
"learning_rate": 7.355555555555556e-05, |
|
"loss": 2.5825, |
|
"step": 84500 |
|
}, |
|
{ |
|
"epoch": 32.06, |
|
"learning_rate": 7.333333333333333e-05, |
|
"loss": 2.5865, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 32.25, |
|
"learning_rate": 7.311111111111111e-05, |
|
"loss": 2.5756, |
|
"step": 85500 |
|
}, |
|
{ |
|
"epoch": 32.44, |
|
"learning_rate": 7.28888888888889e-05, |
|
"loss": 2.5774, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 32.63, |
|
"learning_rate": 7.266666666666667e-05, |
|
"loss": 2.5792, |
|
"step": 86500 |
|
}, |
|
{ |
|
"epoch": 32.82, |
|
"learning_rate": 7.244444444444445e-05, |
|
"loss": 2.5753, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 33.01, |
|
"learning_rate": 7.222222222222222e-05, |
|
"loss": 2.5812, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 33.01, |
|
"eval_loss": 2.469001531600952, |
|
"eval_runtime": 283.5082, |
|
"eval_samples_per_second": 486.236, |
|
"eval_steps_per_second": 3.799, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 33.19, |
|
"learning_rate": 7.2e-05, |
|
"loss": 2.571, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 33.38, |
|
"learning_rate": 7.177777777777777e-05, |
|
"loss": 2.5736, |
|
"step": 88500 |
|
}, |
|
{ |
|
"epoch": 33.57, |
|
"learning_rate": 7.155555555555555e-05, |
|
"loss": 2.5712, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 33.76, |
|
"learning_rate": 7.133333333333334e-05, |
|
"loss": 2.5728, |
|
"step": 89500 |
|
}, |
|
{ |
|
"epoch": 33.95, |
|
"learning_rate": 7.111111111111112e-05, |
|
"loss": 2.5676, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 34.14, |
|
"learning_rate": 7.088888888888889e-05, |
|
"loss": 2.5682, |
|
"step": 90500 |
|
}, |
|
{ |
|
"epoch": 34.33, |
|
"learning_rate": 7.066666666666667e-05, |
|
"loss": 2.5662, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 34.52, |
|
"learning_rate": 7.044444444444444e-05, |
|
"loss": 2.5671, |
|
"step": 91500 |
|
}, |
|
{ |
|
"epoch": 34.7, |
|
"learning_rate": 7.022222222222222e-05, |
|
"loss": 2.5667, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 34.89, |
|
"learning_rate": 7e-05, |
|
"loss": 2.5619, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 35.08, |
|
"learning_rate": 6.977777777777779e-05, |
|
"loss": 2.5639, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 35.27, |
|
"learning_rate": 6.955555555555556e-05, |
|
"loss": 2.5581, |
|
"step": 93500 |
|
}, |
|
{ |
|
"epoch": 35.46, |
|
"learning_rate": 6.933333333333334e-05, |
|
"loss": 2.5586, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 35.65, |
|
"learning_rate": 6.911111111111111e-05, |
|
"loss": 2.5612, |
|
"step": 94500 |
|
}, |
|
{ |
|
"epoch": 35.84, |
|
"learning_rate": 6.88888888888889e-05, |
|
"loss": 2.5552, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 36.02, |
|
"learning_rate": 6.866666666666666e-05, |
|
"loss": 2.5598, |
|
"step": 95500 |
|
}, |
|
{ |
|
"epoch": 36.21, |
|
"learning_rate": 6.844444444444445e-05, |
|
"loss": 2.5479, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 36.4, |
|
"learning_rate": 6.822222222222222e-05, |
|
"loss": 2.5545, |
|
"step": 96500 |
|
}, |
|
{ |
|
"epoch": 36.59, |
|
"learning_rate": 6.800000000000001e-05, |
|
"loss": 2.5506, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 36.78, |
|
"learning_rate": 6.777777777777778e-05, |
|
"loss": 2.5526, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 36.97, |
|
"learning_rate": 6.755555555555557e-05, |
|
"loss": 2.552, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 37.16, |
|
"learning_rate": 6.733333333333333e-05, |
|
"loss": 2.5493, |
|
"step": 98500 |
|
}, |
|
{ |
|
"epoch": 37.34, |
|
"learning_rate": 6.711111111111112e-05, |
|
"loss": 2.5474, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 37.53, |
|
"learning_rate": 6.688888888888889e-05, |
|
"loss": 2.5475, |
|
"step": 99500 |
|
}, |
|
{ |
|
"epoch": 37.72, |
|
"learning_rate": 6.666666666666667e-05, |
|
"loss": 2.5447, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 37.72, |
|
"eval_loss": 2.4395639896392822, |
|
"eval_runtime": 283.6209, |
|
"eval_samples_per_second": 486.043, |
|
"eval_steps_per_second": 3.797, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 37.91, |
|
"learning_rate": 6.644444444444444e-05, |
|
"loss": 2.5444, |
|
"step": 100500 |
|
}, |
|
{ |
|
"epoch": 38.1, |
|
"learning_rate": 6.622222222222224e-05, |
|
"loss": 2.5467, |
|
"step": 101000 |
|
}, |
|
{ |
|
"epoch": 38.29, |
|
"learning_rate": 6.6e-05, |
|
"loss": 2.537, |
|
"step": 101500 |
|
}, |
|
{ |
|
"epoch": 38.48, |
|
"learning_rate": 6.577777777777779e-05, |
|
"loss": 2.5344, |
|
"step": 102000 |
|
}, |
|
{ |
|
"epoch": 38.66, |
|
"learning_rate": 6.555555555555556e-05, |
|
"loss": 2.5431, |
|
"step": 102500 |
|
}, |
|
{ |
|
"epoch": 38.85, |
|
"learning_rate": 6.533333333333334e-05, |
|
"loss": 2.5435, |
|
"step": 103000 |
|
}, |
|
{ |
|
"epoch": 39.04, |
|
"learning_rate": 6.511111111111111e-05, |
|
"loss": 2.5434, |
|
"step": 103500 |
|
}, |
|
{ |
|
"epoch": 39.23, |
|
"learning_rate": 6.488888888888889e-05, |
|
"loss": 2.5297, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 39.42, |
|
"learning_rate": 6.466666666666666e-05, |
|
"loss": 2.5408, |
|
"step": 104500 |
|
}, |
|
{ |
|
"epoch": 39.61, |
|
"learning_rate": 6.444444444444446e-05, |
|
"loss": 2.5298, |
|
"step": 105000 |
|
}, |
|
{ |
|
"epoch": 39.8, |
|
"learning_rate": 6.422222222222223e-05, |
|
"loss": 2.5358, |
|
"step": 105500 |
|
}, |
|
{ |
|
"epoch": 39.98, |
|
"learning_rate": 6.400000000000001e-05, |
|
"loss": 2.5379, |
|
"step": 106000 |
|
}, |
|
{ |
|
"epoch": 40.17, |
|
"learning_rate": 6.377777777777778e-05, |
|
"loss": 2.5357, |
|
"step": 106500 |
|
}, |
|
{ |
|
"epoch": 40.36, |
|
"learning_rate": 6.355555555555556e-05, |
|
"loss": 2.5268, |
|
"step": 107000 |
|
}, |
|
{ |
|
"epoch": 40.55, |
|
"learning_rate": 6.333333333333333e-05, |
|
"loss": 2.5263, |
|
"step": 107500 |
|
}, |
|
{ |
|
"epoch": 40.74, |
|
"learning_rate": 6.311111111111112e-05, |
|
"loss": 2.5329, |
|
"step": 108000 |
|
}, |
|
{ |
|
"epoch": 40.93, |
|
"learning_rate": 6.28888888888889e-05, |
|
"loss": 2.533, |
|
"step": 108500 |
|
}, |
|
{ |
|
"epoch": 41.12, |
|
"learning_rate": 6.266666666666667e-05, |
|
"loss": 2.5282, |
|
"step": 109000 |
|
}, |
|
{ |
|
"epoch": 41.31, |
|
"learning_rate": 6.244444444444445e-05, |
|
"loss": 2.5227, |
|
"step": 109500 |
|
}, |
|
{ |
|
"epoch": 41.49, |
|
"learning_rate": 6.222222222222222e-05, |
|
"loss": 2.5279, |
|
"step": 110000 |
|
}, |
|
{ |
|
"epoch": 41.68, |
|
"learning_rate": 6.2e-05, |
|
"loss": 2.5272, |
|
"step": 110500 |
|
}, |
|
{ |
|
"epoch": 41.87, |
|
"learning_rate": 6.177777777777779e-05, |
|
"loss": 2.522, |
|
"step": 111000 |
|
}, |
|
{ |
|
"epoch": 42.06, |
|
"learning_rate": 6.155555555555555e-05, |
|
"loss": 2.5253, |
|
"step": 111500 |
|
}, |
|
{ |
|
"epoch": 42.25, |
|
"learning_rate": 6.133333333333334e-05, |
|
"loss": 2.5215, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 42.44, |
|
"learning_rate": 6.111111111111112e-05, |
|
"loss": 2.5205, |
|
"step": 112500 |
|
}, |
|
{ |
|
"epoch": 42.44, |
|
"eval_loss": 2.419991970062256, |
|
"eval_runtime": 283.6709, |
|
"eval_samples_per_second": 485.957, |
|
"eval_steps_per_second": 3.797, |
|
"step": 112500 |
|
}, |
|
{ |
|
"epoch": 42.63, |
|
"learning_rate": 6.08888888888889e-05, |
|
"loss": 2.5194, |
|
"step": 113000 |
|
}, |
|
{ |
|
"epoch": 42.81, |
|
"learning_rate": 6.066666666666667e-05, |
|
"loss": 2.5179, |
|
"step": 113500 |
|
}, |
|
{ |
|
"epoch": 43.0, |
|
"learning_rate": 6.044444444444445e-05, |
|
"loss": 2.526, |
|
"step": 114000 |
|
}, |
|
{ |
|
"epoch": 43.19, |
|
"learning_rate": 6.0222222222222225e-05, |
|
"loss": 2.5172, |
|
"step": 114500 |
|
}, |
|
{ |
|
"epoch": 43.38, |
|
"learning_rate": 6e-05, |
|
"loss": 2.5114, |
|
"step": 115000 |
|
}, |
|
{ |
|
"epoch": 43.57, |
|
"learning_rate": 5.977777777777778e-05, |
|
"loss": 2.5179, |
|
"step": 115500 |
|
}, |
|
{ |
|
"epoch": 43.76, |
|
"learning_rate": 5.9555555555555554e-05, |
|
"loss": 2.5189, |
|
"step": 116000 |
|
}, |
|
{ |
|
"epoch": 43.95, |
|
"learning_rate": 5.9333333333333343e-05, |
|
"loss": 2.5202, |
|
"step": 116500 |
|
}, |
|
{ |
|
"epoch": 44.13, |
|
"learning_rate": 5.911111111111112e-05, |
|
"loss": 2.517, |
|
"step": 117000 |
|
}, |
|
{ |
|
"epoch": 44.32, |
|
"learning_rate": 5.8888888888888896e-05, |
|
"loss": 2.5151, |
|
"step": 117500 |
|
}, |
|
{ |
|
"epoch": 44.51, |
|
"learning_rate": 5.866666666666667e-05, |
|
"loss": 2.5101, |
|
"step": 118000 |
|
}, |
|
{ |
|
"epoch": 44.7, |
|
"learning_rate": 5.844444444444445e-05, |
|
"loss": 2.511, |
|
"step": 118500 |
|
}, |
|
{ |
|
"epoch": 44.89, |
|
"learning_rate": 5.8222222222222224e-05, |
|
"loss": 2.5105, |
|
"step": 119000 |
|
}, |
|
{ |
|
"epoch": 45.08, |
|
"learning_rate": 5.8e-05, |
|
"loss": 2.5107, |
|
"step": 119500 |
|
}, |
|
{ |
|
"epoch": 45.27, |
|
"learning_rate": 5.7777777777777776e-05, |
|
"loss": 2.5089, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 45.45, |
|
"learning_rate": 5.755555555555556e-05, |
|
"loss": 2.5051, |
|
"step": 120500 |
|
}, |
|
{ |
|
"epoch": 45.64, |
|
"learning_rate": 5.7333333333333336e-05, |
|
"loss": 2.5083, |
|
"step": 121000 |
|
}, |
|
{ |
|
"epoch": 45.83, |
|
"learning_rate": 5.711111111111112e-05, |
|
"loss": 2.5072, |
|
"step": 121500 |
|
}, |
|
{ |
|
"epoch": 46.02, |
|
"learning_rate": 5.6888888888888895e-05, |
|
"loss": 2.5107, |
|
"step": 122000 |
|
}, |
|
{ |
|
"epoch": 46.21, |
|
"learning_rate": 5.666666666666667e-05, |
|
"loss": 2.5036, |
|
"step": 122500 |
|
}, |
|
{ |
|
"epoch": 46.4, |
|
"learning_rate": 5.644444444444445e-05, |
|
"loss": 2.5051, |
|
"step": 123000 |
|
}, |
|
{ |
|
"epoch": 46.59, |
|
"learning_rate": 5.622222222222222e-05, |
|
"loss": 2.5039, |
|
"step": 123500 |
|
}, |
|
{ |
|
"epoch": 46.77, |
|
"learning_rate": 5.6000000000000006e-05, |
|
"loss": 2.4972, |
|
"step": 124000 |
|
}, |
|
{ |
|
"epoch": 46.96, |
|
"learning_rate": 5.577777777777778e-05, |
|
"loss": 2.5073, |
|
"step": 124500 |
|
}, |
|
{ |
|
"epoch": 47.15, |
|
"learning_rate": 5.555555555555556e-05, |
|
"loss": 2.5052, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 47.15, |
|
"eval_loss": 2.4018545150756836, |
|
"eval_runtime": 283.8972, |
|
"eval_samples_per_second": 485.57, |
|
"eval_steps_per_second": 3.794, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 47.34, |
|
"learning_rate": 5.5333333333333334e-05, |
|
"loss": 2.5015, |
|
"step": 125500 |
|
}, |
|
{ |
|
"epoch": 47.53, |
|
"learning_rate": 5.511111111111111e-05, |
|
"loss": 2.4983, |
|
"step": 126000 |
|
}, |
|
{ |
|
"epoch": 47.72, |
|
"learning_rate": 5.488888888888889e-05, |
|
"loss": 2.5022, |
|
"step": 126500 |
|
}, |
|
{ |
|
"epoch": 47.91, |
|
"learning_rate": 5.466666666666666e-05, |
|
"loss": 2.4932, |
|
"step": 127000 |
|
}, |
|
{ |
|
"epoch": 48.1, |
|
"learning_rate": 5.4444444444444446e-05, |
|
"loss": 2.5007, |
|
"step": 127500 |
|
}, |
|
{ |
|
"epoch": 48.28, |
|
"learning_rate": 5.422222222222223e-05, |
|
"loss": 2.4944, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 48.47, |
|
"learning_rate": 5.4000000000000005e-05, |
|
"loss": 2.4964, |
|
"step": 128500 |
|
}, |
|
{ |
|
"epoch": 48.66, |
|
"learning_rate": 5.377777777777778e-05, |
|
"loss": 2.4976, |
|
"step": 129000 |
|
}, |
|
{ |
|
"epoch": 48.85, |
|
"learning_rate": 5.355555555555556e-05, |
|
"loss": 2.4948, |
|
"step": 129500 |
|
}, |
|
{ |
|
"epoch": 49.04, |
|
"learning_rate": 5.333333333333333e-05, |
|
"loss": 2.5018, |
|
"step": 130000 |
|
}, |
|
{ |
|
"epoch": 49.23, |
|
"learning_rate": 5.311111111111111e-05, |
|
"loss": 2.4919, |
|
"step": 130500 |
|
}, |
|
{ |
|
"epoch": 49.42, |
|
"learning_rate": 5.2888888888888885e-05, |
|
"loss": 2.4925, |
|
"step": 131000 |
|
}, |
|
{ |
|
"epoch": 49.6, |
|
"learning_rate": 5.266666666666666e-05, |
|
"loss": 2.491, |
|
"step": 131500 |
|
}, |
|
{ |
|
"epoch": 49.79, |
|
"learning_rate": 5.244444444444445e-05, |
|
"loss": 2.4932, |
|
"step": 132000 |
|
}, |
|
{ |
|
"epoch": 49.98, |
|
"learning_rate": 5.222222222222223e-05, |
|
"loss": 2.4892, |
|
"step": 132500 |
|
}, |
|
{ |
|
"epoch": 50.17, |
|
"learning_rate": 5.2000000000000004e-05, |
|
"loss": 2.4877, |
|
"step": 133000 |
|
}, |
|
{ |
|
"epoch": 50.36, |
|
"learning_rate": 5.177777777777778e-05, |
|
"loss": 2.4899, |
|
"step": 133500 |
|
}, |
|
{ |
|
"epoch": 50.55, |
|
"learning_rate": 5.1555555555555556e-05, |
|
"loss": 2.49, |
|
"step": 134000 |
|
}, |
|
{ |
|
"epoch": 50.74, |
|
"learning_rate": 5.133333333333333e-05, |
|
"loss": 2.4843, |
|
"step": 134500 |
|
}, |
|
{ |
|
"epoch": 50.92, |
|
"learning_rate": 5.111111111111111e-05, |
|
"loss": 2.4901, |
|
"step": 135000 |
|
}, |
|
{ |
|
"epoch": 51.11, |
|
"learning_rate": 5.0888888888888884e-05, |
|
"loss": 2.4919, |
|
"step": 135500 |
|
}, |
|
{ |
|
"epoch": 51.3, |
|
"learning_rate": 5.0666666666666674e-05, |
|
"loss": 2.4862, |
|
"step": 136000 |
|
}, |
|
{ |
|
"epoch": 51.49, |
|
"learning_rate": 5.044444444444445e-05, |
|
"loss": 2.4813, |
|
"step": 136500 |
|
}, |
|
{ |
|
"epoch": 51.68, |
|
"learning_rate": 5.0222222222222226e-05, |
|
"loss": 2.4818, |
|
"step": 137000 |
|
}, |
|
{ |
|
"epoch": 51.87, |
|
"learning_rate": 5e-05, |
|
"loss": 2.4834, |
|
"step": 137500 |
|
}, |
|
{ |
|
"epoch": 51.87, |
|
"eval_loss": 2.3841142654418945, |
|
"eval_runtime": 283.7333, |
|
"eval_samples_per_second": 485.851, |
|
"eval_steps_per_second": 3.796, |
|
"step": 137500 |
|
}, |
|
{ |
|
"epoch": 52.06, |
|
"learning_rate": 4.977777777777778e-05, |
|
"loss": 2.4823, |
|
"step": 138000 |
|
}, |
|
{ |
|
"epoch": 52.24, |
|
"learning_rate": 4.955555555555556e-05, |
|
"loss": 2.4773, |
|
"step": 138500 |
|
}, |
|
{ |
|
"epoch": 52.43, |
|
"learning_rate": 4.933333333333334e-05, |
|
"loss": 2.4818, |
|
"step": 139000 |
|
}, |
|
{ |
|
"epoch": 52.62, |
|
"learning_rate": 4.9111111111111114e-05, |
|
"loss": 2.4807, |
|
"step": 139500 |
|
}, |
|
{ |
|
"epoch": 52.81, |
|
"learning_rate": 4.888888888888889e-05, |
|
"loss": 2.4801, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 53.0, |
|
"learning_rate": 4.866666666666667e-05, |
|
"loss": 2.4808, |
|
"step": 140500 |
|
}, |
|
{ |
|
"epoch": 53.19, |
|
"learning_rate": 4.844444444444445e-05, |
|
"loss": 2.4788, |
|
"step": 141000 |
|
}, |
|
{ |
|
"epoch": 53.38, |
|
"learning_rate": 4.8222222222222225e-05, |
|
"loss": 2.478, |
|
"step": 141500 |
|
}, |
|
{ |
|
"epoch": 53.56, |
|
"learning_rate": 4.8e-05, |
|
"loss": 2.4758, |
|
"step": 142000 |
|
}, |
|
{ |
|
"epoch": 53.75, |
|
"learning_rate": 4.7777777777777784e-05, |
|
"loss": 2.4792, |
|
"step": 142500 |
|
}, |
|
{ |
|
"epoch": 53.94, |
|
"learning_rate": 4.755555555555556e-05, |
|
"loss": 2.4781, |
|
"step": 143000 |
|
}, |
|
{ |
|
"epoch": 54.13, |
|
"learning_rate": 4.7333333333333336e-05, |
|
"loss": 2.4773, |
|
"step": 143500 |
|
}, |
|
{ |
|
"epoch": 54.32, |
|
"learning_rate": 4.711111111111111e-05, |
|
"loss": 2.4725, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 54.51, |
|
"learning_rate": 4.6888888888888895e-05, |
|
"loss": 2.4748, |
|
"step": 144500 |
|
}, |
|
{ |
|
"epoch": 54.7, |
|
"learning_rate": 4.666666666666667e-05, |
|
"loss": 2.4751, |
|
"step": 145000 |
|
}, |
|
{ |
|
"epoch": 54.88, |
|
"learning_rate": 4.644444444444445e-05, |
|
"loss": 2.47, |
|
"step": 145500 |
|
}, |
|
{ |
|
"epoch": 55.07, |
|
"learning_rate": 4.6222222222222224e-05, |
|
"loss": 2.4796, |
|
"step": 146000 |
|
}, |
|
{ |
|
"epoch": 55.26, |
|
"learning_rate": 4.600000000000001e-05, |
|
"loss": 2.4717, |
|
"step": 146500 |
|
}, |
|
{ |
|
"epoch": 55.45, |
|
"learning_rate": 4.577777777777778e-05, |
|
"loss": 2.4759, |
|
"step": 147000 |
|
}, |
|
{ |
|
"epoch": 55.64, |
|
"learning_rate": 4.555555555555556e-05, |
|
"loss": 2.4696, |
|
"step": 147500 |
|
}, |
|
{ |
|
"epoch": 55.83, |
|
"learning_rate": 4.5333333333333335e-05, |
|
"loss": 2.4707, |
|
"step": 148000 |
|
}, |
|
{ |
|
"epoch": 56.02, |
|
"learning_rate": 4.511111111111112e-05, |
|
"loss": 2.4664, |
|
"step": 148500 |
|
}, |
|
{ |
|
"epoch": 56.21, |
|
"learning_rate": 4.4888888888888894e-05, |
|
"loss": 2.4663, |
|
"step": 149000 |
|
}, |
|
{ |
|
"epoch": 56.39, |
|
"learning_rate": 4.466666666666667e-05, |
|
"loss": 2.4679, |
|
"step": 149500 |
|
}, |
|
{ |
|
"epoch": 56.58, |
|
"learning_rate": 4.4444444444444447e-05, |
|
"loss": 2.4694, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 56.58, |
|
"eval_loss": 2.371152877807617, |
|
"eval_runtime": 283.5132, |
|
"eval_samples_per_second": 486.228, |
|
"eval_steps_per_second": 3.799, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 56.77, |
|
"learning_rate": 4.422222222222222e-05, |
|
"loss": 2.4619, |
|
"step": 150500 |
|
}, |
|
{ |
|
"epoch": 56.96, |
|
"learning_rate": 4.4000000000000006e-05, |
|
"loss": 2.4686, |
|
"step": 151000 |
|
}, |
|
{ |
|
"epoch": 57.15, |
|
"learning_rate": 4.377777777777778e-05, |
|
"loss": 2.4715, |
|
"step": 151500 |
|
}, |
|
{ |
|
"epoch": 57.34, |
|
"learning_rate": 4.355555555555556e-05, |
|
"loss": 2.4619, |
|
"step": 152000 |
|
}, |
|
{ |
|
"epoch": 57.53, |
|
"learning_rate": 4.3333333333333334e-05, |
|
"loss": 2.463, |
|
"step": 152500 |
|
}, |
|
{ |
|
"epoch": 57.71, |
|
"learning_rate": 4.311111111111111e-05, |
|
"loss": 2.4609, |
|
"step": 153000 |
|
}, |
|
{ |
|
"epoch": 57.9, |
|
"learning_rate": 4.2888888888888886e-05, |
|
"loss": 2.4646, |
|
"step": 153500 |
|
}, |
|
{ |
|
"epoch": 58.09, |
|
"learning_rate": 4.266666666666667e-05, |
|
"loss": 2.4658, |
|
"step": 154000 |
|
}, |
|
{ |
|
"epoch": 58.28, |
|
"learning_rate": 4.2444444444444445e-05, |
|
"loss": 2.4614, |
|
"step": 154500 |
|
}, |
|
{ |
|
"epoch": 58.47, |
|
"learning_rate": 4.222222222222222e-05, |
|
"loss": 2.4646, |
|
"step": 155000 |
|
}, |
|
{ |
|
"epoch": 58.66, |
|
"learning_rate": 4.2e-05, |
|
"loss": 2.4629, |
|
"step": 155500 |
|
}, |
|
{ |
|
"epoch": 58.85, |
|
"learning_rate": 4.177777777777778e-05, |
|
"loss": 2.4587, |
|
"step": 156000 |
|
}, |
|
{ |
|
"epoch": 59.03, |
|
"learning_rate": 4.155555555555556e-05, |
|
"loss": 2.466, |
|
"step": 156500 |
|
}, |
|
{ |
|
"epoch": 59.22, |
|
"learning_rate": 4.133333333333333e-05, |
|
"loss": 2.452, |
|
"step": 157000 |
|
}, |
|
{ |
|
"epoch": 59.41, |
|
"learning_rate": 4.111111111111111e-05, |
|
"loss": 2.4608, |
|
"step": 157500 |
|
}, |
|
{ |
|
"epoch": 59.6, |
|
"learning_rate": 4.088888888888889e-05, |
|
"loss": 2.4629, |
|
"step": 158000 |
|
}, |
|
{ |
|
"epoch": 59.79, |
|
"learning_rate": 4.066666666666667e-05, |
|
"loss": 2.4605, |
|
"step": 158500 |
|
}, |
|
{ |
|
"epoch": 59.98, |
|
"learning_rate": 4.0444444444444444e-05, |
|
"loss": 2.4576, |
|
"step": 159000 |
|
}, |
|
{ |
|
"epoch": 60.17, |
|
"learning_rate": 4.022222222222222e-05, |
|
"loss": 2.4599, |
|
"step": 159500 |
|
}, |
|
{ |
|
"epoch": 60.35, |
|
"learning_rate": 4e-05, |
|
"loss": 2.4567, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 60.54, |
|
"learning_rate": 3.977777777777778e-05, |
|
"loss": 2.4604, |
|
"step": 160500 |
|
}, |
|
{ |
|
"epoch": 60.73, |
|
"learning_rate": 3.9555555555555556e-05, |
|
"loss": 2.4565, |
|
"step": 161000 |
|
}, |
|
{ |
|
"epoch": 60.92, |
|
"learning_rate": 3.933333333333333e-05, |
|
"loss": 2.4559, |
|
"step": 161500 |
|
}, |
|
{ |
|
"epoch": 61.11, |
|
"learning_rate": 3.9111111111111115e-05, |
|
"loss": 2.4587, |
|
"step": 162000 |
|
}, |
|
{ |
|
"epoch": 61.3, |
|
"learning_rate": 3.888888888888889e-05, |
|
"loss": 2.4509, |
|
"step": 162500 |
|
}, |
|
{ |
|
"epoch": 61.3, |
|
"eval_loss": 2.3613131046295166, |
|
"eval_runtime": 283.6567, |
|
"eval_samples_per_second": 485.982, |
|
"eval_steps_per_second": 3.797, |
|
"step": 162500 |
|
}, |
|
{ |
|
"epoch": 61.49, |
|
"learning_rate": 3.866666666666667e-05, |
|
"loss": 2.4554, |
|
"step": 163000 |
|
}, |
|
{ |
|
"epoch": 61.67, |
|
"learning_rate": 3.844444444444444e-05, |
|
"loss": 2.4562, |
|
"step": 163500 |
|
}, |
|
{ |
|
"epoch": 61.86, |
|
"learning_rate": 3.8222222222222226e-05, |
|
"loss": 2.4511, |
|
"step": 164000 |
|
}, |
|
{ |
|
"epoch": 62.05, |
|
"learning_rate": 3.8e-05, |
|
"loss": 2.4578, |
|
"step": 164500 |
|
}, |
|
{ |
|
"epoch": 62.24, |
|
"learning_rate": 3.777777777777778e-05, |
|
"loss": 2.4513, |
|
"step": 165000 |
|
}, |
|
{ |
|
"epoch": 62.43, |
|
"learning_rate": 3.7555555555555554e-05, |
|
"loss": 2.4509, |
|
"step": 165500 |
|
}, |
|
{ |
|
"epoch": 62.62, |
|
"learning_rate": 3.733333333333334e-05, |
|
"loss": 2.4498, |
|
"step": 166000 |
|
}, |
|
{ |
|
"epoch": 62.81, |
|
"learning_rate": 3.7111111111111113e-05, |
|
"loss": 2.4499, |
|
"step": 166500 |
|
}, |
|
{ |
|
"epoch": 62.99, |
|
"learning_rate": 3.688888888888889e-05, |
|
"loss": 2.4543, |
|
"step": 167000 |
|
}, |
|
{ |
|
"epoch": 63.18, |
|
"learning_rate": 3.6666666666666666e-05, |
|
"loss": 2.4498, |
|
"step": 167500 |
|
}, |
|
{ |
|
"epoch": 63.37, |
|
"learning_rate": 3.644444444444445e-05, |
|
"loss": 2.4463, |
|
"step": 168000 |
|
}, |
|
{ |
|
"epoch": 63.56, |
|
"learning_rate": 3.6222222222222225e-05, |
|
"loss": 2.4491, |
|
"step": 168500 |
|
}, |
|
{ |
|
"epoch": 63.75, |
|
"learning_rate": 3.6e-05, |
|
"loss": 2.4532, |
|
"step": 169000 |
|
}, |
|
{ |
|
"epoch": 63.94, |
|
"learning_rate": 3.577777777777778e-05, |
|
"loss": 2.4441, |
|
"step": 169500 |
|
}, |
|
{ |
|
"epoch": 64.13, |
|
"learning_rate": 3.555555555555556e-05, |
|
"loss": 2.45, |
|
"step": 170000 |
|
}, |
|
{ |
|
"epoch": 64.32, |
|
"learning_rate": 3.5333333333333336e-05, |
|
"loss": 2.4453, |
|
"step": 170500 |
|
}, |
|
{ |
|
"epoch": 64.5, |
|
"learning_rate": 3.511111111111111e-05, |
|
"loss": 2.4463, |
|
"step": 171000 |
|
}, |
|
{ |
|
"epoch": 64.69, |
|
"learning_rate": 3.4888888888888895e-05, |
|
"loss": 2.4457, |
|
"step": 171500 |
|
}, |
|
{ |
|
"epoch": 64.88, |
|
"learning_rate": 3.466666666666667e-05, |
|
"loss": 2.4477, |
|
"step": 172000 |
|
}, |
|
{ |
|
"epoch": 65.07, |
|
"learning_rate": 3.444444444444445e-05, |
|
"loss": 2.4467, |
|
"step": 172500 |
|
}, |
|
{ |
|
"epoch": 65.26, |
|
"learning_rate": 3.4222222222222224e-05, |
|
"loss": 2.447, |
|
"step": 173000 |
|
}, |
|
{ |
|
"epoch": 65.45, |
|
"learning_rate": 3.4000000000000007e-05, |
|
"loss": 2.443, |
|
"step": 173500 |
|
}, |
|
{ |
|
"epoch": 65.64, |
|
"learning_rate": 3.377777777777778e-05, |
|
"loss": 2.443, |
|
"step": 174000 |
|
}, |
|
{ |
|
"epoch": 65.82, |
|
"learning_rate": 3.355555555555556e-05, |
|
"loss": 2.4441, |
|
"step": 174500 |
|
}, |
|
{ |
|
"epoch": 66.01, |
|
"learning_rate": 3.3333333333333335e-05, |
|
"loss": 2.4469, |
|
"step": 175000 |
|
}, |
|
{ |
|
"epoch": 66.01, |
|
"eval_loss": 2.3445606231689453, |
|
"eval_runtime": 283.6225, |
|
"eval_samples_per_second": 486.04, |
|
"eval_steps_per_second": 3.797, |
|
"step": 175000 |
|
}, |
|
{ |
|
"epoch": 66.2, |
|
"learning_rate": 3.311111111111112e-05, |
|
"loss": 2.4367, |
|
"step": 175500 |
|
}, |
|
{ |
|
"epoch": 66.39, |
|
"learning_rate": 3.2888888888888894e-05, |
|
"loss": 2.4414, |
|
"step": 176000 |
|
}, |
|
{ |
|
"epoch": 66.58, |
|
"learning_rate": 3.266666666666667e-05, |
|
"loss": 2.4424, |
|
"step": 176500 |
|
}, |
|
{ |
|
"epoch": 66.77, |
|
"learning_rate": 3.2444444444444446e-05, |
|
"loss": 2.4405, |
|
"step": 177000 |
|
}, |
|
{ |
|
"epoch": 66.96, |
|
"learning_rate": 3.222222222222223e-05, |
|
"loss": 2.4415, |
|
"step": 177500 |
|
}, |
|
{ |
|
"epoch": 67.14, |
|
"learning_rate": 3.2000000000000005e-05, |
|
"loss": 2.4398, |
|
"step": 178000 |
|
}, |
|
{ |
|
"epoch": 67.33, |
|
"learning_rate": 3.177777777777778e-05, |
|
"loss": 2.4406, |
|
"step": 178500 |
|
}, |
|
{ |
|
"epoch": 67.52, |
|
"learning_rate": 3.155555555555556e-05, |
|
"loss": 2.4375, |
|
"step": 179000 |
|
}, |
|
{ |
|
"epoch": 67.71, |
|
"learning_rate": 3.1333333333333334e-05, |
|
"loss": 2.4365, |
|
"step": 179500 |
|
}, |
|
{ |
|
"epoch": 67.9, |
|
"learning_rate": 3.111111111111111e-05, |
|
"loss": 2.4415, |
|
"step": 180000 |
|
}, |
|
{ |
|
"epoch": 68.09, |
|
"learning_rate": 3.088888888888889e-05, |
|
"loss": 2.4391, |
|
"step": 180500 |
|
}, |
|
{ |
|
"epoch": 68.28, |
|
"learning_rate": 3.066666666666667e-05, |
|
"loss": 2.437, |
|
"step": 181000 |
|
}, |
|
{ |
|
"epoch": 68.46, |
|
"learning_rate": 3.044444444444445e-05, |
|
"loss": 2.4357, |
|
"step": 181500 |
|
}, |
|
{ |
|
"epoch": 68.65, |
|
"learning_rate": 3.0222222222222225e-05, |
|
"loss": 2.4375, |
|
"step": 182000 |
|
}, |
|
{ |
|
"epoch": 68.84, |
|
"learning_rate": 3e-05, |
|
"loss": 2.4356, |
|
"step": 182500 |
|
}, |
|
{ |
|
"epoch": 69.03, |
|
"learning_rate": 2.9777777777777777e-05, |
|
"loss": 2.4364, |
|
"step": 183000 |
|
}, |
|
{ |
|
"epoch": 69.22, |
|
"learning_rate": 2.955555555555556e-05, |
|
"loss": 2.4335, |
|
"step": 183500 |
|
}, |
|
{ |
|
"epoch": 69.41, |
|
"learning_rate": 2.9333333333333336e-05, |
|
"loss": 2.4325, |
|
"step": 184000 |
|
}, |
|
{ |
|
"epoch": 69.6, |
|
"learning_rate": 2.9111111111111112e-05, |
|
"loss": 2.434, |
|
"step": 184500 |
|
}, |
|
{ |
|
"epoch": 69.78, |
|
"learning_rate": 2.8888888888888888e-05, |
|
"loss": 2.4397, |
|
"step": 185000 |
|
}, |
|
{ |
|
"epoch": 69.97, |
|
"learning_rate": 2.8666666666666668e-05, |
|
"loss": 2.4375, |
|
"step": 185500 |
|
}, |
|
{ |
|
"epoch": 70.16, |
|
"learning_rate": 2.8444444444444447e-05, |
|
"loss": 2.4338, |
|
"step": 186000 |
|
}, |
|
{ |
|
"epoch": 70.35, |
|
"learning_rate": 2.8222222222222223e-05, |
|
"loss": 2.4317, |
|
"step": 186500 |
|
}, |
|
{ |
|
"epoch": 70.54, |
|
"learning_rate": 2.8000000000000003e-05, |
|
"loss": 2.4321, |
|
"step": 187000 |
|
}, |
|
{ |
|
"epoch": 70.73, |
|
"learning_rate": 2.777777777777778e-05, |
|
"loss": 2.4296, |
|
"step": 187500 |
|
}, |
|
{ |
|
"epoch": 70.73, |
|
"eval_loss": 2.3378496170043945, |
|
"eval_runtime": 283.6193, |
|
"eval_samples_per_second": 486.046, |
|
"eval_steps_per_second": 3.797, |
|
"step": 187500 |
|
}, |
|
{ |
|
"epoch": 70.92, |
|
"learning_rate": 2.7555555555555555e-05, |
|
"loss": 2.4349, |
|
"step": 188000 |
|
}, |
|
{ |
|
"epoch": 71.11, |
|
"learning_rate": 2.733333333333333e-05, |
|
"loss": 2.4326, |
|
"step": 188500 |
|
}, |
|
{ |
|
"epoch": 71.29, |
|
"learning_rate": 2.7111111111111114e-05, |
|
"loss": 2.4284, |
|
"step": 189000 |
|
}, |
|
{ |
|
"epoch": 71.48, |
|
"learning_rate": 2.688888888888889e-05, |
|
"loss": 2.4299, |
|
"step": 189500 |
|
}, |
|
{ |
|
"epoch": 71.67, |
|
"learning_rate": 2.6666666666666667e-05, |
|
"loss": 2.4317, |
|
"step": 190000 |
|
}, |
|
{ |
|
"epoch": 71.86, |
|
"learning_rate": 2.6444444444444443e-05, |
|
"loss": 2.4288, |
|
"step": 190500 |
|
}, |
|
{ |
|
"epoch": 72.05, |
|
"learning_rate": 2.6222222222222226e-05, |
|
"loss": 2.4322, |
|
"step": 191000 |
|
}, |
|
{ |
|
"epoch": 72.24, |
|
"learning_rate": 2.6000000000000002e-05, |
|
"loss": 2.4317, |
|
"step": 191500 |
|
}, |
|
{ |
|
"epoch": 72.43, |
|
"learning_rate": 2.5777777777777778e-05, |
|
"loss": 2.4262, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 72.61, |
|
"learning_rate": 2.5555555555555554e-05, |
|
"loss": 2.4239, |
|
"step": 192500 |
|
}, |
|
{ |
|
"epoch": 72.8, |
|
"learning_rate": 2.5333333333333337e-05, |
|
"loss": 2.4279, |
|
"step": 193000 |
|
}, |
|
{ |
|
"epoch": 72.99, |
|
"learning_rate": 2.5111111111111113e-05, |
|
"loss": 2.4271, |
|
"step": 193500 |
|
}, |
|
{ |
|
"epoch": 73.18, |
|
"learning_rate": 2.488888888888889e-05, |
|
"loss": 2.4349, |
|
"step": 194000 |
|
}, |
|
{ |
|
"epoch": 73.37, |
|
"learning_rate": 2.466666666666667e-05, |
|
"loss": 2.4285, |
|
"step": 194500 |
|
}, |
|
{ |
|
"epoch": 73.56, |
|
"learning_rate": 2.4444444444444445e-05, |
|
"loss": 2.4217, |
|
"step": 195000 |
|
}, |
|
{ |
|
"epoch": 73.75, |
|
"learning_rate": 2.4222222222222224e-05, |
|
"loss": 2.4215, |
|
"step": 195500 |
|
}, |
|
{ |
|
"epoch": 73.93, |
|
"learning_rate": 2.4e-05, |
|
"loss": 2.4267, |
|
"step": 196000 |
|
}, |
|
{ |
|
"epoch": 74.12, |
|
"learning_rate": 2.377777777777778e-05, |
|
"loss": 2.4281, |
|
"step": 196500 |
|
}, |
|
{ |
|
"epoch": 74.31, |
|
"learning_rate": 2.3555555555555556e-05, |
|
"loss": 2.4238, |
|
"step": 197000 |
|
}, |
|
{ |
|
"epoch": 74.5, |
|
"learning_rate": 2.3333333333333336e-05, |
|
"loss": 2.4243, |
|
"step": 197500 |
|
}, |
|
{ |
|
"epoch": 74.69, |
|
"learning_rate": 2.3111111111111112e-05, |
|
"loss": 2.4238, |
|
"step": 198000 |
|
}, |
|
{ |
|
"epoch": 74.88, |
|
"learning_rate": 2.288888888888889e-05, |
|
"loss": 2.4231, |
|
"step": 198500 |
|
}, |
|
{ |
|
"epoch": 75.07, |
|
"learning_rate": 2.2666666666666668e-05, |
|
"loss": 2.4231, |
|
"step": 199000 |
|
}, |
|
{ |
|
"epoch": 75.25, |
|
"learning_rate": 2.2444444444444447e-05, |
|
"loss": 2.4238, |
|
"step": 199500 |
|
}, |
|
{ |
|
"epoch": 75.44, |
|
"learning_rate": 2.2222222222222223e-05, |
|
"loss": 2.4213, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 75.44, |
|
"eval_loss": 2.328047513961792, |
|
"eval_runtime": 283.8545, |
|
"eval_samples_per_second": 485.643, |
|
"eval_steps_per_second": 3.794, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 75.63, |
|
"learning_rate": 2.2000000000000003e-05, |
|
"loss": 2.4195, |
|
"step": 200500 |
|
}, |
|
{ |
|
"epoch": 75.82, |
|
"learning_rate": 2.177777777777778e-05, |
|
"loss": 2.427, |
|
"step": 201000 |
|
}, |
|
{ |
|
"epoch": 76.01, |
|
"learning_rate": 2.1555555555555555e-05, |
|
"loss": 2.4211, |
|
"step": 201500 |
|
}, |
|
{ |
|
"epoch": 76.2, |
|
"learning_rate": 2.1333333333333335e-05, |
|
"loss": 2.421, |
|
"step": 202000 |
|
}, |
|
{ |
|
"epoch": 76.39, |
|
"learning_rate": 2.111111111111111e-05, |
|
"loss": 2.4212, |
|
"step": 202500 |
|
}, |
|
{ |
|
"epoch": 76.57, |
|
"learning_rate": 2.088888888888889e-05, |
|
"loss": 2.4211, |
|
"step": 203000 |
|
}, |
|
{ |
|
"epoch": 76.76, |
|
"learning_rate": 2.0666666666666666e-05, |
|
"loss": 2.4172, |
|
"step": 203500 |
|
}, |
|
{ |
|
"epoch": 76.95, |
|
"learning_rate": 2.0444444444444446e-05, |
|
"loss": 2.4197, |
|
"step": 204000 |
|
}, |
|
{ |
|
"epoch": 77.14, |
|
"learning_rate": 2.0222222222222222e-05, |
|
"loss": 2.4202, |
|
"step": 204500 |
|
}, |
|
{ |
|
"epoch": 77.33, |
|
"learning_rate": 2e-05, |
|
"loss": 2.4198, |
|
"step": 205000 |
|
}, |
|
{ |
|
"epoch": 77.52, |
|
"learning_rate": 1.9777777777777778e-05, |
|
"loss": 2.4182, |
|
"step": 205500 |
|
}, |
|
{ |
|
"epoch": 77.71, |
|
"learning_rate": 1.9555555555555557e-05, |
|
"loss": 2.4151, |
|
"step": 206000 |
|
}, |
|
{ |
|
"epoch": 77.89, |
|
"learning_rate": 1.9333333333333333e-05, |
|
"loss": 2.417, |
|
"step": 206500 |
|
}, |
|
{ |
|
"epoch": 78.08, |
|
"learning_rate": 1.9111111111111113e-05, |
|
"loss": 2.4166, |
|
"step": 207000 |
|
}, |
|
{ |
|
"epoch": 78.27, |
|
"learning_rate": 1.888888888888889e-05, |
|
"loss": 2.4173, |
|
"step": 207500 |
|
}, |
|
{ |
|
"epoch": 78.46, |
|
"learning_rate": 1.866666666666667e-05, |
|
"loss": 2.413, |
|
"step": 208000 |
|
}, |
|
{ |
|
"epoch": 78.65, |
|
"learning_rate": 1.8444444444444445e-05, |
|
"loss": 2.4116, |
|
"step": 208500 |
|
}, |
|
{ |
|
"epoch": 78.84, |
|
"learning_rate": 1.8222222222222224e-05, |
|
"loss": 2.4129, |
|
"step": 209000 |
|
}, |
|
{ |
|
"epoch": 79.03, |
|
"learning_rate": 1.8e-05, |
|
"loss": 2.4186, |
|
"step": 209500 |
|
}, |
|
{ |
|
"epoch": 79.22, |
|
"learning_rate": 1.777777777777778e-05, |
|
"loss": 2.4105, |
|
"step": 210000 |
|
}, |
|
{ |
|
"epoch": 79.4, |
|
"learning_rate": 1.7555555555555556e-05, |
|
"loss": 2.4136, |
|
"step": 210500 |
|
}, |
|
{ |
|
"epoch": 79.59, |
|
"learning_rate": 1.7333333333333336e-05, |
|
"loss": 2.4117, |
|
"step": 211000 |
|
}, |
|
{ |
|
"epoch": 79.78, |
|
"learning_rate": 1.7111111111111112e-05, |
|
"loss": 2.412, |
|
"step": 211500 |
|
}, |
|
{ |
|
"epoch": 79.97, |
|
"learning_rate": 1.688888888888889e-05, |
|
"loss": 2.414, |
|
"step": 212000 |
|
}, |
|
{ |
|
"epoch": 80.16, |
|
"learning_rate": 1.6666666666666667e-05, |
|
"loss": 2.4166, |
|
"step": 212500 |
|
}, |
|
{ |
|
"epoch": 80.16, |
|
"eval_loss": 2.32003116607666, |
|
"eval_runtime": 283.7234, |
|
"eval_samples_per_second": 485.867, |
|
"eval_steps_per_second": 3.796, |
|
"step": 212500 |
|
}, |
|
{ |
|
"epoch": 80.35, |
|
"learning_rate": 1.6444444444444447e-05, |
|
"loss": 2.4142, |
|
"step": 213000 |
|
}, |
|
{ |
|
"epoch": 80.54, |
|
"learning_rate": 1.6222222222222223e-05, |
|
"loss": 2.4085, |
|
"step": 213500 |
|
}, |
|
{ |
|
"epoch": 80.72, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 2.4121, |
|
"step": 214000 |
|
}, |
|
{ |
|
"epoch": 80.91, |
|
"learning_rate": 1.577777777777778e-05, |
|
"loss": 2.4077, |
|
"step": 214500 |
|
}, |
|
{ |
|
"epoch": 81.1, |
|
"learning_rate": 1.5555555555555555e-05, |
|
"loss": 2.4129, |
|
"step": 215000 |
|
}, |
|
{ |
|
"epoch": 81.29, |
|
"learning_rate": 1.5333333333333334e-05, |
|
"loss": 2.4103, |
|
"step": 215500 |
|
}, |
|
{ |
|
"epoch": 81.48, |
|
"learning_rate": 1.5111111111111112e-05, |
|
"loss": 2.4116, |
|
"step": 216000 |
|
}, |
|
{ |
|
"epoch": 81.67, |
|
"learning_rate": 1.4888888888888888e-05, |
|
"loss": 2.4073, |
|
"step": 216500 |
|
}, |
|
{ |
|
"epoch": 81.86, |
|
"learning_rate": 1.4666666666666668e-05, |
|
"loss": 2.4111, |
|
"step": 217000 |
|
}, |
|
{ |
|
"epoch": 82.04, |
|
"learning_rate": 1.4444444444444444e-05, |
|
"loss": 2.4099, |
|
"step": 217500 |
|
}, |
|
{ |
|
"epoch": 82.23, |
|
"learning_rate": 1.4222222222222224e-05, |
|
"loss": 2.4051, |
|
"step": 218000 |
|
}, |
|
{ |
|
"epoch": 82.42, |
|
"learning_rate": 1.4000000000000001e-05, |
|
"loss": 2.406, |
|
"step": 218500 |
|
}, |
|
{ |
|
"epoch": 82.61, |
|
"learning_rate": 1.3777777777777778e-05, |
|
"loss": 2.4115, |
|
"step": 219000 |
|
}, |
|
{ |
|
"epoch": 82.8, |
|
"learning_rate": 1.3555555555555557e-05, |
|
"loss": 2.4027, |
|
"step": 219500 |
|
}, |
|
{ |
|
"epoch": 82.99, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 2.4124, |
|
"step": 220000 |
|
}, |
|
{ |
|
"epoch": 83.18, |
|
"learning_rate": 1.3111111111111113e-05, |
|
"loss": 2.4104, |
|
"step": 220500 |
|
}, |
|
{ |
|
"epoch": 83.36, |
|
"learning_rate": 1.2888888888888889e-05, |
|
"loss": 2.405, |
|
"step": 221000 |
|
}, |
|
{ |
|
"epoch": 83.55, |
|
"learning_rate": 1.2666666666666668e-05, |
|
"loss": 2.4078, |
|
"step": 221500 |
|
}, |
|
{ |
|
"epoch": 83.74, |
|
"learning_rate": 1.2444444444444445e-05, |
|
"loss": 2.4091, |
|
"step": 222000 |
|
}, |
|
{ |
|
"epoch": 83.93, |
|
"learning_rate": 1.2222222222222222e-05, |
|
"loss": 2.4013, |
|
"step": 222500 |
|
}, |
|
{ |
|
"epoch": 84.12, |
|
"learning_rate": 1.2e-05, |
|
"loss": 2.4117, |
|
"step": 223000 |
|
}, |
|
{ |
|
"epoch": 84.31, |
|
"learning_rate": 1.1777777777777778e-05, |
|
"loss": 2.408, |
|
"step": 223500 |
|
}, |
|
{ |
|
"epoch": 84.5, |
|
"learning_rate": 1.1555555555555556e-05, |
|
"loss": 2.4035, |
|
"step": 224000 |
|
}, |
|
{ |
|
"epoch": 84.68, |
|
"learning_rate": 1.1333333333333334e-05, |
|
"loss": 2.4005, |
|
"step": 224500 |
|
}, |
|
{ |
|
"epoch": 84.87, |
|
"learning_rate": 1.1111111111111112e-05, |
|
"loss": 2.3995, |
|
"step": 225000 |
|
}, |
|
{ |
|
"epoch": 84.87, |
|
"eval_loss": 2.3128256797790527, |
|
"eval_runtime": 283.7082, |
|
"eval_samples_per_second": 485.894, |
|
"eval_steps_per_second": 3.796, |
|
"step": 225000 |
|
}, |
|
{ |
|
"epoch": 85.06, |
|
"learning_rate": 1.088888888888889e-05, |
|
"loss": 2.41, |
|
"step": 225500 |
|
}, |
|
{ |
|
"epoch": 85.25, |
|
"learning_rate": 1.0666666666666667e-05, |
|
"loss": 2.4006, |
|
"step": 226000 |
|
}, |
|
{ |
|
"epoch": 85.44, |
|
"learning_rate": 1.0444444444444445e-05, |
|
"loss": 2.4071, |
|
"step": 226500 |
|
}, |
|
{ |
|
"epoch": 85.63, |
|
"learning_rate": 1.0222222222222223e-05, |
|
"loss": 2.4026, |
|
"step": 227000 |
|
}, |
|
{ |
|
"epoch": 85.82, |
|
"learning_rate": 1e-05, |
|
"loss": 2.4054, |
|
"step": 227500 |
|
}, |
|
{ |
|
"epoch": 86.01, |
|
"learning_rate": 9.777777777777779e-06, |
|
"loss": 2.406, |
|
"step": 228000 |
|
}, |
|
{ |
|
"epoch": 86.19, |
|
"learning_rate": 9.555555555555556e-06, |
|
"loss": 2.4008, |
|
"step": 228500 |
|
}, |
|
{ |
|
"epoch": 86.38, |
|
"learning_rate": 9.333333333333334e-06, |
|
"loss": 2.4, |
|
"step": 229000 |
|
}, |
|
{ |
|
"epoch": 86.57, |
|
"learning_rate": 9.111111111111112e-06, |
|
"loss": 2.4014, |
|
"step": 229500 |
|
}, |
|
{ |
|
"epoch": 86.76, |
|
"learning_rate": 8.88888888888889e-06, |
|
"loss": 2.4026, |
|
"step": 230000 |
|
}, |
|
{ |
|
"epoch": 86.95, |
|
"learning_rate": 8.666666666666668e-06, |
|
"loss": 2.4038, |
|
"step": 230500 |
|
}, |
|
{ |
|
"epoch": 87.14, |
|
"learning_rate": 8.444444444444446e-06, |
|
"loss": 2.4036, |
|
"step": 231000 |
|
}, |
|
{ |
|
"epoch": 87.33, |
|
"learning_rate": 8.222222222222223e-06, |
|
"loss": 2.4024, |
|
"step": 231500 |
|
}, |
|
{ |
|
"epoch": 87.51, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 2.4045, |
|
"step": 232000 |
|
}, |
|
{ |
|
"epoch": 87.7, |
|
"learning_rate": 7.777777777777777e-06, |
|
"loss": 2.4001, |
|
"step": 232500 |
|
}, |
|
{ |
|
"epoch": 87.89, |
|
"learning_rate": 7.555555555555556e-06, |
|
"loss": 2.4001, |
|
"step": 233000 |
|
}, |
|
{ |
|
"epoch": 88.08, |
|
"learning_rate": 7.333333333333334e-06, |
|
"loss": 2.4025, |
|
"step": 233500 |
|
}, |
|
{ |
|
"epoch": 88.27, |
|
"learning_rate": 7.111111111111112e-06, |
|
"loss": 2.3957, |
|
"step": 234000 |
|
}, |
|
{ |
|
"epoch": 88.46, |
|
"learning_rate": 6.888888888888889e-06, |
|
"loss": 2.3971, |
|
"step": 234500 |
|
}, |
|
{ |
|
"epoch": 88.65, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 2.4007, |
|
"step": 235000 |
|
}, |
|
{ |
|
"epoch": 88.83, |
|
"learning_rate": 6.4444444444444445e-06, |
|
"loss": 2.4002, |
|
"step": 235500 |
|
}, |
|
{ |
|
"epoch": 89.02, |
|
"learning_rate": 6.222222222222222e-06, |
|
"loss": 2.3995, |
|
"step": 236000 |
|
}, |
|
{ |
|
"epoch": 89.21, |
|
"learning_rate": 6e-06, |
|
"loss": 2.3985, |
|
"step": 236500 |
|
}, |
|
{ |
|
"epoch": 89.4, |
|
"learning_rate": 5.777777777777778e-06, |
|
"loss": 2.4028, |
|
"step": 237000 |
|
}, |
|
{ |
|
"epoch": 89.59, |
|
"learning_rate": 5.555555555555556e-06, |
|
"loss": 2.395, |
|
"step": 237500 |
|
}, |
|
{ |
|
"epoch": 89.59, |
|
"eval_loss": 2.3064496517181396, |
|
"eval_runtime": 283.9176, |
|
"eval_samples_per_second": 485.535, |
|
"eval_steps_per_second": 3.793, |
|
"step": 237500 |
|
}, |
|
{ |
|
"epoch": 89.78, |
|
"learning_rate": 5.333333333333334e-06, |
|
"loss": 2.3968, |
|
"step": 238000 |
|
}, |
|
{ |
|
"epoch": 89.97, |
|
"learning_rate": 5.1111111111111115e-06, |
|
"loss": 2.3952, |
|
"step": 238500 |
|
}, |
|
{ |
|
"epoch": 90.15, |
|
"learning_rate": 4.888888888888889e-06, |
|
"loss": 2.4006, |
|
"step": 239000 |
|
}, |
|
{ |
|
"epoch": 90.34, |
|
"learning_rate": 4.666666666666667e-06, |
|
"loss": 2.3952, |
|
"step": 239500 |
|
}, |
|
{ |
|
"epoch": 90.53, |
|
"learning_rate": 4.444444444444445e-06, |
|
"loss": 2.3949, |
|
"step": 240000 |
|
}, |
|
{ |
|
"epoch": 90.72, |
|
"learning_rate": 4.222222222222223e-06, |
|
"loss": 2.3966, |
|
"step": 240500 |
|
}, |
|
{ |
|
"epoch": 90.91, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 2.3955, |
|
"step": 241000 |
|
}, |
|
{ |
|
"epoch": 91.1, |
|
"learning_rate": 3.777777777777778e-06, |
|
"loss": 2.4001, |
|
"step": 241500 |
|
}, |
|
{ |
|
"epoch": 91.29, |
|
"learning_rate": 3.555555555555556e-06, |
|
"loss": 2.3956, |
|
"step": 242000 |
|
}, |
|
{ |
|
"epoch": 91.47, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 2.3951, |
|
"step": 242500 |
|
}, |
|
{ |
|
"epoch": 91.66, |
|
"learning_rate": 3.111111111111111e-06, |
|
"loss": 2.4001, |
|
"step": 243000 |
|
}, |
|
{ |
|
"epoch": 91.85, |
|
"learning_rate": 2.888888888888889e-06, |
|
"loss": 2.3945, |
|
"step": 243500 |
|
}, |
|
{ |
|
"epoch": 92.04, |
|
"learning_rate": 2.666666666666667e-06, |
|
"loss": 2.395, |
|
"step": 244000 |
|
}, |
|
{ |
|
"epoch": 92.23, |
|
"learning_rate": 2.4444444444444447e-06, |
|
"loss": 2.3963, |
|
"step": 244500 |
|
}, |
|
{ |
|
"epoch": 92.42, |
|
"learning_rate": 2.2222222222222225e-06, |
|
"loss": 2.3944, |
|
"step": 245000 |
|
}, |
|
{ |
|
"epoch": 92.61, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 2.3951, |
|
"step": 245500 |
|
}, |
|
{ |
|
"epoch": 92.79, |
|
"learning_rate": 1.777777777777778e-06, |
|
"loss": 2.3897, |
|
"step": 246000 |
|
}, |
|
{ |
|
"epoch": 92.98, |
|
"learning_rate": 1.5555555555555556e-06, |
|
"loss": 2.3926, |
|
"step": 246500 |
|
}, |
|
{ |
|
"epoch": 93.17, |
|
"learning_rate": 1.3333333333333334e-06, |
|
"loss": 2.3968, |
|
"step": 247000 |
|
}, |
|
{ |
|
"epoch": 93.36, |
|
"learning_rate": 1.1111111111111112e-06, |
|
"loss": 2.3924, |
|
"step": 247500 |
|
}, |
|
{ |
|
"epoch": 93.55, |
|
"learning_rate": 8.88888888888889e-07, |
|
"loss": 2.3908, |
|
"step": 248000 |
|
}, |
|
{ |
|
"epoch": 93.74, |
|
"learning_rate": 6.666666666666667e-07, |
|
"loss": 2.3932, |
|
"step": 248500 |
|
}, |
|
{ |
|
"epoch": 93.93, |
|
"learning_rate": 4.444444444444445e-07, |
|
"loss": 2.3942, |
|
"step": 249000 |
|
}, |
|
{ |
|
"epoch": 94.12, |
|
"learning_rate": 2.2222222222222224e-07, |
|
"loss": 2.398, |
|
"step": 249500 |
|
}, |
|
{ |
|
"epoch": 94.3, |
|
"learning_rate": 0.0, |
|
"loss": 2.3932, |
|
"step": 250000 |
|
}, |
|
{ |
|
"epoch": 94.3, |
|
"eval_loss": 2.302946090698242, |
|
"eval_runtime": 283.7177, |
|
"eval_samples_per_second": 485.877, |
|
"eval_steps_per_second": 3.796, |
|
"step": 250000 |
|
} |
|
], |
|
"max_steps": 250000, |
|
"num_train_epochs": 95, |
|
"total_flos": 4.2134352665063916e+18, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |