{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 33.78378378378378,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.17,
      "learning_rate": 4.4e-07,
      "loss": 1.0761,
      "step": 25
    },
    {
      "epoch": 0.34,
      "learning_rate": 9.400000000000001e-07,
      "loss": 0.9171,
      "step": 50
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.44e-06,
      "loss": 0.9707,
      "step": 75
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.94e-06,
      "loss": 1.0289,
      "step": 100
    },
    {
      "epoch": 0.84,
      "learning_rate": 2.4400000000000004e-06,
      "loss": 0.9489,
      "step": 125
    },
    {
      "epoch": 1.01,
      "learning_rate": 2.9400000000000002e-06,
      "loss": 0.9441,
      "step": 150
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.44e-06,
      "loss": 1.5917,
      "step": 175
    },
    {
      "epoch": 1.35,
      "learning_rate": 3.94e-06,
      "loss": 1.6768,
      "step": 200
    },
    {
      "epoch": 1.52,
      "learning_rate": 4.440000000000001e-06,
      "loss": 1.5563,
      "step": 225
    },
    {
      "epoch": 1.69,
      "learning_rate": 4.94e-06,
      "loss": 1.6267,
      "step": 250
    },
    {
      "epoch": 1.86,
      "learning_rate": 5.4400000000000004e-06,
      "loss": 1.8075,
      "step": 275
    },
    {
      "epoch": 2.03,
      "learning_rate": 5.94e-06,
      "loss": 1.804,
      "step": 300
    },
    {
      "epoch": 2.2,
      "learning_rate": 6.440000000000001e-06,
      "loss": 2.3545,
      "step": 325
    },
    {
      "epoch": 2.36,
      "learning_rate": 6.9400000000000005e-06,
      "loss": 3.6551,
      "step": 350
    },
    {
      "epoch": 2.53,
      "learning_rate": 7.420000000000001e-06,
      "loss": 4.6441,
      "step": 375
    },
    {
      "epoch": 2.7,
      "learning_rate": 7.92e-06,
      "loss": 5.3999,
      "step": 400
    },
    {
      "epoch": 2.87,
      "learning_rate": 8.42e-06,
      "loss": 6.0272,
      "step": 425
    },
    {
      "epoch": 3.04,
      "learning_rate": 8.920000000000001e-06,
      "loss": 5.7311,
      "step": 450
    },
    {
      "epoch": 3.21,
      "learning_rate": 9.42e-06,
      "loss": 6.0189,
      "step": 475
    },
    {
      "epoch": 3.38,
      "learning_rate": 9.920000000000002e-06,
      "loss": 7.2512,
      "step": 500
    },
    {
      "epoch": 3.55,
      "learning_rate": 9.953333333333333e-06,
      "loss": 6.7626,
      "step": 525
    },
    {
      "epoch": 3.72,
      "learning_rate": 9.89777777777778e-06,
      "loss": 5.912,
      "step": 550
    },
    {
      "epoch": 3.89,
      "learning_rate": 9.842222222222223e-06,
      "loss": 7.2678,
      "step": 575
    },
    {
      "epoch": 4.05,
      "learning_rate": 9.786666666666667e-06,
      "loss": 7.7191,
      "step": 600
    },
    {
      "epoch": 4.22,
      "learning_rate": 9.731111111111113e-06,
      "loss": 5.3031,
      "step": 625
    },
    {
      "epoch": 4.39,
      "learning_rate": 9.675555555555555e-06,
      "loss": 7.4638,
      "step": 650
    },
    {
      "epoch": 4.56,
      "learning_rate": 9.620000000000001e-06,
      "loss": 6.5911,
      "step": 675
    },
    {
      "epoch": 4.73,
      "learning_rate": 9.564444444444445e-06,
      "loss": 5.5009,
      "step": 700
    },
    {
      "epoch": 4.9,
      "learning_rate": 9.508888888888889e-06,
      "loss": 7.2869,
      "step": 725
    },
    {
      "epoch": 5.07,
      "learning_rate": 9.455555555555557e-06,
      "loss": 6.3539,
      "step": 750
    },
    {
      "epoch": 5.24,
      "learning_rate": 9.402222222222222e-06,
      "loss": 4.408,
      "step": 775
    },
    {
      "epoch": 5.41,
      "learning_rate": 9.346666666666666e-06,
      "loss": 6.2703,
      "step": 800
    },
    {
      "epoch": 5.57,
      "learning_rate": 9.291111111111112e-06,
      "loss": 6.6461,
      "step": 825
    },
    {
      "epoch": 5.74,
      "learning_rate": 9.235555555555556e-06,
      "loss": 5.4521,
      "step": 850
    },
    {
      "epoch": 5.91,
      "learning_rate": 9.180000000000002e-06,
      "loss": 6.3987,
      "step": 875
    },
    {
      "epoch": 6.08,
      "learning_rate": 9.124444444444444e-06,
      "loss": 6.1241,
      "step": 900
    },
    {
      "epoch": 6.25,
      "learning_rate": 9.06888888888889e-06,
      "loss": 4.0049,
      "step": 925
    },
    {
      "epoch": 6.42,
      "learning_rate": 9.013333333333334e-06,
      "loss": 5.8297,
      "step": 950
    },
    {
      "epoch": 6.59,
      "learning_rate": 8.957777777777778e-06,
      "loss": 5.7196,
      "step": 975
    },
    {
      "epoch": 6.76,
      "learning_rate": 8.902222222222224e-06,
      "loss": 4.9319,
      "step": 1000
    },
    {
      "epoch": 6.76,
      "eval_loss": 10.077404975891113,
      "eval_runtime": 246.976,
      "eval_samples_per_second": 9.973,
      "eval_steps_per_second": 0.624,
      "eval_wer": 73.98924171438487,
      "step": 1000
    },
    {
      "epoch": 6.93,
      "learning_rate": 8.846666666666668e-06,
      "loss": 5.367,
      "step": 1025
    },
    {
      "epoch": 7.09,
      "learning_rate": 8.791111111111112e-06,
      "loss": 5.9988,
      "step": 1050
    },
    {
      "epoch": 7.26,
      "learning_rate": 8.735555555555556e-06,
      "loss": 3.7539,
      "step": 1075
    },
    {
      "epoch": 7.43,
      "learning_rate": 8.68e-06,
      "loss": 4.6308,
      "step": 1100
    },
    {
      "epoch": 7.6,
      "learning_rate": 8.624444444444446e-06,
      "loss": 5.3367,
      "step": 1125
    },
    {
      "epoch": 7.77,
      "learning_rate": 8.56888888888889e-06,
      "loss": 4.8021,
      "step": 1150
    },
    {
      "epoch": 7.94,
      "learning_rate": 8.513333333333335e-06,
      "loss": 4.3516,
      "step": 1175
    },
    {
      "epoch": 8.11,
      "learning_rate": 8.457777777777778e-06,
      "loss": 5.1473,
      "step": 1200
    },
    {
      "epoch": 8.28,
      "learning_rate": 8.402222222222223e-06,
      "loss": 4.1365,
      "step": 1225
    },
    {
      "epoch": 8.45,
      "learning_rate": 8.346666666666668e-06,
      "loss": 3.7336,
      "step": 1250
    },
    {
      "epoch": 8.61,
      "learning_rate": 8.291111111111112e-06,
      "loss": 4.9819,
      "step": 1275
    },
    {
      "epoch": 8.78,
      "learning_rate": 8.235555555555557e-06,
      "loss": 4.1889,
      "step": 1300
    },
    {
      "epoch": 8.95,
      "learning_rate": 8.18e-06,
      "loss": 3.6101,
      "step": 1325
    },
    {
      "epoch": 9.12,
      "learning_rate": 8.124444444444445e-06,
      "loss": 4.9865,
      "step": 1350
    },
    {
      "epoch": 9.29,
      "learning_rate": 8.06888888888889e-06,
      "loss": 4.0579,
      "step": 1375
    },
    {
      "epoch": 9.46,
      "learning_rate": 8.013333333333333e-06,
      "loss": 3.2807,
      "step": 1400
    },
    {
      "epoch": 9.63,
      "learning_rate": 7.957777777777779e-06,
      "loss": 5.2015,
      "step": 1425
    },
    {
      "epoch": 9.8,
      "learning_rate": 7.902222222222223e-06,
      "loss": 4.2603,
      "step": 1450
    },
    {
      "epoch": 9.97,
      "learning_rate": 7.846666666666667e-06,
      "loss": 2.8056,
      "step": 1475
    },
    {
      "epoch": 10.14,
      "learning_rate": 7.791111111111111e-06,
      "loss": 4.4941,
      "step": 1500
    },
    {
      "epoch": 10.3,
      "learning_rate": 7.735555555555557e-06,
      "loss": 3.6729,
      "step": 1525
    },
    {
      "epoch": 10.47,
      "learning_rate": 7.680000000000001e-06,
      "loss": 2.5204,
      "step": 1550
    },
    {
      "epoch": 10.64,
      "learning_rate": 7.624444444444445e-06,
      "loss": 4.6931,
      "step": 1575
    },
    {
      "epoch": 10.81,
      "learning_rate": 7.56888888888889e-06,
      "loss": 4.064,
      "step": 1600
    },
    {
      "epoch": 10.98,
      "learning_rate": 7.513333333333334e-06,
      "loss": 2.5234,
      "step": 1625
    },
    {
      "epoch": 11.15,
      "learning_rate": 7.457777777777778e-06,
      "loss": 3.3534,
      "step": 1650
    },
    {
      "epoch": 11.32,
      "learning_rate": 7.402222222222223e-06,
      "loss": 3.4619,
      "step": 1675
    },
    {
      "epoch": 11.49,
      "learning_rate": 7.346666666666668e-06,
      "loss": 2.6103,
      "step": 1700
    },
    {
      "epoch": 11.66,
      "learning_rate": 7.291111111111112e-06,
      "loss": 3.8041,
      "step": 1725
    },
    {
      "epoch": 11.82,
      "learning_rate": 7.235555555555556e-06,
      "loss": 3.5229,
      "step": 1750
    },
    {
      "epoch": 11.99,
      "learning_rate": 7.180000000000001e-06,
      "loss": 2.5819,
      "step": 1775
    },
    {
      "epoch": 12.16,
      "learning_rate": 7.124444444444445e-06,
      "loss": 3.2352,
      "step": 1800
    },
    {
      "epoch": 12.33,
      "learning_rate": 7.06888888888889e-06,
      "loss": 3.1571,
      "step": 1825
    },
    {
      "epoch": 12.5,
      "learning_rate": 7.0133333333333345e-06,
      "loss": 2.2571,
      "step": 1850
    },
    {
      "epoch": 12.67,
      "learning_rate": 6.9577777777777785e-06,
      "loss": 3.1133,
      "step": 1875
    },
    {
      "epoch": 12.84,
      "learning_rate": 6.902222222222223e-06,
      "loss": 3.0995,
      "step": 1900
    },
    {
      "epoch": 13.01,
      "learning_rate": 6.846666666666667e-06,
      "loss": 2.1959,
      "step": 1925
    },
    {
      "epoch": 13.18,
      "learning_rate": 6.7911111111111115e-06,
      "loss": 2.8142,
      "step": 1950
    },
    {
      "epoch": 13.34,
      "learning_rate": 6.735555555555556e-06,
      "loss": 3.1267,
      "step": 1975
    },
    {
      "epoch": 13.51,
      "learning_rate": 6.680000000000001e-06,
      "loss": 2.6116,
      "step": 2000
    },
    {
      "epoch": 13.51,
      "eval_loss": 11.408943176269531,
      "eval_runtime": 241.1367,
      "eval_samples_per_second": 10.214,
      "eval_steps_per_second": 0.639,
      "eval_wer": 67.04841228526809,
      "step": 2000
    },
    {
      "epoch": 13.68,
      "learning_rate": 6.6244444444444445e-06,
      "loss": 2.517,
      "step": 2025
    },
    {
      "epoch": 13.85,
      "learning_rate": 6.568888888888889e-06,
      "loss": 3.0989,
      "step": 2050
    },
    {
      "epoch": 14.02,
      "learning_rate": 6.513333333333333e-06,
      "loss": 2.0728,
      "step": 2075
    },
    {
      "epoch": 14.19,
      "learning_rate": 6.457777777777778e-06,
      "loss": 2.0813,
      "step": 2100
    },
    {
      "epoch": 14.36,
      "learning_rate": 6.402222222222223e-06,
      "loss": 3.0881,
      "step": 2125
    },
    {
      "epoch": 14.53,
      "learning_rate": 6.346666666666668e-06,
      "loss": 2.3762,
      "step": 2150
    },
    {
      "epoch": 14.7,
      "learning_rate": 6.291111111111111e-06,
      "loss": 2.2093,
      "step": 2175
    },
    {
      "epoch": 14.86,
      "learning_rate": 6.235555555555556e-06,
      "loss": 2.8965,
      "step": 2200
    },
    {
      "epoch": 15.03,
      "learning_rate": 6.18e-06,
      "loss": 2.3463,
      "step": 2225
    },
    {
      "epoch": 15.2,
      "learning_rate": 6.124444444444445e-06,
      "loss": 1.6227,
      "step": 2250
    },
    {
      "epoch": 15.37,
      "learning_rate": 6.06888888888889e-06,
      "loss": 2.7423,
      "step": 2275
    },
    {
      "epoch": 15.54,
      "learning_rate": 6.013333333333335e-06,
      "loss": 2.522,
      "step": 2300
    },
    {
      "epoch": 15.71,
      "learning_rate": 5.957777777777778e-06,
      "loss": 1.6232,
      "step": 2325
    },
    {
      "epoch": 15.88,
      "learning_rate": 5.902222222222223e-06,
      "loss": 2.6979,
      "step": 2350
    },
    {
      "epoch": 16.05,
      "learning_rate": 5.846666666666667e-06,
      "loss": 2.0168,
      "step": 2375
    },
    {
      "epoch": 16.22,
      "learning_rate": 5.791111111111112e-06,
      "loss": 1.3373,
      "step": 2400
    },
    {
      "epoch": 16.39,
      "learning_rate": 5.735555555555557e-06,
      "loss": 2.2285,
      "step": 2425
    },
    {
      "epoch": 16.55,
      "learning_rate": 5.68e-06,
      "loss": 2.3166,
      "step": 2450
    },
    {
      "epoch": 16.72,
      "learning_rate": 5.624444444444445e-06,
      "loss": 1.387,
      "step": 2475
    },
    {
      "epoch": 16.89,
      "learning_rate": 5.56888888888889e-06,
      "loss": 2.4226,
      "step": 2500
    },
    {
      "epoch": 17.06,
      "learning_rate": 5.513333333333334e-06,
      "loss": 2.0713,
      "step": 2525
    },
    {
      "epoch": 17.23,
      "learning_rate": 5.4577777777777785e-06,
      "loss": 1.2785,
      "step": 2550
    },
    {
      "epoch": 17.4,
      "learning_rate": 5.402222222222223e-06,
      "loss": 1.8165,
      "step": 2575
    },
    {
      "epoch": 17.57,
      "learning_rate": 5.346666666666667e-06,
      "loss": 1.9608,
      "step": 2600
    },
    {
      "epoch": 17.74,
      "learning_rate": 5.2911111111111115e-06,
      "loss": 1.5858,
      "step": 2625
    },
    {
      "epoch": 17.91,
      "learning_rate": 5.235555555555556e-06,
      "loss": 2.024,
      "step": 2650
    },
    {
      "epoch": 18.07,
      "learning_rate": 5.18e-06,
      "loss": 1.8938,
      "step": 2675
    },
    {
      "epoch": 18.24,
      "learning_rate": 5.124444444444445e-06,
      "loss": 1.1712,
      "step": 2700
    },
    {
      "epoch": 18.41,
      "learning_rate": 5.06888888888889e-06,
      "loss": 1.8954,
      "step": 2725
    },
    {
      "epoch": 18.58,
      "learning_rate": 5.013333333333333e-06,
      "loss": 1.7302,
      "step": 2750
    },
    {
      "epoch": 18.75,
      "learning_rate": 4.957777777777778e-06,
      "loss": 1.3895,
      "step": 2775
    },
    {
      "epoch": 18.92,
      "learning_rate": 4.902222222222222e-06,
      "loss": 1.4083,
      "step": 2800
    },
    {
      "epoch": 19.09,
      "learning_rate": 4.846666666666667e-06,
      "loss": 1.7037,
      "step": 2825
    },
    {
      "epoch": 19.26,
      "learning_rate": 4.791111111111111e-06,
      "loss": 1.0114,
      "step": 2850
    },
    {
      "epoch": 19.43,
      "learning_rate": 4.735555555555556e-06,
      "loss": 1.4758,
      "step": 2875
    },
    {
      "epoch": 19.59,
      "learning_rate": 4.680000000000001e-06,
      "loss": 1.7263,
      "step": 2900
    },
    {
      "epoch": 19.76,
      "learning_rate": 4.624444444444445e-06,
      "loss": 1.311,
      "step": 2925
    },
    {
      "epoch": 19.93,
      "learning_rate": 4.568888888888889e-06,
      "loss": 1.1316,
      "step": 2950
    },
    {
      "epoch": 20.1,
      "learning_rate": 4.513333333333333e-06,
      "loss": 1.5401,
      "step": 2975
    },
    {
      "epoch": 20.27,
      "learning_rate": 4.457777777777778e-06,
      "loss": 0.9607,
      "step": 3000
    },
    {
      "epoch": 20.27,
      "eval_loss": 11.82657241821289,
      "eval_runtime": 243.7624,
      "eval_samples_per_second": 10.104,
      "eval_steps_per_second": 0.632,
      "eval_wer": 60.944820406038524,
      "step": 3000
    },
    {
      "epoch": 20.44,
      "learning_rate": 4.402222222222223e-06,
      "loss": 1.2625,
      "step": 3025
    },
    {
      "epoch": 20.61,
      "learning_rate": 4.346666666666667e-06,
      "loss": 1.4382,
      "step": 3050
    },
    {
      "epoch": 20.78,
      "learning_rate": 4.291111111111112e-06,
      "loss": 1.1822,
      "step": 3075
    },
    {
      "epoch": 20.95,
      "learning_rate": 4.235555555555556e-06,
      "loss": 0.683,
      "step": 3100
    },
    {
      "epoch": 21.11,
      "learning_rate": 4.18e-06,
      "loss": 1.3987,
      "step": 3125
    },
    {
      "epoch": 21.28,
      "learning_rate": 4.124444444444445e-06,
      "loss": 1.0554,
      "step": 3150
    },
    {
      "epoch": 21.45,
      "learning_rate": 4.0688888888888896e-06,
      "loss": 0.9125,
      "step": 3175
    },
    {
      "epoch": 21.62,
      "learning_rate": 4.013333333333334e-06,
      "loss": 1.2349,
      "step": 3200
    },
    {
      "epoch": 21.79,
      "learning_rate": 3.9577777777777785e-06,
      "loss": 1.4298,
      "step": 3225
    },
    {
      "epoch": 21.96,
      "learning_rate": 3.9022222222222225e-06,
      "loss": 0.8018,
      "step": 3250
    },
    {
      "epoch": 22.13,
      "learning_rate": 3.8466666666666665e-06,
      "loss": 1.2586,
      "step": 3275
    },
    {
      "epoch": 22.3,
      "learning_rate": 3.7911111111111114e-06,
      "loss": 0.9665,
      "step": 3300
    },
    {
      "epoch": 22.47,
      "learning_rate": 3.7355555555555555e-06,
      "loss": 0.4815,
      "step": 3325
    },
    {
      "epoch": 22.64,
      "learning_rate": 3.6800000000000003e-06,
      "loss": 1.1611,
      "step": 3350
    },
    {
      "epoch": 22.8,
      "learning_rate": 3.624444444444445e-06,
      "loss": 0.9511,
      "step": 3375
    },
    {
      "epoch": 22.97,
      "learning_rate": 3.568888888888889e-06,
      "loss": 0.4956,
      "step": 3400
    },
    {
      "epoch": 23.14,
      "learning_rate": 3.5133333333333337e-06,
      "loss": 0.7079,
      "step": 3425
    },
    {
      "epoch": 23.31,
      "learning_rate": 3.457777777777778e-06,
      "loss": 0.7881,
      "step": 3450
    },
    {
      "epoch": 23.48,
      "learning_rate": 3.4022222222222222e-06,
      "loss": 0.3986,
      "step": 3475
    },
    {
      "epoch": 23.65,
      "learning_rate": 3.346666666666667e-06,
      "loss": 0.8256,
      "step": 3500
    },
    {
      "epoch": 23.82,
      "learning_rate": 3.2911111111111116e-06,
      "loss": 0.6971,
      "step": 3525
    },
    {
      "epoch": 23.99,
      "learning_rate": 3.2355555555555556e-06,
      "loss": 0.5003,
      "step": 3550
    },
    {
      "epoch": 24.16,
      "learning_rate": 3.1800000000000005e-06,
      "loss": 0.7882,
      "step": 3575
    },
    {
      "epoch": 24.32,
      "learning_rate": 3.124444444444445e-06,
      "loss": 0.5888,
      "step": 3600
    },
    {
      "epoch": 24.49,
      "learning_rate": 3.068888888888889e-06,
      "loss": 0.6318,
      "step": 3625
    },
    {
      "epoch": 24.66,
      "learning_rate": 3.013333333333334e-06,
      "loss": 0.8246,
      "step": 3650
    },
    {
      "epoch": 24.83,
      "learning_rate": 2.957777777777778e-06,
      "loss": 0.7554,
      "step": 3675
    },
    {
      "epoch": 25.0,
      "learning_rate": 2.9022222222222223e-06,
      "loss": 0.5072,
      "step": 3700
    },
    {
      "epoch": 25.17,
      "learning_rate": 2.8466666666666672e-06,
      "loss": 0.6132,
      "step": 3725
    },
    {
      "epoch": 25.34,
      "learning_rate": 2.7911111111111113e-06,
      "loss": 0.661,
      "step": 3750
    },
    {
      "epoch": 25.51,
      "learning_rate": 2.7355555555555557e-06,
      "loss": 0.4763,
      "step": 3775
    },
    {
      "epoch": 25.68,
      "learning_rate": 2.68e-06,
      "loss": 0.5485,
      "step": 3800
    },
    {
      "epoch": 25.84,
      "learning_rate": 2.6244444444444446e-06,
      "loss": 0.5781,
      "step": 3825
    },
    {
      "epoch": 26.01,
      "learning_rate": 2.568888888888889e-06,
      "loss": 0.4007,
      "step": 3850
    },
    {
      "epoch": 26.18,
      "learning_rate": 2.5133333333333336e-06,
      "loss": 0.3914,
      "step": 3875
    },
    {
      "epoch": 26.35,
      "learning_rate": 2.457777777777778e-06,
      "loss": 0.4562,
      "step": 3900
    },
    {
      "epoch": 26.52,
      "learning_rate": 2.4022222222222225e-06,
      "loss": 0.3649,
      "step": 3925
    },
    {
      "epoch": 26.69,
      "learning_rate": 2.346666666666667e-06,
      "loss": 0.3235,
      "step": 3950
    },
    {
      "epoch": 26.86,
      "learning_rate": 2.2911111111111114e-06,
      "loss": 0.5641,
      "step": 3975
    },
    {
      "epoch": 27.03,
      "learning_rate": 2.235555555555556e-06,
      "loss": 0.3464,
      "step": 4000
    },
    {
      "epoch": 27.03,
      "eval_loss": 9.949957847595215,
      "eval_runtime": 246.2029,
      "eval_samples_per_second": 10.004,
      "eval_steps_per_second": 0.626,
      "eval_wer": 52.121290994273814,
      "step": 4000
    },
    {
      "epoch": 27.2,
      "learning_rate": 2.1800000000000003e-06,
      "loss": 0.237,
      "step": 4025
    },
    {
      "epoch": 27.36,
      "learning_rate": 2.1244444444444443e-06,
      "loss": 0.2766,
      "step": 4050
    },
    {
      "epoch": 27.53,
      "learning_rate": 2.0688888888888892e-06,
      "loss": 0.2813,
      "step": 4075
    },
    {
      "epoch": 27.7,
      "learning_rate": 2.0133333333333337e-06,
      "loss": 0.1791,
      "step": 4100
    },
    {
      "epoch": 27.87,
      "learning_rate": 1.9577777777777777e-06,
      "loss": 0.2774,
      "step": 4125
    },
    {
      "epoch": 28.04,
      "learning_rate": 1.9022222222222222e-06,
      "loss": 0.2763,
      "step": 4150
    },
    {
      "epoch": 28.21,
      "learning_rate": 1.8466666666666668e-06,
      "loss": 0.1649,
      "step": 4175
    },
    {
      "epoch": 28.38,
      "learning_rate": 1.7911111111111113e-06,
      "loss": 0.3234,
      "step": 4200
    },
    {
      "epoch": 28.55,
      "learning_rate": 1.7355555555555555e-06,
      "loss": 0.3068,
      "step": 4225
    },
    {
      "epoch": 28.72,
      "learning_rate": 1.6800000000000002e-06,
      "loss": 0.2139,
      "step": 4250
    },
    {
      "epoch": 28.89,
      "learning_rate": 1.6244444444444447e-06,
      "loss": 0.3518,
      "step": 4275
    },
    {
      "epoch": 29.05,
      "learning_rate": 1.568888888888889e-06,
      "loss": 0.321,
      "step": 4300
    },
    {
      "epoch": 29.22,
      "learning_rate": 1.5133333333333334e-06,
      "loss": 0.0825,
      "step": 4325
    },
    {
      "epoch": 29.39,
      "learning_rate": 1.457777777777778e-06,
      "loss": 0.1898,
      "step": 4350
    },
    {
      "epoch": 29.56,
      "learning_rate": 1.4022222222222223e-06,
      "loss": 0.219,
      "step": 4375
    },
    {
      "epoch": 29.73,
      "learning_rate": 1.3466666666666668e-06,
      "loss": 0.1058,
      "step": 4400
    },
    {
      "epoch": 29.9,
      "learning_rate": 1.2911111111111112e-06,
      "loss": 0.1666,
      "step": 4425
    },
    {
      "epoch": 30.07,
      "learning_rate": 1.2355555555555557e-06,
      "loss": 0.1786,
      "step": 4450
    },
    {
      "epoch": 30.24,
      "learning_rate": 1.1800000000000001e-06,
      "loss": 0.0728,
      "step": 4475
    },
    {
      "epoch": 30.41,
      "learning_rate": 1.1244444444444446e-06,
      "loss": 0.1202,
      "step": 4500
    },
    {
      "epoch": 30.57,
      "learning_rate": 1.068888888888889e-06,
      "loss": 0.1826,
      "step": 4525
    },
    {
      "epoch": 30.74,
      "learning_rate": 1.0133333333333333e-06,
      "loss": 0.1013,
      "step": 4550
    },
    {
      "epoch": 30.91,
      "learning_rate": 9.57777777777778e-07,
      "loss": 0.1335,
      "step": 4575
    },
    {
      "epoch": 31.08,
      "learning_rate": 9.022222222222222e-07,
      "loss": 0.062,
      "step": 4600
    },
    {
      "epoch": 31.25,
      "learning_rate": 8.466666666666668e-07,
      "loss": 0.0308,
      "step": 4625
    },
    {
      "epoch": 31.42,
      "learning_rate": 7.911111111111111e-07,
      "loss": 0.0679,
      "step": 4650
    },
    {
      "epoch": 31.59,
      "learning_rate": 7.355555555555556e-07,
      "loss": 0.0583,
      "step": 4675
    },
    {
      "epoch": 31.76,
      "learning_rate": 6.800000000000001e-07,
      "loss": 0.044,
      "step": 4700
    },
    {
      "epoch": 31.93,
      "learning_rate": 6.244444444444445e-07,
      "loss": 0.047,
      "step": 4725
    },
    {
      "epoch": 32.09,
      "learning_rate": 5.68888888888889e-07,
      "loss": 0.0376,
      "step": 4750
    },
    {
      "epoch": 32.26,
      "learning_rate": 5.133333333333334e-07,
      "loss": 0.0268,
      "step": 4775
    },
    {
      "epoch": 32.43,
      "learning_rate": 4.5777777777777784e-07,
      "loss": 0.0392,
      "step": 4800
    },
    {
      "epoch": 32.6,
      "learning_rate": 4.0222222222222224e-07,
      "loss": 0.0264,
      "step": 4825
    },
    {
      "epoch": 32.77,
      "learning_rate": 3.466666666666667e-07,
      "loss": 0.0248,
      "step": 4850
    },
    {
      "epoch": 32.94,
      "learning_rate": 2.9111111111111116e-07,
      "loss": 0.0182,
      "step": 4875
    },
    {
      "epoch": 33.11,
      "learning_rate": 2.3555555555555556e-07,
      "loss": 0.0149,
      "step": 4900
    },
    {
      "epoch": 33.28,
      "learning_rate": 1.8e-07,
      "loss": 0.0076,
      "step": 4925
    },
    {
      "epoch": 33.45,
      "learning_rate": 1.2444444444444446e-07,
      "loss": 0.0084,
      "step": 4950
    },
    {
      "epoch": 33.61,
      "learning_rate": 6.888888888888889e-08,
      "loss": 0.0159,
      "step": 4975
    },
    {
      "epoch": 33.78,
      "learning_rate": 1.3333333333333334e-08,
      "loss": 0.0122,
      "step": 5000
    },
    {
      "epoch": 33.78,
      "eval_loss": 9.061223030090332,
      "eval_runtime": 246.5625,
      "eval_samples_per_second": 9.989,
      "eval_steps_per_second": 0.625,
      "eval_wer": 44.67291341315287,
      "step": 5000
    },
    {
      "epoch": 33.78,
      "step": 5000,
      "total_flos": 4.598319781272512e+19,
      "train_loss": 2.2148335320025683,
      "train_runtime": 12302.5688,
      "train_samples_per_second": 13.005,
      "train_steps_per_second": 0.406
    }
  ],
  "max_steps": 5000,
  "num_train_epochs": 34,
  "total_flos": 4.598319781272512e+19,
  "trial_name": null,
  "trial_params": null
}