{
  "best_metric": 17.94694428111922,
  "best_model_checkpoint": "./checkpoint-5000",
  "epoch": 58.13953488372093,
  "eval_steps": 1000,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.29, "learning_rate": 1.0500000000000001e-06, "loss": 10.5572, "step": 25},
    {"epoch": 0.58, "learning_rate": 2.3e-06, "loss": 8.9397, "step": 50},
    {"epoch": 0.87, "learning_rate": 3.55e-06, "loss": 7.3274, "step": 75},
    {"epoch": 1.16, "learning_rate": 4.800000000000001e-06, "loss": 6.2977, "step": 100},
    {"epoch": 1.45, "learning_rate": 6.0500000000000005e-06, "loss": 5.6656, "step": 125},
    {"epoch": 1.74, "learning_rate": 7.2999999999999996e-06, "loss": 5.3192, "step": 150},
    {"epoch": 2.03, "learning_rate": 8.550000000000001e-06, "loss": 5.0393, "step": 175},
    {"epoch": 2.33, "learning_rate": 9.800000000000001e-06, "loss": 4.7781, "step": 200},
    {"epoch": 2.62, "learning_rate": 1.1050000000000001e-05, "loss": 4.0512, "step": 225},
    {"epoch": 2.91, "learning_rate": 1.23e-05, "loss": 3.0152, "step": 250},
    {"epoch": 3.2, "learning_rate": 1.3550000000000002e-05, "loss": 2.2934, "step": 275},
    {"epoch": 3.49, "learning_rate": 1.48e-05, "loss": 1.8781, "step": 300},
    {"epoch": 3.78, "learning_rate": 1.605e-05, "loss": 1.6424, "step": 325},
    {"epoch": 4.07, "learning_rate": 1.73e-05, "loss": 1.4523, "step": 350},
    {"epoch": 4.36, "learning_rate": 1.855e-05, "loss": 1.2698, "step": 375},
    {"epoch": 4.65, "learning_rate": 1.9800000000000004e-05, "loss": 1.1583, "step": 400},
    {"epoch": 4.94, "learning_rate": 2.105e-05, "loss": 1.0625, "step": 425},
    {"epoch": 5.23, "learning_rate": 2.23e-05, "loss": 0.9448, "step": 450},
    {"epoch": 5.52, "learning_rate": 2.355e-05, "loss": 0.883, "step": 475},
    {"epoch": 5.81, "learning_rate": 2.48e-05, "loss": 0.8449, "step": 500},
    {"epoch": 6.1, "learning_rate": 2.4883333333333333e-05, "loss": 0.7883, "step": 525},
    {"epoch": 6.4, "learning_rate": 2.4744444444444445e-05, "loss": 0.6962, "step": 550},
    {"epoch": 6.69, "learning_rate": 2.4605555555555558e-05, "loss": 0.6835, "step": 575},
    {"epoch": 6.98, "learning_rate": 2.4466666666666667e-05, "loss": 0.6745, "step": 600},
    {"epoch": 7.27, "learning_rate": 2.432777777777778e-05, "loss": 0.5938, "step": 625},
    {"epoch": 7.56, "learning_rate": 2.418888888888889e-05, "loss": 0.573, "step": 650},
    {"epoch": 7.85, "learning_rate": 2.4050000000000002e-05, "loss": 0.5678, "step": 675},
    {"epoch": 8.14, "learning_rate": 2.391111111111111e-05, "loss": 0.5257, "step": 700},
    {"epoch": 8.43, "learning_rate": 2.3772222222222224e-05, "loss": 0.4938, "step": 725},
    {"epoch": 8.72, "learning_rate": 2.3633333333333336e-05, "loss": 0.4766, "step": 750},
    {"epoch": 9.01, "learning_rate": 2.3494444444444446e-05, "loss": 0.4733, "step": 775},
    {"epoch": 9.3, "learning_rate": 2.3355555555555555e-05, "loss": 0.4272, "step": 800},
    {"epoch": 9.59, "learning_rate": 2.3216666666666667e-05, "loss": 0.4259, "step": 825},
    {"epoch": 9.88, "learning_rate": 2.307777777777778e-05, "loss": 0.414, "step": 850},
    {"epoch": 10.17, "learning_rate": 2.293888888888889e-05, "loss": 0.3929, "step": 875},
    {"epoch": 10.47, "learning_rate": 2.2800000000000002e-05, "loss": 0.3739, "step": 900},
    {"epoch": 10.76, "learning_rate": 2.2661111111111115e-05, "loss": 0.3643, "step": 925},
    {"epoch": 11.05, "learning_rate": 2.2522222222222224e-05, "loss": 0.3698, "step": 950},
    {"epoch": 11.34, "learning_rate": 2.2383333333333333e-05, "loss": 0.3196, "step": 975},
    {"epoch": 11.63, "learning_rate": 2.2244444444444446e-05, "loss": 0.3275, "step": 1000},
    {"epoch": 11.63, "eval_loss": 0.4105360507965088, "eval_runtime": 74.8642, "eval_samples_per_second": 85.448, "eval_steps_per_second": 1.336, "eval_wer": 20.540933144260368, "step": 1000},
    {"epoch": 11.92, "learning_rate": 2.2105555555555558e-05, "loss": 0.3297, "step": 1025},
    {"epoch": 12.21, "learning_rate": 2.1966666666666668e-05, "loss": 0.2975, "step": 1050},
    {"epoch": 12.5, "learning_rate": 2.1827777777777777e-05, "loss": 0.2921, "step": 1075},
    {"epoch": 12.79, "learning_rate": 2.168888888888889e-05, "loss": 0.2965, "step": 1100},
    {"epoch": 13.08, "learning_rate": 2.1550000000000002e-05, "loss": 0.2763, "step": 1125},
    {"epoch": 13.37, "learning_rate": 2.141111111111111e-05, "loss": 0.257, "step": 1150},
    {"epoch": 13.66, "learning_rate": 2.1272222222222224e-05, "loss": 0.26, "step": 1175},
    {"epoch": 13.95, "learning_rate": 2.1133333333333337e-05, "loss": 0.2629, "step": 1200},
    {"epoch": 14.24, "learning_rate": 2.0994444444444446e-05, "loss": 0.2406, "step": 1225},
    {"epoch": 14.53, "learning_rate": 2.0855555555555555e-05, "loss": 0.2324, "step": 1250},
    {"epoch": 14.83, "learning_rate": 2.0716666666666668e-05, "loss": 0.2347, "step": 1275},
    {"epoch": 15.12, "learning_rate": 2.057777777777778e-05, "loss": 0.2232, "step": 1300},
    {"epoch": 15.41, "learning_rate": 2.043888888888889e-05, "loss": 0.2101, "step": 1325},
    {"epoch": 15.7, "learning_rate": 2.0300000000000002e-05, "loss": 0.2138, "step": 1350},
    {"epoch": 15.99, "learning_rate": 2.016111111111111e-05, "loss": 0.2106, "step": 1375},
    {"epoch": 16.28, "learning_rate": 2.0022222222222224e-05, "loss": 0.1863, "step": 1400},
    {"epoch": 16.57, "learning_rate": 1.9883333333333333e-05, "loss": 0.1975, "step": 1425},
    {"epoch": 16.86, "learning_rate": 1.9744444444444446e-05, "loss": 0.1903, "step": 1450},
    {"epoch": 17.15, "learning_rate": 1.960555555555556e-05, "loss": 0.184, "step": 1475},
    {"epoch": 17.44, "learning_rate": 1.9466666666666668e-05, "loss": 0.1708, "step": 1500},
    {"epoch": 17.73, "learning_rate": 1.9327777777777777e-05, "loss": 0.1712, "step": 1525},
    {"epoch": 18.02, "learning_rate": 1.918888888888889e-05, "loss": 0.1784, "step": 1550},
    {"epoch": 18.31, "learning_rate": 1.9050000000000002e-05, "loss": 0.1563, "step": 1575},
    {"epoch": 18.6, "learning_rate": 1.891111111111111e-05, "loss": 0.1609, "step": 1600},
    {"epoch": 18.9, "learning_rate": 1.8772222222222224e-05, "loss": 0.1592, "step": 1625},
    {"epoch": 19.19, "learning_rate": 1.8633333333333333e-05, "loss": 0.1485, "step": 1650},
    {"epoch": 19.48, "learning_rate": 1.8494444444444446e-05, "loss": 0.1476, "step": 1675},
    {"epoch": 19.77, "learning_rate": 1.8355555555555555e-05, "loss": 0.1446, "step": 1700},
    {"epoch": 20.06, "learning_rate": 1.8216666666666668e-05, "loss": 0.1447, "step": 1725},
    {"epoch": 20.35, "learning_rate": 1.807777777777778e-05, "loss": 0.1322, "step": 1750},
    {"epoch": 20.64, "learning_rate": 1.793888888888889e-05, "loss": 0.1326, "step": 1775},
    {"epoch": 20.93, "learning_rate": 1.78e-05, "loss": 0.1327, "step": 1800},
    {"epoch": 21.22, "learning_rate": 1.766111111111111e-05, "loss": 0.12, "step": 1825},
    {"epoch": 21.51, "learning_rate": 1.7522222222222224e-05, "loss": 0.1198, "step": 1850},
    {"epoch": 21.8, "learning_rate": 1.7383333333333333e-05, "loss": 0.1214, "step": 1875},
    {"epoch": 22.09, "learning_rate": 1.7244444444444446e-05, "loss": 0.1173, "step": 1900},
    {"epoch": 22.38, "learning_rate": 1.7105555555555555e-05, "loss": 0.1109, "step": 1925},
    {"epoch": 22.67, "learning_rate": 1.6966666666666668e-05, "loss": 0.1117, "step": 1950},
    {"epoch": 22.97, "learning_rate": 1.6827777777777777e-05, "loss": 0.1123, "step": 1975},
    {"epoch": 23.26, "learning_rate": 1.668888888888889e-05, "loss": 0.1016, "step": 2000},
    {"epoch": 23.26, "eval_loss": 0.4036574959754944, "eval_runtime": 73.4614, "eval_samples_per_second": 87.08, "eval_steps_per_second": 1.361, "eval_wer": 18.386193061944454, "step": 2000},
    {"epoch": 23.55, "learning_rate": 1.6550000000000002e-05, "loss": 0.1007, "step": 2025},
    {"epoch": 23.84, "learning_rate": 1.6411111111111112e-05, "loss": 0.1051, "step": 2050},
    {"epoch": 24.13, "learning_rate": 1.627222222222222e-05, "loss": 0.1012, "step": 2075},
    {"epoch": 24.42, "learning_rate": 1.6133333333333334e-05, "loss": 0.093, "step": 2100},
    {"epoch": 24.71, "learning_rate": 1.5994444444444446e-05, "loss": 0.0956, "step": 2125},
    {"epoch": 25.0, "learning_rate": 1.5855555555555555e-05, "loss": 0.0971, "step": 2150},
    {"epoch": 25.29, "learning_rate": 1.5716666666666668e-05, "loss": 0.0837, "step": 2175},
    {"epoch": 25.58, "learning_rate": 1.5577777777777777e-05, "loss": 0.0884, "step": 2200},
    {"epoch": 25.87, "learning_rate": 1.543888888888889e-05, "loss": 0.0871, "step": 2225},
    {"epoch": 26.16, "learning_rate": 1.53e-05, "loss": 0.0826, "step": 2250},
    {"epoch": 26.45, "learning_rate": 1.5161111111111112e-05, "loss": 0.0807, "step": 2275},
    {"epoch": 26.74, "learning_rate": 1.5022222222222224e-05, "loss": 0.0808, "step": 2300},
    {"epoch": 27.03, "learning_rate": 1.4883333333333335e-05, "loss": 0.0807, "step": 2325},
    {"epoch": 27.33, "learning_rate": 1.4744444444444445e-05, "loss": 0.0737, "step": 2350},
    {"epoch": 27.62, "learning_rate": 1.4605555555555556e-05, "loss": 0.0739, "step": 2375},
    {"epoch": 27.91, "learning_rate": 1.4466666666666667e-05, "loss": 0.0748, "step": 2400},
    {"epoch": 28.2, "learning_rate": 1.4327777777777779e-05, "loss": 0.0691, "step": 2425},
    {"epoch": 28.49, "learning_rate": 1.418888888888889e-05, "loss": 0.0711, "step": 2450},
    {"epoch": 28.78, "learning_rate": 1.4050000000000003e-05, "loss": 0.0682, "step": 2475},
    {"epoch": 29.07, "learning_rate": 1.391111111111111e-05, "loss": 0.067, "step": 2500},
    {"epoch": 29.36, "learning_rate": 1.3772222222222223e-05, "loss": 0.0655, "step": 2525},
    {"epoch": 29.65, "learning_rate": 1.3633333333333334e-05, "loss": 0.0635, "step": 2550},
    {"epoch": 29.94, "learning_rate": 1.3494444444444446e-05, "loss": 0.066, "step": 2575},
    {"epoch": 30.23, "learning_rate": 1.3355555555555557e-05, "loss": 0.0587, "step": 2600},
    {"epoch": 30.52, "learning_rate": 1.3216666666666667e-05, "loss": 0.0605, "step": 2625},
    {"epoch": 30.81, "learning_rate": 1.3077777777777778e-05, "loss": 0.0606, "step": 2650},
    {"epoch": 31.1, "learning_rate": 1.2938888888888888e-05, "loss": 0.0581, "step": 2675},
    {"epoch": 31.4, "learning_rate": 1.2800000000000001e-05, "loss": 0.0544, "step": 2700},
    {"epoch": 31.69, "learning_rate": 1.2661111111111112e-05, "loss": 0.0568, "step": 2725},
    {"epoch": 31.98, "learning_rate": 1.2522222222222225e-05, "loss": 0.0567, "step": 2750},
    {"epoch": 32.27, "learning_rate": 1.2383333333333334e-05, "loss": 0.05, "step": 2775},
    {"epoch": 32.56, "learning_rate": 1.2244444444444445e-05, "loss": 0.0525, "step": 2800},
    {"epoch": 32.85, "learning_rate": 1.2105555555555556e-05, "loss": 0.0541, "step": 2825},
    {"epoch": 33.14, "learning_rate": 1.1966666666666668e-05, "loss": 0.0508, "step": 2850},
    {"epoch": 33.43, "learning_rate": 1.1827777777777778e-05, "loss": 0.0474, "step": 2875},
    {"epoch": 33.72, "learning_rate": 1.168888888888889e-05, "loss": 0.05, "step": 2900},
    {"epoch": 34.01, "learning_rate": 1.1550000000000001e-05, "loss": 0.0491, "step": 2925},
    {"epoch": 34.3, "learning_rate": 1.141111111111111e-05, "loss": 0.045, "step": 2950},
    {"epoch": 34.59, "learning_rate": 1.1272222222222223e-05, "loss": 0.0476, "step": 2975},
    {"epoch": 34.88, "learning_rate": 1.1133333333333334e-05, "loss": 0.0444, "step": 3000},
    {"epoch": 34.88, "eval_loss": 0.42896273732185364, "eval_runtime": 73.302, "eval_samples_per_second": 87.269, "eval_steps_per_second": 1.364, "eval_wer": 18.28589215923633, "step": 3000},
    {"epoch": 35.17, "learning_rate": 1.0994444444444445e-05, "loss": 0.0444, "step": 3025},
    {"epoch": 35.47, "learning_rate": 1.0855555555555556e-05, "loss": 0.0436, "step": 3050},
    {"epoch": 35.76, "learning_rate": 1.0716666666666667e-05, "loss": 0.0436, "step": 3075},
    {"epoch": 36.05, "learning_rate": 1.0577777777777778e-05, "loss": 0.0442, "step": 3100},
    {"epoch": 36.34, "learning_rate": 1.043888888888889e-05, "loss": 0.0396, "step": 3125},
    {"epoch": 36.63, "learning_rate": 1.03e-05, "loss": 0.0407, "step": 3150},
    {"epoch": 36.92, "learning_rate": 1.0161111111111112e-05, "loss": 0.04, "step": 3175},
    {"epoch": 37.21, "learning_rate": 1.0022222222222223e-05, "loss": 0.0391, "step": 3200},
    {"epoch": 37.5, "learning_rate": 9.883333333333334e-06, "loss": 0.0374, "step": 3225},
    {"epoch": 37.79, "learning_rate": 9.744444444444445e-06, "loss": 0.0399, "step": 3250},
    {"epoch": 38.08, "learning_rate": 9.605555555555556e-06, "loss": 0.0374, "step": 3275},
    {"epoch": 38.37, "learning_rate": 9.466666666666667e-06, "loss": 0.0363, "step": 3300},
    {"epoch": 38.66, "learning_rate": 9.327777777777778e-06, "loss": 0.0374, "step": 3325},
    {"epoch": 38.95, "learning_rate": 9.18888888888889e-06, "loss": 0.038, "step": 3350},
    {"epoch": 39.24, "learning_rate": 9.05e-06, "loss": 0.0343, "step": 3375},
    {"epoch": 39.53, "learning_rate": 8.911111111111112e-06, "loss": 0.0355, "step": 3400},
    {"epoch": 39.83, "learning_rate": 8.772222222222222e-06, "loss": 0.0372, "step": 3425},
    {"epoch": 40.12, "learning_rate": 8.633333333333334e-06, "loss": 0.0349, "step": 3450},
    {"epoch": 40.41, "learning_rate": 8.494444444444445e-06, "loss": 0.0325, "step": 3475},
    {"epoch": 40.7, "learning_rate": 8.355555555555556e-06, "loss": 0.0334, "step": 3500},
    {"epoch": 40.99, "learning_rate": 8.216666666666667e-06, "loss": 0.0347, "step": 3525},
    {"epoch": 41.28, "learning_rate": 8.077777777777778e-06, "loss": 0.0313, "step": 3550},
    {"epoch": 41.57, "learning_rate": 7.938888888888889e-06, "loss": 0.0321, "step": 3575},
    {"epoch": 41.86, "learning_rate": 7.8e-06, "loss": 0.0309, "step": 3600},
    {"epoch": 42.15, "learning_rate": 7.661111111111112e-06, "loss": 0.0313, "step": 3625},
    {"epoch": 42.44, "learning_rate": 7.5222222222222226e-06, "loss": 0.0293, "step": 3650},
    {"epoch": 42.73, "learning_rate": 7.3833333333333335e-06, "loss": 0.0319, "step": 3675},
    {"epoch": 43.02, "learning_rate": 7.244444444444445e-06, "loss": 0.0306, "step": 3700},
    {"epoch": 43.31, "learning_rate": 7.105555555555555e-06, "loss": 0.03, "step": 3725},
    {"epoch": 43.6, "learning_rate": 6.966666666666667e-06, "loss": 0.0299, "step": 3750},
    {"epoch": 43.9, "learning_rate": 6.827777777777779e-06, "loss": 0.03, "step": 3775},
    {"epoch": 44.19, "learning_rate": 6.688888888888889e-06, "loss": 0.0287, "step": 3800},
    {"epoch": 44.48, "learning_rate": 6.550000000000001e-06, "loss": 0.0279, "step": 3825},
    {"epoch": 44.77, "learning_rate": 6.411111111111111e-06, "loss": 0.0281, "step": 3850},
    {"epoch": 45.06, "learning_rate": 6.272222222222223e-06, "loss": 0.0284, "step": 3875},
    {"epoch": 45.35, "learning_rate": 6.133333333333334e-06, "loss": 0.027, "step": 3900},
    {"epoch": 45.64, "learning_rate": 5.9944444444444446e-06, "loss": 0.0261, "step": 3925},
    {"epoch": 45.93, "learning_rate": 5.8555555555555555e-06, "loss": 0.0276, "step": 3950},
    {"epoch": 46.22, "learning_rate": 5.7166666666666664e-06, "loss": 0.0264, "step": 3975},
    {"epoch": 46.51, "learning_rate": 5.577777777777778e-06, "loss": 0.0265, "step": 4000},
    {"epoch": 46.51, "eval_loss": 0.4462805688381195, "eval_runtime": 73.8032, "eval_samples_per_second": 86.676, "eval_steps_per_second": 1.355, "eval_wer": 18.01438799156089, "step": 4000},
    {"epoch": 46.8, "learning_rate": 5.438888888888889e-06, "loss": 0.0268, "step": 4025},
    {"epoch": 47.09, "learning_rate": 5.3e-06, "loss": 0.0262, "step": 4050},
    {"epoch": 47.38, "learning_rate": 5.161111111111112e-06, "loss": 0.0252, "step": 4075},
    {"epoch": 47.67, "learning_rate": 5.022222222222223e-06, "loss": 0.026, "step": 4100},
    {"epoch": 47.97, "learning_rate": 4.883333333333334e-06, "loss": 0.025, "step": 4125},
    {"epoch": 48.26, "learning_rate": 4.744444444444445e-06, "loss": 0.0244, "step": 4150},
    {"epoch": 48.55, "learning_rate": 4.605555555555556e-06, "loss": 0.0239, "step": 4175},
    {"epoch": 48.84, "learning_rate": 4.4666666666666665e-06, "loss": 0.0245, "step": 4200},
    {"epoch": 49.13, "learning_rate": 4.3277777777777775e-06, "loss": 0.024, "step": 4225},
    {"epoch": 49.42, "learning_rate": 4.188888888888889e-06, "loss": 0.0241, "step": 4250},
    {"epoch": 49.71, "learning_rate": 4.05e-06, "loss": 0.0238, "step": 4275},
    {"epoch": 50.0, "learning_rate": 3.911111111111111e-06, "loss": 0.0246, "step": 4300},
    {"epoch": 50.29, "learning_rate": 3.772222222222222e-06, "loss": 0.0227, "step": 4325},
    {"epoch": 50.58, "learning_rate": 3.633333333333334e-06, "loss": 0.0236, "step": 4350},
    {"epoch": 50.87, "learning_rate": 3.4944444444444448e-06, "loss": 0.0229, "step": 4375},
    {"epoch": 51.16, "learning_rate": 3.3555555555555557e-06, "loss": 0.0234, "step": 4400},
    {"epoch": 51.45, "learning_rate": 3.216666666666667e-06, "loss": 0.0229, "step": 4425},
    {"epoch": 51.74, "learning_rate": 3.077777777777778e-06, "loss": 0.0226, "step": 4450},
    {"epoch": 52.03, "learning_rate": 2.938888888888889e-06, "loss": 0.0221, "step": 4475},
    {"epoch": 52.33, "learning_rate": 2.8000000000000003e-06, "loss": 0.0225, "step": 4500},
    {"epoch": 52.62, "learning_rate": 2.6611111111111112e-06, "loss": 0.0233, "step": 4525},
    {"epoch": 52.91, "learning_rate": 2.522222222222222e-06, "loss": 0.0214, "step": 4550},
    {"epoch": 53.2, "learning_rate": 2.3833333333333335e-06, "loss": 0.0221, "step": 4575},
    {"epoch": 53.49, "learning_rate": 2.2444444444444445e-06, "loss": 0.0227, "step": 4600},
    {"epoch": 53.78, "learning_rate": 2.105555555555556e-06, "loss": 0.0218, "step": 4625},
    {"epoch": 54.07, "learning_rate": 1.9666666666666668e-06, "loss": 0.0219, "step": 4650},
    {"epoch": 54.36, "learning_rate": 1.827777777777778e-06, "loss": 0.0214, "step": 4675},
    {"epoch": 54.65, "learning_rate": 1.6888888888888888e-06, "loss": 0.0223, "step": 4700},
    {"epoch": 54.94, "learning_rate": 1.55e-06, "loss": 0.0213, "step": 4725},
    {"epoch": 55.23, "learning_rate": 1.4111111111111111e-06, "loss": 0.0205, "step": 4750},
    {"epoch": 55.52, "learning_rate": 1.2722222222222223e-06, "loss": 0.0206, "step": 4775},
    {"epoch": 55.81, "learning_rate": 1.1333333333333334e-06, "loss": 0.021, "step": 4800},
    {"epoch": 56.1, "learning_rate": 9.944444444444446e-07, "loss": 0.0211, "step": 4825},
    {"epoch": 56.4, "learning_rate": 8.555555555555556e-07, "loss": 0.0213, "step": 4850},
    {"epoch": 56.69, "learning_rate": 7.166666666666667e-07, "loss": 0.021, "step": 4875},
    {"epoch": 56.98, "learning_rate": 5.777777777777778e-07, "loss": 0.0201, "step": 4900},
    {"epoch": 57.27, "learning_rate": 4.3888888888888895e-07, "loss": 0.0218, "step": 4925},
    {"epoch": 57.56, "learning_rate": 3.0000000000000004e-07, "loss": 0.0212, "step": 4950},
    {"epoch": 57.85, "learning_rate": 1.611111111111111e-07, "loss": 0.0202, "step": 4975},
    {"epoch": 58.14, "learning_rate": 2.2222222222222224e-08, "loss": 0.0213, "step": 5000},
    {"epoch": 58.14, "eval_loss": 0.44943663477897644, "eval_runtime": 73.6911, "eval_samples_per_second": 86.808, "eval_steps_per_second": 1.357, "eval_wer": 17.94694428111922, "step": 5000},
    {"epoch": 58.14, "step": 5000, "total_flos": 4.129597039509504e+19, "train_loss": 0.4845314011693001, "train_runtime": 4394.9084, "train_samples_per_second": 145.623, "train_steps_per_second": 1.138}
  ],
  "logging_steps": 25,
  "max_steps": 5000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 59,
  "save_steps": 1000,
  "total_flos": 4.129597039509504e+19,
  "train_batch_size": 128,
  "trial_name": null,
  "trial_params": null
}