{
  "best_metric": 15.015790814663829,
  "best_model_checkpoint": "./whisper-small-mix-fr/checkpoint-5000",
  "epoch": 1.0,
  "eval_steps": 1000,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.005, "grad_norm": 10.08858585357666, "learning_rate": 4.6000000000000004e-07, "loss": 0.9513, "step": 25},
    {"epoch": 0.01, "grad_norm": 5.1840314865112305, "learning_rate": 9.600000000000001e-07, "loss": 0.7499, "step": 50},
    {"epoch": 0.015, "grad_norm": 4.318074703216553, "learning_rate": 1.46e-06, "loss": 0.5574, "step": 75},
    {"epoch": 0.02, "grad_norm": 2.8745129108428955, "learning_rate": 1.9600000000000003e-06, "loss": 0.3836, "step": 100},
    {"epoch": 0.025, "grad_norm": 3.0574047565460205, "learning_rate": 2.46e-06, "loss": 0.3209, "step": 125},
    {"epoch": 0.03, "grad_norm": 3.6434690952301025, "learning_rate": 2.96e-06, "loss": 0.3555, "step": 150},
    {"epoch": 0.035, "grad_norm": 3.391029119491577, "learning_rate": 3.46e-06, "loss": 0.3315, "step": 175},
    {"epoch": 0.04, "grad_norm": 2.6835551261901855, "learning_rate": 3.96e-06, "loss": 0.325, "step": 200},
    {"epoch": 0.045, "grad_norm": 2.660156011581421, "learning_rate": 4.4600000000000005e-06, "loss": 0.3011, "step": 225},
    {"epoch": 0.05, "grad_norm": 2.9171078205108643, "learning_rate": 4.960000000000001e-06, "loss": 0.2927, "step": 250},
    {"epoch": 0.055, "grad_norm": 2.8251867294311523, "learning_rate": 5.460000000000001e-06, "loss": 0.2897, "step": 275},
    {"epoch": 0.06, "grad_norm": 2.700000762939453, "learning_rate": 5.9600000000000005e-06, "loss": 0.2666, "step": 300},
    {"epoch": 0.065, "grad_norm": 2.6077873706817627, "learning_rate": 6.460000000000001e-06, "loss": 0.2599, "step": 325},
    {"epoch": 0.07, "grad_norm": 2.5077693462371826, "learning_rate": 6.96e-06, "loss": 0.2465, "step": 350},
    {"epoch": 0.075, "grad_norm": 2.5103046894073486, "learning_rate": 7.4600000000000006e-06, "loss": 0.2513, "step": 375},
    {"epoch": 0.08, "grad_norm": 1.961869716644287, "learning_rate": 7.960000000000002e-06, "loss": 0.2471, "step": 400},
    {"epoch": 0.085, "grad_norm": 2.522207498550415, "learning_rate": 8.46e-06, "loss": 0.2353, "step": 425},
    {"epoch": 0.09, "grad_norm": 2.2522737979888916, "learning_rate": 8.96e-06, "loss": 0.2116, "step": 450},
    {"epoch": 0.095, "grad_norm": 2.7229421138763428, "learning_rate": 9.460000000000001e-06, "loss": 0.2182, "step": 475},
    {"epoch": 0.1, "grad_norm": 2.5425918102264404, "learning_rate": 9.960000000000001e-06, "loss": 0.239, "step": 500},
    {"epoch": 0.105, "grad_norm": 2.1395082473754883, "learning_rate": 9.94888888888889e-06, "loss": 0.2252, "step": 525},
    {"epoch": 0.11, "grad_norm": 2.3434157371520996, "learning_rate": 9.893333333333334e-06, "loss": 0.2193, "step": 550},
    {"epoch": 0.115, "grad_norm": 2.3833093643188477, "learning_rate": 9.837777777777778e-06, "loss": 0.2198, "step": 575},
    {"epoch": 0.12, "grad_norm": 3.546076536178589, "learning_rate": 9.782222222222222e-06, "loss": 0.3714, "step": 600},
    {"epoch": 0.125, "grad_norm": 2.898831605911255, "learning_rate": 9.726666666666668e-06, "loss": 0.4111, "step": 625},
    {"epoch": 0.13, "grad_norm": 3.033583164215088, "learning_rate": 9.671111111111112e-06, "loss": 0.3474, "step": 650},
    {"epoch": 0.135, "grad_norm": 2.8390543460845947, "learning_rate": 9.615555555555558e-06, "loss": 0.2983, "step": 675},
    {"epoch": 0.14, "grad_norm": 2.839262008666992, "learning_rate": 9.56e-06, "loss": 0.3226, "step": 700},
    {"epoch": 0.145, "grad_norm": 2.5362398624420166, "learning_rate": 9.504444444444446e-06, "loss": 0.2798, "step": 725},
    {"epoch": 0.15, "grad_norm": 2.3447251319885254, "learning_rate": 9.44888888888889e-06, "loss": 0.2298, "step": 750},
    {"epoch": 0.155, "grad_norm": 2.498056411743164, "learning_rate": 9.393333333333334e-06, "loss": 0.2775, "step": 775},
    {"epoch": 0.16, "grad_norm": 2.353714942932129, "learning_rate": 9.33777777777778e-06, "loss": 0.2355, "step": 800},
    {"epoch": 0.165, "grad_norm": 2.372309923171997, "learning_rate": 9.282222222222222e-06, "loss": 0.2285, "step": 825},
    {"epoch": 0.17, "grad_norm": 2.3350610733032227, "learning_rate": 9.226666666666668e-06, "loss": 0.2159, "step": 850},
    {"epoch": 0.175, "grad_norm": 2.5473477840423584, "learning_rate": 9.171111111111112e-06, "loss": 0.1899, "step": 875},
    {"epoch": 0.18, "grad_norm": 2.363154649734497, "learning_rate": 9.115555555555556e-06, "loss": 0.2044, "step": 900},
    {"epoch": 0.185, "grad_norm": 1.9945369958877563, "learning_rate": 9.060000000000001e-06, "loss": 0.1781, "step": 925},
    {"epoch": 0.19, "grad_norm": 2.254298448562622, "learning_rate": 9.004444444444445e-06, "loss": 0.1707, "step": 950},
    {"epoch": 0.195, "grad_norm": 2.3024325370788574, "learning_rate": 8.94888888888889e-06, "loss": 0.1829, "step": 975},
    {"epoch": 0.2, "grad_norm": 2.001993417739868, "learning_rate": 8.893333333333333e-06, "loss": 0.187, "step": 1000},
    {"epoch": 0.2, "eval_loss": 0.3652883768081665, "eval_runtime": 728.8609, "eval_samples_per_second": 22.17, "eval_steps_per_second": 2.771, "eval_wer": 17.349750250342183, "step": 1000},
    {"epoch": 0.205, "grad_norm": 2.180095672607422, "learning_rate": 8.83777777777778e-06, "loss": 0.1777, "step": 1025},
    {"epoch": 0.21, "grad_norm": 2.2858028411865234, "learning_rate": 8.782222222222223e-06, "loss": 0.1726, "step": 1050},
    {"epoch": 0.215, "grad_norm": 1.9933851957321167, "learning_rate": 8.726666666666667e-06, "loss": 0.1802, "step": 1075},
    {"epoch": 0.22, "grad_norm": 1.7360317707061768, "learning_rate": 8.671111111111113e-06, "loss": 0.1673, "step": 1100},
    {"epoch": 0.225, "grad_norm": 2.6692724227905273, "learning_rate": 8.615555555555555e-06, "loss": 0.1633, "step": 1125},
    {"epoch": 0.23, "grad_norm": 2.456207275390625, "learning_rate": 8.560000000000001e-06, "loss": 0.2043, "step": 1150},
    {"epoch": 0.235, "grad_norm": 2.5965209007263184, "learning_rate": 8.504444444444445e-06, "loss": 0.1786, "step": 1175},
    {"epoch": 0.24, "grad_norm": 2.5705783367156982, "learning_rate": 8.448888888888889e-06, "loss": 0.2001, "step": 1200},
    {"epoch": 0.245, "grad_norm": 2.3910598754882812, "learning_rate": 8.393333333333335e-06, "loss": 0.2014, "step": 1225},
    {"epoch": 0.25, "grad_norm": 2.1060574054718018, "learning_rate": 8.337777777777777e-06, "loss": 0.1898, "step": 1250},
    {"epoch": 0.255, "grad_norm": 1.8726638555526733, "learning_rate": 8.282222222222223e-06, "loss": 0.1763, "step": 1275},
    {"epoch": 0.26, "grad_norm": 2.0807666778564453, "learning_rate": 8.226666666666667e-06, "loss": 0.1841, "step": 1300},
    {"epoch": 0.265, "grad_norm": 2.084286689758301, "learning_rate": 8.171111111111113e-06, "loss": 0.1675, "step": 1325},
    {"epoch": 0.27, "grad_norm": 2.35015606880188, "learning_rate": 8.115555555555557e-06, "loss": 0.1623, "step": 1350},
    {"epoch": 0.275, "grad_norm": 2.1124699115753174, "learning_rate": 8.06e-06, "loss": 0.1607, "step": 1375},
    {"epoch": 0.28, "grad_norm": 1.906491994857788, "learning_rate": 8.004444444444445e-06, "loss": 0.1542, "step": 1400},
    {"epoch": 0.285, "grad_norm": 2.005571126937866, "learning_rate": 7.948888888888889e-06, "loss": 0.1749, "step": 1425},
    {"epoch": 0.29, "grad_norm": 1.8412165641784668, "learning_rate": 7.893333333333335e-06, "loss": 0.176, "step": 1450},
    {"epoch": 0.295, "grad_norm": 2.491558313369751, "learning_rate": 7.837777777777779e-06, "loss": 0.1645, "step": 1475},
    {"epoch": 0.3, "grad_norm": 2.0074329376220703, "learning_rate": 7.782222222222223e-06, "loss": 0.1654, "step": 1500},
    {"epoch": 0.305, "grad_norm": 2.0252339839935303, "learning_rate": 7.726666666666667e-06, "loss": 0.1769, "step": 1525},
    {"epoch": 0.31, "grad_norm": 2.3819239139556885, "learning_rate": 7.67111111111111e-06, "loss": 0.1534, "step": 1550},
    {"epoch": 0.315, "grad_norm": 2.0175583362579346, "learning_rate": 7.6155555555555564e-06, "loss": 0.1553, "step": 1575},
    {"epoch": 0.32, "grad_norm": 2.1074907779693604, "learning_rate": 7.5600000000000005e-06, "loss": 0.1659, "step": 1600},
    {"epoch": 0.325, "grad_norm": 1.6283425092697144, "learning_rate": 7.504444444444445e-06, "loss": 0.1646, "step": 1625},
    {"epoch": 0.33, "grad_norm": 2.038062810897827, "learning_rate": 7.44888888888889e-06, "loss": 0.1689, "step": 1650},
    {"epoch": 0.335, "grad_norm": 2.047210931777954, "learning_rate": 7.393333333333333e-06, "loss": 0.1802, "step": 1675},
    {"epoch": 0.34, "grad_norm": 2.040024757385254, "learning_rate": 7.337777777777778e-06, "loss": 0.1665, "step": 1700},
    {"epoch": 0.345, "grad_norm": 1.7998043298721313, "learning_rate": 7.282222222222222e-06, "loss": 0.1586, "step": 1725},
    {"epoch": 0.35, "grad_norm": 2.252765417098999, "learning_rate": 7.226666666666667e-06, "loss": 0.1361, "step": 1750},
    {"epoch": 0.355, "grad_norm": 2.491612672805786, "learning_rate": 7.171111111111112e-06, "loss": 0.183, "step": 1775},
    {"epoch": 0.36, "grad_norm": 2.3502893447875977, "learning_rate": 7.115555555555557e-06, "loss": 0.2771, "step": 1800},
    {"epoch": 0.365, "grad_norm": 1.9661980867385864, "learning_rate": 7.06e-06, "loss": 0.219, "step": 1825},
    {"epoch": 0.37, "grad_norm": 2.2753665447235107, "learning_rate": 7.004444444444445e-06, "loss": 0.1976, "step": 1850},
    {"epoch": 0.375, "grad_norm": 2.1407172679901123, "learning_rate": 6.948888888888889e-06, "loss": 0.2095, "step": 1875},
    {"epoch": 0.38, "grad_norm": 2.26143479347229, "learning_rate": 6.893333333333334e-06, "loss": 0.1887, "step": 1900},
    {"epoch": 0.385, "grad_norm": 1.8551641702651978, "learning_rate": 6.837777777777779e-06, "loss": 0.1815, "step": 1925},
    {"epoch": 0.39, "grad_norm": 2.3487014770507812, "learning_rate": 6.782222222222222e-06, "loss": 0.1885, "step": 1950},
    {"epoch": 0.395, "grad_norm": 1.6881073713302612, "learning_rate": 6.726666666666667e-06, "loss": 0.141, "step": 1975},
    {"epoch": 0.4, "grad_norm": 1.776225209236145, "learning_rate": 6.671111111111112e-06, "loss": 0.1445, "step": 2000},
    {"epoch": 0.4, "eval_loss": 0.337929904460907, "eval_runtime": 715.6286, "eval_samples_per_second": 22.58, "eval_steps_per_second": 2.823, "eval_wer": 16.047970895128845, "step": 2000},
    {"epoch": 0.405, "grad_norm": 1.7904995679855347, "learning_rate": 6.615555555555556e-06, "loss": 0.155, "step": 2025},
    {"epoch": 0.41, "grad_norm": 2.136061191558838, "learning_rate": 6.560000000000001e-06, "loss": 0.2267, "step": 2050},
    {"epoch": 0.415, "grad_norm": 1.9061017036437988, "learning_rate": 6.504444444444446e-06, "loss": 0.2217, "step": 2075},
    {"epoch": 0.42, "grad_norm": 2.478090524673462, "learning_rate": 6.448888888888889e-06, "loss": 0.2131, "step": 2100},
    {"epoch": 0.425, "grad_norm": 1.8599025011062622, "learning_rate": 6.393333333333334e-06, "loss": 0.2291, "step": 2125},
    {"epoch": 0.43, "grad_norm": 1.7993838787078857, "learning_rate": 6.3377777777777786e-06, "loss": 0.2152, "step": 2150},
    {"epoch": 0.435, "grad_norm": 1.959346890449524, "learning_rate": 6.282222222222223e-06, "loss": 0.2315, "step": 2175},
    {"epoch": 0.44, "grad_norm": 2.289241313934326, "learning_rate": 6.2266666666666675e-06, "loss": 0.2424, "step": 2200},
    {"epoch": 0.445, "grad_norm": 2.3349103927612305, "learning_rate": 6.171111111111112e-06, "loss": 0.2149, "step": 2225},
    {"epoch": 0.45, "grad_norm": 1.915467381477356, "learning_rate": 6.1155555555555555e-06, "loss": 0.1946, "step": 2250},
    {"epoch": 0.455, "grad_norm": 1.8727192878723145, "learning_rate": 6.0600000000000004e-06, "loss": 0.195, "step": 2275},
    {"epoch": 0.46, "grad_norm": 1.938673734664917, "learning_rate": 6.004444444444445e-06, "loss": 0.1997, "step": 2300},
    {"epoch": 0.465, "grad_norm": 2.3887195587158203, "learning_rate": 5.948888888888889e-06, "loss": 0.2204, "step": 2325},
    {"epoch": 0.47, "grad_norm": 2.328936815261841, "learning_rate": 5.893333333333334e-06, "loss": 0.2219, "step": 2350},
    {"epoch": 0.475, "grad_norm": 1.9731664657592773, "learning_rate": 5.837777777777777e-06, "loss": 0.1999, "step": 2375},
    {"epoch": 0.48, "grad_norm": 2.532160520553589, "learning_rate": 5.782222222222222e-06, "loss": 0.1592, "step": 2400},
    {"epoch": 0.485, "grad_norm": 2.0180513858795166, "learning_rate": 5.726666666666667e-06, "loss": 0.1483, "step": 2425},
    {"epoch": 0.49, "grad_norm": 2.120114326477051, "learning_rate": 5.671111111111112e-06, "loss": 0.1432, "step": 2450},
    {"epoch": 0.495, "grad_norm": 2.1796326637268066, "learning_rate": 5.615555555555556e-06, "loss": 0.1409, "step": 2475},
    {"epoch": 0.5, "grad_norm": 2.18973708152771, "learning_rate": 5.560000000000001e-06, "loss": 0.1423, "step": 2500},
    {"epoch": 0.505, "grad_norm": 1.9231557846069336, "learning_rate": 5.504444444444444e-06, "loss": 0.1392, "step": 2525},
    {"epoch": 0.51, "grad_norm": 1.988946795463562, "learning_rate": 5.448888888888889e-06, "loss": 0.171, "step": 2550},
    {"epoch": 0.515, "grad_norm": 2.1168086528778076, "learning_rate": 5.393333333333334e-06, "loss": 0.1573, "step": 2575},
    {"epoch": 0.52, "grad_norm": 2.9249277114868164, "learning_rate": 5.337777777777779e-06, "loss": 0.1947, "step": 2600},
    {"epoch": 0.525, "grad_norm": 1.9734596014022827, "learning_rate": 5.282222222222223e-06, "loss": 0.2473, "step": 2625},
    {"epoch": 0.53, "grad_norm": 2.1487271785736084, "learning_rate": 5.226666666666667e-06, "loss": 0.2281, "step": 2650},
    {"epoch": 0.535, "grad_norm": 2.1245877742767334, "learning_rate": 5.171111111111111e-06, "loss": 0.2103, "step": 2675},
    {"epoch": 0.54, "grad_norm": 2.845296859741211, "learning_rate": 5.115555555555556e-06, "loss": 0.1695, "step": 2700},
    {"epoch": 0.545, "grad_norm": 1.894158124923706, "learning_rate": 5.060000000000001e-06, "loss": 0.1626, "step": 2725},
    {"epoch": 0.55, "grad_norm": 1.936550259590149, "learning_rate": 5.004444444444445e-06, "loss": 0.1296, "step": 2750},
    {"epoch": 0.555, "grad_norm": 1.6843122243881226, "learning_rate": 4.94888888888889e-06, "loss": 0.141, "step": 2775},
    {"epoch": 0.56, "grad_norm": 1.5642544031143188, "learning_rate": 4.893333333333334e-06, "loss": 0.1503, "step": 2800},
    {"epoch": 0.565, "grad_norm": 1.9369734525680542, "learning_rate": 4.837777777777778e-06, "loss": 0.1251, "step": 2825},
    {"epoch": 0.57, "grad_norm": 2.033722400665283, "learning_rate": 4.7822222222222226e-06, "loss": 0.1464, "step": 2850},
    {"epoch": 0.575, "grad_norm": 1.9096460342407227, "learning_rate": 4.7266666666666674e-06, "loss": 0.1292, "step": 2875},
    {"epoch": 0.58, "grad_norm": 2.1668505668640137, "learning_rate": 4.6711111111111115e-06, "loss": 0.1495, "step": 2900},
    {"epoch": 0.585, "grad_norm": 1.7556490898132324, "learning_rate": 4.6155555555555555e-06, "loss": 0.1201, "step": 2925},
    {"epoch": 0.59, "grad_norm": 1.806643009185791, "learning_rate": 4.56e-06, "loss": 0.1559, "step": 2950},
    {"epoch": 0.595, "grad_norm": 2.2240452766418457, "learning_rate": 4.504444444444444e-06, "loss": 0.1916, "step": 2975},
    {"epoch": 0.6, "grad_norm": 2.006112813949585, "learning_rate": 4.448888888888889e-06, "loss": 0.1659, "step": 3000},
    {"epoch": 0.6, "eval_loss": 0.3254563808441162, "eval_runtime": 713.8266, "eval_samples_per_second": 22.637, "eval_steps_per_second": 2.83, "eval_wer": 15.377231600590155, "step": 3000},
    {"epoch": 0.605, "grad_norm": 2.583686590194702, "learning_rate": 4.393333333333334e-06, "loss": 0.18, "step": 3025},
    {"epoch": 0.61, "grad_norm": 2.474371910095215, "learning_rate": 4.337777777777778e-06, "loss": 0.1708, "step": 3050},
    {"epoch": 0.615, "grad_norm": 1.7476117610931396, "learning_rate": 4.282222222222222e-06, "loss": 0.1694, "step": 3075},
    {"epoch": 0.62, "grad_norm": 1.8390570878982544, "learning_rate": 4.226666666666667e-06, "loss": 0.1527, "step": 3100},
    {"epoch": 0.625, "grad_norm": 2.105689287185669, "learning_rate": 4.171111111111111e-06, "loss": 0.1589, "step": 3125},
    {"epoch": 0.63, "grad_norm": 1.7435396909713745, "learning_rate": 4.115555555555556e-06, "loss": 0.142, "step": 3150},
    {"epoch": 0.635, "grad_norm": 1.6353161334991455, "learning_rate": 4.060000000000001e-06, "loss": 0.1432, "step": 3175},
    {"epoch": 0.64, "grad_norm": 1.6273831129074097, "learning_rate": 4.004444444444445e-06, "loss": 0.1357, "step": 3200},
    {"epoch": 0.645, "grad_norm": 1.8896713256835938, "learning_rate": 3.948888888888889e-06, "loss": 0.1283, "step": 3225},
    {"epoch": 0.65, "grad_norm": 1.4946659803390503, "learning_rate": 3.893333333333333e-06, "loss": 0.1138, "step": 3250},
    {"epoch": 0.655, "grad_norm": 1.7483470439910889, "learning_rate": 3.837777777777778e-06, "loss": 0.1196, "step": 3275},
    {"epoch": 0.66, "grad_norm": 1.8768987655639648, "learning_rate": 3.782222222222223e-06, "loss": 0.1269, "step": 3300},
    {"epoch": 0.665, "grad_norm": 1.7175124883651733, "learning_rate": 3.726666666666667e-06, "loss": 0.146, "step": 3325},
    {"epoch": 0.67, "grad_norm": 1.719070553779602, "learning_rate": 3.6711111111111113e-06, "loss": 0.1364, "step": 3350},
    {"epoch": 0.675, "grad_norm": 1.8970329761505127, "learning_rate": 3.615555555555556e-06, "loss": 0.1345, "step": 3375},
    {"epoch": 0.68, "grad_norm": 1.9742193222045898, "learning_rate": 3.5600000000000002e-06, "loss": 0.1271, "step": 3400},
    {"epoch": 0.685, "grad_norm": 1.763426423072815, "learning_rate": 3.5044444444444447e-06, "loss": 0.1366, "step": 3425},
    {"epoch": 0.69, "grad_norm": 2.260113477706909, "learning_rate": 3.4488888888888896e-06, "loss": 0.151, "step": 3450},
    {"epoch": 0.695, "grad_norm": 1.8039120435714722, "learning_rate": 3.3933333333333336e-06, "loss": 0.1399, "step": 3475},
    {"epoch": 0.7, "grad_norm": 1.8737797737121582, "learning_rate": 3.337777777777778e-06, "loss": 0.1357, "step": 3500},
    {"epoch": 0.705, "grad_norm": 1.9594708681106567, "learning_rate": 3.282222222222223e-06, "loss": 0.1381, "step": 3525},
    {"epoch": 0.71, "grad_norm": 2.234194278717041, "learning_rate": 3.226666666666667e-06, "loss": 0.1216, "step": 3550},
    {"epoch": 0.715, "grad_norm": 2.309079647064209, "learning_rate": 3.1711111111111114e-06, "loss": 0.209, "step": 3575},
    {"epoch": 0.72, "grad_norm": 2.2596418857574463, "learning_rate": 3.1155555555555555e-06, "loss": 0.1934, "step": 3600},
    {"epoch": 0.725, "grad_norm": 2.279223918914795, "learning_rate": 3.0600000000000003e-06, "loss": 0.2069, "step": 3625},
    {"epoch": 0.73, "grad_norm": 1.8627978563308716, "learning_rate": 3.004444444444445e-06, "loss": 0.2093, "step": 3650},
    {"epoch": 0.735, "grad_norm": 2.2204179763793945, "learning_rate": 2.948888888888889e-06, "loss": 0.2281, "step": 3675},
    {"epoch": 0.74, "grad_norm": 1.9367740154266357, "learning_rate": 2.8933333333333337e-06, "loss": 0.2236, "step": 3700},
    {"epoch": 0.745, "grad_norm": 2.149648427963257, "learning_rate": 2.837777777777778e-06, "loss": 0.2187, "step": 3725},
    {"epoch": 0.75, "grad_norm": 2.0620782375335693, "learning_rate": 2.7822222222222222e-06, "loss": 0.2031, "step": 3750},
    {"epoch": 0.755, "grad_norm": 1.87172532081604, "learning_rate": 2.726666666666667e-06, "loss": 0.185, "step": 3775},
    {"epoch": 0.76, "grad_norm": 1.9918657541275024, "learning_rate": 2.6711111111111116e-06, "loss": 0.1866, "step": 3800},
    {"epoch": 0.765, "grad_norm": 1.9320180416107178, "learning_rate": 2.6155555555555556e-06, "loss": 0.1862, "step": 3825},
    {"epoch": 0.77, "grad_norm": 2.31062650680542, "learning_rate": 2.56e-06, "loss": 0.2484, "step": 3850},
    {"epoch": 0.775, "grad_norm": 2.489675283432007, "learning_rate": 2.504444444444445e-06, "loss": 0.2913, "step": 3875},
    {"epoch": 0.78, "grad_norm": 1.7794305086135864, "learning_rate": 2.448888888888889e-06, "loss": 0.2571, "step": 3900},
    {"epoch": 0.785, "grad_norm": 1.6303505897521973, "learning_rate": 2.3933333333333334e-06, "loss": 0.1488, "step": 3925},
    {"epoch": 0.79, "grad_norm": 1.9400599002838135, "learning_rate": 2.337777777777778e-06, "loss": 0.1121, "step": 3950},
    {"epoch": 0.795, "grad_norm": 1.8359787464141846, "learning_rate": 2.2822222222222223e-06, "loss": 0.1516, "step": 3975},
    {"epoch": 0.8, "grad_norm": 2.1083908081054688, "learning_rate": 2.226666666666667e-06, "loss": 0.1594, "step": 4000},
    {"epoch": 0.8, "eval_loss": 0.31362661719322205, "eval_runtime": 714.1383, "eval_samples_per_second": 22.627, "eval_steps_per_second": 2.829, "eval_wer": 15.195918681748424, "step": 4000},
    {"epoch": 0.805, "grad_norm": 2.0564794540405273, "learning_rate": 2.1711111111111113e-06, "loss": 0.1503, "step": 4025},
    {"epoch": 0.81, "grad_norm": 1.9138249158859253, "learning_rate": 2.1155555555555557e-06, "loss": 0.1554, "step": 4050},
    {"epoch": 0.815, "grad_norm": 2.1700758934020996, "learning_rate": 2.06e-06, "loss": 0.1744, "step": 4075},
    {"epoch": 0.82, "grad_norm": 2.3517487049102783, "learning_rate": 2.0044444444444446e-06, "loss": 0.1572, "step": 4100},
    {"epoch": 0.825, "grad_norm": 2.2419686317443848, "learning_rate": 1.948888888888889e-06, "loss": 0.1772, "step": 4125},
    {"epoch": 0.83, "grad_norm": 1.8083751201629639, "learning_rate": 1.8933333333333333e-06, "loss": 0.1927, "step": 4150},
    {"epoch": 0.835, "grad_norm": 1.8185911178588867, "learning_rate": 1.837777777777778e-06, "loss": 0.1648, "step": 4175},
    {"epoch": 0.84, "grad_norm": 1.9195200204849243, "learning_rate": 1.7822222222222225e-06, "loss": 0.1487, "step": 4200},
    {"epoch": 0.845, "grad_norm": 1.9267535209655762, "learning_rate": 1.7266666666666667e-06, "loss": 0.1351, "step": 4225},
    {"epoch": 0.85, "grad_norm": 1.8880534172058105, "learning_rate": 1.6711111111111112e-06, "loss": 0.1314, "step": 4250},
    {"epoch": 0.855, "grad_norm": 1.6127861738204956, "learning_rate": 1.6155555555555559e-06, "loss": 0.1276, "step": 4275},
    {"epoch": 0.86, "grad_norm": 1.9241377115249634, "learning_rate": 1.56e-06, "loss": 0.1328, "step": 4300},
    {"epoch": 0.865, "grad_norm": 1.9935392141342163, "learning_rate": 1.5044444444444446e-06, "loss": 0.1336, "step": 4325},
    {"epoch": 0.87, "grad_norm": 1.5158283710479736, "learning_rate": 1.4488888888888892e-06, "loss": 0.1275, "step": 4350},
    {"epoch": 0.875, "grad_norm": 1.600599765777588, "learning_rate": 1.3933333333333335e-06, "loss": 0.1245, "step": 4375},
    {"epoch": 0.88, "grad_norm": 1.8442989587783813, "learning_rate": 1.337777777777778e-06, "loss": 0.1321, "step": 4400},
    {"epoch": 0.885, "grad_norm": 2.629706621170044, "learning_rate": 1.2822222222222222e-06, "loss": 0.2734, "step": 4425},
    {"epoch": 0.89, "grad_norm": 1.6115596294403076, "learning_rate": 1.2266666666666666e-06, "loss": 0.322, "step": 4450},
    {"epoch": 0.895, "grad_norm": 2.2008018493652344, "learning_rate": 1.171111111111111e-06, "loss": 0.3303, "step": 4475},
    {"epoch": 0.9, "grad_norm": 1.9677278995513916, "learning_rate": 1.1155555555555558e-06, "loss": 0.2398, "step": 4500},
    {"epoch": 0.905, "grad_norm": 1.6797411441802979, "learning_rate": 1.06e-06, "loss": 0.1363, "step": 4525},
    {"epoch": 0.91, "grad_norm": 2.017742156982422, "learning_rate": 1.0044444444444445e-06, "loss": 0.1638, "step": 4550},
    {"epoch": 0.915, "grad_norm": 2.1174466609954834, "learning_rate": 9.488888888888889e-07, "loss": 0.1516, "step": 4575},
    {"epoch": 0.92, "grad_norm": 1.573102593421936, "learning_rate": 8.933333333333334e-07, "loss": 0.1523, "step": 4600},
    {"epoch": 0.925, "grad_norm": 2.056255578994751, "learning_rate": 8.37777777777778e-07, "loss": 0.1539, "step": 4625},
    {"epoch": 0.93, "grad_norm": 1.6231415271759033, "learning_rate": 7.822222222222223e-07, "loss": 0.1549, "step": 4650},
    {"epoch": 0.935, "grad_norm": 1.890496850013733, "learning_rate": 7.266666666666668e-07, "loss": 0.1548, "step": 4675},
    {"epoch": 0.94, "grad_norm": 1.7783973217010498, "learning_rate": 6.711111111111111e-07, "loss": 0.1373, "step": 4700},
    {"epoch": 0.945, "grad_norm": 2.0187580585479736, "learning_rate": 6.155555555555556e-07, "loss": 0.15, "step": 4725},
    {"epoch": 0.95, "grad_norm": 1.9916925430297852, "learning_rate": 5.6e-07, "loss": 0.1395, "step": 4750},
    {"epoch": 0.955, "grad_norm": 1.9438104629516602, "learning_rate": 5.044444444444445e-07, "loss": 0.1187, "step": 4775},
    {"epoch": 0.96, "grad_norm": 2.21679425239563, "learning_rate": 4.488888888888889e-07, "loss": 0.1349, "step": 4800},
    {"epoch": 0.965, "grad_norm": 1.6693533658981323, "learning_rate": 3.9333333333333336e-07, "loss": 0.1444, "step": 4825},
    {"epoch": 0.97, "grad_norm": 1.4958335161209106, "learning_rate": 3.3777777777777777e-07, "loss": 0.1357, "step": 4850},
    {"epoch": 0.975, "grad_norm": 2.0575404167175293, "learning_rate": 2.822222222222222e-07, "loss": 0.1274, "step": 4875},
    {"epoch": 0.98, "grad_norm": 1.9877169132232666, "learning_rate": 2.266666666666667e-07, "loss": 0.125, "step": 4900},
    {"epoch": 0.985, "grad_norm": 2.0602986812591553, "learning_rate": 1.7111111111111114e-07, "loss": 0.1416, "step": 4925},
    {"epoch": 0.99, "grad_norm": 2.5087714195251465, "learning_rate": 1.1555555555555556e-07, "loss": 0.1439, "step": 4950},
    {"epoch": 0.995, "grad_norm": 2.067476749420166, "learning_rate": 6.000000000000001e-08, "loss": 0.1382, "step": 4975},
    {"epoch": 1.0, "grad_norm": 1.8874026536941528, "learning_rate": 4.444444444444445e-09, "loss": 0.1371, "step": 5000},
    {"epoch": 1.0, "eval_loss": 0.3092347979545593, "eval_runtime": 711.7473, "eval_samples_per_second": 22.703, "eval_steps_per_second": 2.838, "eval_wer": 15.015790814663829, "step": 5000},
    {"epoch": 1.0, "step": 5000, "total_flos": 9.23473281024e+19, "train_loss": 0.1970298689365387, "train_runtime": 18494.1427, "train_samples_per_second": 17.303, "train_steps_per_second": 0.27}
  ],
  "logging_steps": 25,
  "max_steps": 5000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 9223372036854775807,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 9.23473281024e+19,
  "train_batch_size": 64,
  "trial_name": null,
  "trial_params": null
}