{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 20.0,
  "eval_steps": 100,
  "global_step": 2320,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.08620689655172414,
      "grad_norm": 9.595185279846191,
      "learning_rate": 9e-07,
      "loss": 9.1142,
      "step": 10
    },
    {
      "epoch": 0.1724137931034483,
      "grad_norm": 9.732986450195312,
      "learning_rate": 1.9e-06,
      "loss": 8.3446,
      "step": 20
    },
    {
      "epoch": 0.25862068965517243,
      "grad_norm": 14.272214889526367,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 8.6592,
      "step": 30
    },
    {
      "epoch": 0.3448275862068966,
      "grad_norm": 15.0160493850708,
      "learning_rate": 3.8e-06,
      "loss": 7.6985,
      "step": 40
    },
    {
      "epoch": 0.43103448275862066,
      "grad_norm": 16.610979080200195,
      "learning_rate": 4.800000000000001e-06,
      "loss": 6.9688,
      "step": 50
    },
    {
      "epoch": 0.5172413793103449,
      "grad_norm": 17.26924705505371,
      "learning_rate": 5.8e-06,
      "loss": 6.232,
      "step": 60
    },
    {
      "epoch": 0.603448275862069,
      "grad_norm": 11.347734451293945,
      "learning_rate": 6.800000000000001e-06,
      "loss": 4.7271,
      "step": 70
    },
    {
      "epoch": 0.6896551724137931,
      "grad_norm": 4.237112045288086,
      "learning_rate": 7.8e-06,
      "loss": 3.7919,
      "step": 80
    },
    {
      "epoch": 0.7758620689655172,
      "grad_norm": 1.8833028078079224,
      "learning_rate": 8.8e-06,
      "loss": 3.3967,
      "step": 90
    },
    {
      "epoch": 0.8620689655172413,
      "grad_norm": 1.3788093328475952,
      "learning_rate": 9.800000000000001e-06,
      "loss": 3.1618,
      "step": 100
    },
    {
      "epoch": 0.8620689655172413,
      "eval_loss": 3.1117007732391357,
      "eval_runtime": 40.0512,
      "eval_samples_per_second": 33.557,
      "eval_steps_per_second": 33.557,
      "eval_wer": 1.0,
      "step": 100
    },
    {
      "epoch": 0.9482758620689655,
      "grad_norm": 1.729278802871704,
      "learning_rate": 1.08e-05,
      "loss": 3.0865,
      "step": 110
    },
    {
      "epoch": 1.0344827586206897,
      "grad_norm": 1.905969500541687,
      "learning_rate": 1.18e-05,
      "loss": 3.0809,
      "step": 120
    },
    {
      "epoch": 1.1206896551724137,
      "grad_norm": 0.8360918760299683,
      "learning_rate": 1.2800000000000001e-05,
      "loss": 3.0346,
      "step": 130
    },
    {
      "epoch": 1.206896551724138,
      "grad_norm": 0.7653716206550598,
      "learning_rate": 1.3800000000000002e-05,
      "loss": 3.0106,
      "step": 140
    },
    {
      "epoch": 1.293103448275862,
      "grad_norm": 0.94779372215271,
      "learning_rate": 1.48e-05,
      "loss": 3.0165,
      "step": 150
    },
    {
      "epoch": 1.3793103448275863,
      "grad_norm": 0.8457741737365723,
      "learning_rate": 1.58e-05,
      "loss": 3.0,
      "step": 160
    },
    {
      "epoch": 1.4655172413793103,
      "grad_norm": 1.4369837045669556,
      "learning_rate": 1.6800000000000002e-05,
      "loss": 2.9903,
      "step": 170
    },
    {
      "epoch": 1.5517241379310345,
      "grad_norm": 1.8290436267852783,
      "learning_rate": 1.78e-05,
      "loss": 2.9852,
      "step": 180
    },
    {
      "epoch": 1.6379310344827587,
      "grad_norm": 1.1530190706253052,
      "learning_rate": 1.88e-05,
      "loss": 2.99,
      "step": 190
    },
    {
      "epoch": 1.7241379310344827,
      "grad_norm": 1.1261711120605469,
      "learning_rate": 1.9800000000000004e-05,
      "loss": 2.9798,
      "step": 200
    },
    {
      "epoch": 1.7241379310344827,
      "eval_loss": 2.9736363887786865,
      "eval_runtime": 39.6236,
      "eval_samples_per_second": 33.919,
      "eval_steps_per_second": 33.919,
      "eval_wer": 1.0,
      "step": 200
    },
    {
      "epoch": 1.8103448275862069,
      "grad_norm": 0.903380811214447,
      "learning_rate": 2.08e-05,
      "loss": 2.9718,
      "step": 210
    },
    {
      "epoch": 1.896551724137931,
      "grad_norm": 0.4889620244503021,
      "learning_rate": 2.18e-05,
      "loss": 2.9766,
      "step": 220
    },
    {
      "epoch": 1.9827586206896552,
      "grad_norm": 1.3861790895462036,
      "learning_rate": 2.2800000000000002e-05,
      "loss": 2.9658,
      "step": 230
    },
    {
      "epoch": 2.0689655172413794,
      "grad_norm": 0.7976490259170532,
      "learning_rate": 2.38e-05,
      "loss": 2.9588,
      "step": 240
    },
    {
      "epoch": 2.1551724137931036,
      "grad_norm": 0.698798418045044,
      "learning_rate": 2.48e-05,
      "loss": 2.9523,
      "step": 250
    },
    {
      "epoch": 2.2413793103448274,
      "grad_norm": 1.0858148336410522,
      "learning_rate": 2.58e-05,
      "loss": 2.9496,
      "step": 260
    },
    {
      "epoch": 2.3275862068965516,
      "grad_norm": 0.5658290386199951,
      "learning_rate": 2.6800000000000004e-05,
      "loss": 2.9421,
      "step": 270
    },
    {
      "epoch": 2.413793103448276,
      "grad_norm": 0.5713534355163574,
      "learning_rate": 2.7800000000000005e-05,
      "loss": 2.9427,
      "step": 280
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.7386118769645691,
      "learning_rate": 2.88e-05,
      "loss": 2.9228,
      "step": 290
    },
    {
      "epoch": 2.586206896551724,
      "grad_norm": 0.767816960811615,
      "learning_rate": 2.98e-05,
      "loss": 2.9144,
      "step": 300
    },
    {
      "epoch": 2.586206896551724,
      "eval_loss": 2.9074809551239014,
      "eval_runtime": 39.8997,
      "eval_samples_per_second": 33.684,
      "eval_steps_per_second": 33.684,
      "eval_wer": 1.0,
      "step": 300
    },
    {
      "epoch": 2.6724137931034484,
      "grad_norm": 0.8676608204841614,
      "learning_rate": 3.08e-05,
      "loss": 2.8965,
      "step": 310
    },
    {
      "epoch": 2.7586206896551726,
      "grad_norm": 1.6954621076583862,
      "learning_rate": 3.18e-05,
      "loss": 2.8815,
      "step": 320
    },
    {
      "epoch": 2.844827586206897,
      "grad_norm": 1.1631884574890137,
      "learning_rate": 3.2800000000000004e-05,
      "loss": 2.855,
      "step": 330
    },
    {
      "epoch": 2.9310344827586206,
      "grad_norm": 1.625454306602478,
      "learning_rate": 3.38e-05,
      "loss": 2.781,
      "step": 340
    },
    {
      "epoch": 3.0172413793103448,
      "grad_norm": 2.0763564109802246,
      "learning_rate": 3.48e-05,
      "loss": 2.7756,
      "step": 350
    },
    {
      "epoch": 3.103448275862069,
      "grad_norm": 2.036031723022461,
      "learning_rate": 3.58e-05,
      "loss": 2.6458,
      "step": 360
    },
    {
      "epoch": 3.189655172413793,
      "grad_norm": 1.366801142692566,
      "learning_rate": 3.68e-05,
      "loss": 2.5189,
      "step": 370
    },
    {
      "epoch": 3.2758620689655173,
      "grad_norm": 2.034527540206909,
      "learning_rate": 3.7800000000000004e-05,
      "loss": 2.433,
      "step": 380
    },
    {
      "epoch": 3.3620689655172415,
      "grad_norm": 3.8338165283203125,
      "learning_rate": 3.88e-05,
      "loss": 2.2885,
      "step": 390
    },
    {
      "epoch": 3.4482758620689653,
      "grad_norm": 2.3443217277526855,
      "learning_rate": 3.9800000000000005e-05,
      "loss": 2.1714,
      "step": 400
    },
    {
      "epoch": 3.4482758620689653,
      "eval_loss": 2.0944502353668213,
      "eval_runtime": 39.7668,
      "eval_samples_per_second": 33.797,
      "eval_steps_per_second": 33.797,
      "eval_wer": 1.0325047801147227,
      "step": 400
    },
    {
      "epoch": 3.5344827586206895,
      "grad_norm": 4.349735260009766,
      "learning_rate": 4.08e-05,
      "loss": 2.0881,
      "step": 410
    },
    {
      "epoch": 3.6206896551724137,
      "grad_norm": 2.450747489929199,
      "learning_rate": 4.18e-05,
      "loss": 1.9522,
      "step": 420
    },
    {
      "epoch": 3.706896551724138,
      "grad_norm": 2.2519729137420654,
      "learning_rate": 4.2800000000000004e-05,
      "loss": 1.8395,
      "step": 430
    },
    {
      "epoch": 3.793103448275862,
      "grad_norm": 2.693664789199829,
      "learning_rate": 4.38e-05,
      "loss": 1.7525,
      "step": 440
    },
    {
      "epoch": 3.8793103448275863,
      "grad_norm": 1.9744929075241089,
      "learning_rate": 4.4800000000000005e-05,
      "loss": 1.6222,
      "step": 450
    },
    {
      "epoch": 3.9655172413793105,
      "grad_norm": 3.802494764328003,
      "learning_rate": 4.58e-05,
      "loss": 1.5397,
      "step": 460
    },
    {
      "epoch": 4.051724137931035,
      "grad_norm": 2.301044225692749,
      "learning_rate": 4.6800000000000006e-05,
      "loss": 1.4376,
      "step": 470
    },
    {
      "epoch": 4.137931034482759,
      "grad_norm": 2.279372215270996,
      "learning_rate": 4.78e-05,
      "loss": 1.2829,
      "step": 480
    },
    {
      "epoch": 4.224137931034483,
      "grad_norm": 3.314736843109131,
      "learning_rate": 4.88e-05,
      "loss": 1.1976,
      "step": 490
    },
    {
      "epoch": 4.310344827586207,
      "grad_norm": 2.434694290161133,
      "learning_rate": 4.9800000000000004e-05,
      "loss": 1.1579,
      "step": 500
    },
    {
      "epoch": 4.310344827586207,
      "eval_loss": 1.045101284980774,
      "eval_runtime": 39.7455,
      "eval_samples_per_second": 33.815,
      "eval_steps_per_second": 33.815,
      "eval_wer": 0.8299189656742239,
      "step": 500
    },
    {
      "epoch": 4.396551724137931,
      "grad_norm": 1.8384031057357788,
      "learning_rate": 5.08e-05,
      "loss": 1.0684,
      "step": 510
    },
    {
      "epoch": 4.482758620689655,
      "grad_norm": 3.599148988723755,
      "learning_rate": 5.1800000000000005e-05,
      "loss": 1.0319,
      "step": 520
    },
    {
      "epoch": 4.568965517241379,
      "grad_norm": 2.066476583480835,
      "learning_rate": 5.28e-05,
      "loss": 0.9179,
      "step": 530
    },
    {
      "epoch": 4.655172413793103,
      "grad_norm": 2.2173750400543213,
      "learning_rate": 5.380000000000001e-05,
      "loss": 0.8838,
      "step": 540
    },
    {
      "epoch": 4.741379310344827,
      "grad_norm": 2.427091121673584,
      "learning_rate": 5.4800000000000004e-05,
      "loss": 0.8991,
      "step": 550
    },
    {
      "epoch": 4.827586206896552,
      "grad_norm": 2.7432241439819336,
      "learning_rate": 5.580000000000001e-05,
      "loss": 0.8,
      "step": 560
    },
    {
      "epoch": 4.913793103448276,
      "grad_norm": 3.254221200942993,
      "learning_rate": 5.68e-05,
      "loss": 0.7803,
      "step": 570
    },
    {
      "epoch": 5.0,
      "grad_norm": 4.457448482513428,
      "learning_rate": 5.7799999999999995e-05,
      "loss": 0.8205,
      "step": 580
    },
    {
      "epoch": 5.086206896551724,
      "grad_norm": 3.1023166179656982,
      "learning_rate": 5.88e-05,
      "loss": 0.6703,
      "step": 590
    },
    {
      "epoch": 5.172413793103448,
      "grad_norm": 2.5916504859924316,
      "learning_rate": 5.9800000000000003e-05,
      "loss": 0.6087,
      "step": 600
    },
    {
      "epoch": 5.172413793103448,
      "eval_loss": 0.6753795146942139,
      "eval_runtime": 39.7485,
      "eval_samples_per_second": 33.813,
      "eval_steps_per_second": 33.813,
      "eval_wer": 0.6440863152144223,
      "step": 600
    },
    {
      "epoch": 5.258620689655173,
      "grad_norm": 2.1707613468170166,
      "learning_rate": 6.08e-05,
      "loss": 0.6569,
      "step": 610
    },
    {
      "epoch": 5.344827586206897,
      "grad_norm": 2.4291555881500244,
      "learning_rate": 6.18e-05,
      "loss": 0.5627,
      "step": 620
    },
    {
      "epoch": 5.431034482758621,
      "grad_norm": 2.249617338180542,
      "learning_rate": 6.280000000000001e-05,
      "loss": 0.5381,
      "step": 630
    },
    {
      "epoch": 5.517241379310345,
      "grad_norm": 1.6661946773529053,
      "learning_rate": 6.38e-05,
      "loss": 0.6338,
      "step": 640
    },
    {
      "epoch": 5.603448275862069,
      "grad_norm": 2.60294771194458,
      "learning_rate": 6.48e-05,
      "loss": 0.5181,
      "step": 650
    },
    {
      "epoch": 5.689655172413794,
      "grad_norm": 3.3003089427948,
      "learning_rate": 6.58e-05,
      "loss": 0.5189,
      "step": 660
    },
    {
      "epoch": 5.775862068965517,
      "grad_norm": 1.880764126777649,
      "learning_rate": 6.680000000000001e-05,
      "loss": 0.564,
      "step": 670
    },
    {
      "epoch": 5.862068965517241,
      "grad_norm": 2.0575127601623535,
      "learning_rate": 6.780000000000001e-05,
      "loss": 0.4729,
      "step": 680
    },
    {
      "epoch": 5.948275862068965,
      "grad_norm": 2.5159761905670166,
      "learning_rate": 6.879999999999999e-05,
      "loss": 0.4899,
      "step": 690
    },
    {
      "epoch": 6.0344827586206895,
      "grad_norm": 1.4463504552841187,
      "learning_rate": 6.98e-05,
      "loss": 0.481,
      "step": 700
    },
    {
      "epoch": 6.0344827586206895,
      "eval_loss": 0.5275412201881409,
      "eval_runtime": 39.9601,
      "eval_samples_per_second": 33.634,
      "eval_steps_per_second": 33.634,
      "eval_wer": 0.5760721114449604,
      "step": 700
    },
    {
      "epoch": 6.120689655172414,
      "grad_norm": 1.788765549659729,
      "learning_rate": 7.08e-05,
      "loss": 0.3865,
      "step": 710
    },
    {
      "epoch": 6.206896551724138,
      "grad_norm": 1.862762212753296,
      "learning_rate": 7.18e-05,
      "loss": 0.3726,
      "step": 720
    },
    {
      "epoch": 6.293103448275862,
      "grad_norm": 1.6512093544006348,
      "learning_rate": 7.280000000000001e-05,
      "loss": 0.4116,
      "step": 730
    },
    {
      "epoch": 6.379310344827586,
      "grad_norm": 2.098067045211792,
      "learning_rate": 7.38e-05,
      "loss": 0.3779,
      "step": 740
    },
    {
      "epoch": 6.4655172413793105,
      "grad_norm": 3.3030078411102295,
      "learning_rate": 7.48e-05,
      "loss": 0.3728,
      "step": 750
    },
    {
      "epoch": 6.551724137931035,
      "grad_norm": 2.1799120903015137,
      "learning_rate": 7.58e-05,
      "loss": 0.4047,
      "step": 760
    },
    {
      "epoch": 6.637931034482759,
      "grad_norm": 1.862434983253479,
      "learning_rate": 7.680000000000001e-05,
      "loss": 0.313,
      "step": 770
    },
    {
      "epoch": 6.724137931034483,
      "grad_norm": 6.29113245010376,
      "learning_rate": 7.780000000000001e-05,
      "loss": 0.4052,
      "step": 780
    },
    {
      "epoch": 6.810344827586206,
      "grad_norm": 1.4220325946807861,
      "learning_rate": 7.88e-05,
      "loss": 0.3218,
      "step": 790
    },
    {
      "epoch": 6.896551724137931,
      "grad_norm": 2.586819648742676,
      "learning_rate": 7.98e-05,
      "loss": 0.3072,
      "step": 800
    },
    {
      "epoch": 6.896551724137931,
      "eval_loss": 0.4836220443248749,
      "eval_runtime": 39.8762,
      "eval_samples_per_second": 33.704,
      "eval_steps_per_second": 33.704,
      "eval_wer": 0.5264499681325685,
      "step": 800
    },
    {
      "epoch": 6.982758620689655,
      "grad_norm": 1.6589460372924805,
      "learning_rate": 8.080000000000001e-05,
      "loss": 0.3862,
      "step": 810
    },
    {
      "epoch": 7.068965517241379,
      "grad_norm": 1.7299175262451172,
      "learning_rate": 8.18e-05,
      "loss": 0.2938,
      "step": 820
    },
    {
      "epoch": 7.155172413793103,
      "grad_norm": 2.0545098781585693,
      "learning_rate": 8.28e-05,
      "loss": 0.249,
      "step": 830
    },
    {
      "epoch": 7.241379310344827,
      "grad_norm": 24.935670852661133,
      "learning_rate": 8.38e-05,
      "loss": 0.3202,
      "step": 840
    },
    {
      "epoch": 7.327586206896552,
      "grad_norm": 2.497840642929077,
      "learning_rate": 8.48e-05,
      "loss": 0.2803,
      "step": 850
    },
    {
      "epoch": 7.413793103448276,
      "grad_norm": 2.698636531829834,
      "learning_rate": 8.58e-05,
      "loss": 0.2473,
      "step": 860
    },
    {
      "epoch": 7.5,
      "grad_norm": 1.4561227560043335,
      "learning_rate": 8.680000000000001e-05,
      "loss": 0.3223,
      "step": 870
    },
    {
      "epoch": 7.586206896551724,
      "grad_norm": 1.7760556936264038,
      "learning_rate": 8.78e-05,
      "loss": 0.2481,
      "step": 880
    },
    {
      "epoch": 7.672413793103448,
      "grad_norm": 2.308103084564209,
      "learning_rate": 8.88e-05,
      "loss": 0.2545,
      "step": 890
    },
    {
      "epoch": 7.758620689655173,
      "grad_norm": 1.4128385782241821,
      "learning_rate": 8.98e-05,
      "loss": 0.332,
      "step": 900
    },
    {
      "epoch": 7.758620689655173,
      "eval_loss": 0.44030094146728516,
      "eval_runtime": 39.9401,
      "eval_samples_per_second": 33.65,
      "eval_steps_per_second": 33.65,
      "eval_wer": 0.5233542747883092,
      "step": 900
    },
    {
      "epoch": 7.844827586206897,
      "grad_norm": 1.7903906106948853,
      "learning_rate": 9.080000000000001e-05,
      "loss": 0.2411,
      "step": 910
    },
    {
      "epoch": 7.931034482758621,
      "grad_norm": 2.0804216861724854,
      "learning_rate": 9.180000000000001e-05,
      "loss": 0.2707,
      "step": 920
    },
    {
      "epoch": 8.017241379310345,
      "grad_norm": 1.4420605897903442,
      "learning_rate": 9.28e-05,
      "loss": 0.3186,
      "step": 930
    },
    {
      "epoch": 8.10344827586207,
      "grad_norm": 2.2910854816436768,
      "learning_rate": 9.38e-05,
      "loss": 0.1937,
      "step": 940
    },
    {
      "epoch": 8.189655172413794,
      "grad_norm": 3.5892796516418457,
      "learning_rate": 9.48e-05,
      "loss": 0.2321,
      "step": 950
    },
    {
      "epoch": 8.275862068965518,
      "grad_norm": 1.6509956121444702,
      "learning_rate": 9.58e-05,
      "loss": 0.2868,
      "step": 960
    },
    {
      "epoch": 8.362068965517242,
      "grad_norm": 1.6983604431152344,
      "learning_rate": 9.680000000000001e-05,
      "loss": 0.2004,
      "step": 970
    },
    {
      "epoch": 8.448275862068966,
      "grad_norm": 2.061176061630249,
      "learning_rate": 9.78e-05,
      "loss": 0.2025,
      "step": 980
    },
    {
      "epoch": 8.53448275862069,
      "grad_norm": 1.7732270956039429,
      "learning_rate": 9.88e-05,
      "loss": 0.2598,
      "step": 990
    },
    {
      "epoch": 8.620689655172415,
      "grad_norm": 1.8335466384887695,
      "learning_rate": 9.98e-05,
      "loss": 0.1876,
      "step": 1000
    },
    {
      "epoch": 8.620689655172415,
      "eval_loss": 0.4757933020591736,
      "eval_runtime": 39.8291,
      "eval_samples_per_second": 33.744,
      "eval_steps_per_second": 33.744,
      "eval_wer": 0.5221706273331512,
      "step": 1000
    },
    {
      "epoch": 8.706896551724139,
      "grad_norm": 2.52902889251709,
      "learning_rate": 9.939393939393939e-05,
      "loss": 0.2456,
      "step": 1010
    },
    {
      "epoch": 8.793103448275861,
      "grad_norm": 1.7294162511825562,
      "learning_rate": 9.863636363636364e-05,
      "loss": 0.2499,
      "step": 1020
    },
    {
      "epoch": 8.879310344827585,
      "grad_norm": 21.9121150970459,
      "learning_rate": 9.787878787878789e-05,
      "loss": 0.1854,
      "step": 1030
    },
    {
      "epoch": 8.96551724137931,
      "grad_norm": 3.9164559841156006,
      "learning_rate": 9.712121212121212e-05,
      "loss": 0.2576,
      "step": 1040
    },
    {
      "epoch": 9.051724137931034,
      "grad_norm": 1.239221215248108,
      "learning_rate": 9.636363636363637e-05,
      "loss": 0.2118,
      "step": 1050
    },
    {
      "epoch": 9.137931034482758,
      "grad_norm": 3.1416544914245605,
      "learning_rate": 9.560606060606061e-05,
      "loss": 0.1577,
      "step": 1060
    },
    {
      "epoch": 9.224137931034482,
      "grad_norm": 2.4253621101379395,
      "learning_rate": 9.484848484848486e-05,
      "loss": 0.2092,
      "step": 1070
    },
    {
      "epoch": 9.310344827586206,
      "grad_norm": 1.194345474243164,
      "learning_rate": 9.40909090909091e-05,
      "loss": 0.1876,
      "step": 1080
    },
    {
      "epoch": 9.39655172413793,
      "grad_norm": 2.411029100418091,
      "learning_rate": 9.333333333333334e-05,
      "loss": 0.1546,
      "step": 1090
    },
    {
      "epoch": 9.482758620689655,
      "grad_norm": 3.246082067489624,
      "learning_rate": 9.257575757575758e-05,
      "loss": 0.2232,
      "step": 1100
    },
    {
      "epoch": 9.482758620689655,
      "eval_loss": 0.45077577233314514,
      "eval_runtime": 39.9221,
      "eval_samples_per_second": 33.666,
      "eval_steps_per_second": 33.666,
      "eval_wer": 0.48921059819721385,
      "step": 1100
    },
    {
      "epoch": 9.568965517241379,
      "grad_norm": 1.3427454233169556,
      "learning_rate": 9.181818181818183e-05,
      "loss": 0.1777,
      "step": 1110
    },
    {
      "epoch": 9.655172413793103,
      "grad_norm": 1.5090447664260864,
      "learning_rate": 9.106060606060606e-05,
      "loss": 0.1646,
      "step": 1120
    },
    {
      "epoch": 9.741379310344827,
      "grad_norm": 1.3060975074768066,
      "learning_rate": 9.030303030303031e-05,
      "loss": 0.225,
      "step": 1130
    },
    {
      "epoch": 9.827586206896552,
      "grad_norm": 1.3011540174484253,
      "learning_rate": 8.954545454545455e-05,
      "loss": 0.1552,
      "step": 1140
    },
    {
      "epoch": 9.913793103448276,
      "grad_norm": 1.9938538074493408,
      "learning_rate": 8.87878787878788e-05,
      "loss": 0.1715,
      "step": 1150
    },
    {
      "epoch": 10.0,
      "grad_norm": 3.334385395050049,
      "learning_rate": 8.803030303030304e-05,
      "loss": 0.2092,
      "step": 1160
    },
    {
      "epoch": 10.086206896551724,
      "grad_norm": 1.011092185974121,
      "learning_rate": 8.727272727272727e-05,
      "loss": 0.14,
      "step": 1170
    },
    {
      "epoch": 10.172413793103448,
      "grad_norm": 2.517902135848999,
      "learning_rate": 8.651515151515152e-05,
      "loss": 0.1512,
      "step": 1180
    },
    {
      "epoch": 10.258620689655173,
      "grad_norm": 1.2418378591537476,
      "learning_rate": 8.575757575757576e-05,
      "loss": 0.1846,
      "step": 1190
    },
    {
      "epoch": 10.344827586206897,
      "grad_norm": 1.5885329246520996,
      "learning_rate": 8.5e-05,
      "loss": 0.1332,
      "step": 1200
    },
    {
      "epoch": 10.344827586206897,
      "eval_loss": 0.4394075274467468,
      "eval_runtime": 39.9367,
      "eval_samples_per_second": 33.653,
      "eval_steps_per_second": 33.653,
      "eval_wer": 0.4740052808886461,
      "step": 1200
    },
    {
      "epoch": 10.431034482758621,
      "grad_norm": 1.2539469003677368,
      "learning_rate": 8.424242424242424e-05,
      "loss": 0.1485,
      "step": 1210
    },
    {
      "epoch": 10.517241379310345,
      "grad_norm": 1.357601284980774,
      "learning_rate": 8.348484848484849e-05,
      "loss": 0.1988,
      "step": 1220
    },
    {
      "epoch": 10.60344827586207,
      "grad_norm": 2.0564587116241455,
      "learning_rate": 8.272727272727273e-05,
      "loss": 0.137,
      "step": 1230
    },
    {
      "epoch": 10.689655172413794,
      "grad_norm": 2.48364520072937,
      "learning_rate": 8.196969696969698e-05,
      "loss": 0.1245,
      "step": 1240
    },
    {
      "epoch": 10.775862068965518,
      "grad_norm": 1.015891671180725,
      "learning_rate": 8.121212121212121e-05,
      "loss": 0.1602,
      "step": 1250
    },
    {
      "epoch": 10.862068965517242,
      "grad_norm": 1.1023950576782227,
      "learning_rate": 8.045454545454546e-05,
      "loss": 0.1215,
      "step": 1260
    },
    {
      "epoch": 10.948275862068966,
      "grad_norm": 2.703427791595459,
      "learning_rate": 7.96969696969697e-05,
      "loss": 0.1621,
      "step": 1270
    },
    {
      "epoch": 11.03448275862069,
      "grad_norm": 1.1821691989898682,
      "learning_rate": 7.893939393939395e-05,
      "loss": 0.1651,
      "step": 1280
    },
    {
      "epoch": 11.120689655172415,
      "grad_norm": 0.930283784866333,
      "learning_rate": 7.818181818181818e-05,
      "loss": 0.1066,
      "step": 1290
    },
    {
      "epoch": 11.206896551724139,
      "grad_norm": 1.6548758745193481,
      "learning_rate": 7.742424242424243e-05,
      "loss": 0.1085,
      "step": 1300
    },
    {
      "epoch": 11.206896551724139,
      "eval_loss": 0.4466467499732971,
      "eval_runtime": 39.8633,
      "eval_samples_per_second": 33.715,
      "eval_steps_per_second": 33.715,
      "eval_wer": 0.46207775653282346,
      "step": 1300
    },
    {
      "epoch": 11.293103448275861,
      "grad_norm": 1.1760716438293457,
      "learning_rate": 7.666666666666667e-05,
      "loss": 0.1418,
      "step": 1310
    },
    {
      "epoch": 11.379310344827585,
      "grad_norm": 2.1062755584716797,
      "learning_rate": 7.59090909090909e-05,
      "loss": 0.1133,
      "step": 1320
    },
    {
      "epoch": 11.46551724137931,
      "grad_norm": 2.67399001121521,
      "learning_rate": 7.515151515151515e-05,
      "loss": 0.1318,
      "step": 1330
    },
    {
      "epoch": 11.551724137931034,
      "grad_norm": 1.0049142837524414,
      "learning_rate": 7.439393939393939e-05,
      "loss": 0.1474,
      "step": 1340
    },
    {
      "epoch": 11.637931034482758,
      "grad_norm": 1.586559772491455,
      "learning_rate": 7.363636363636364e-05,
      "loss": 0.0908,
      "step": 1350
    },
    {
      "epoch": 11.724137931034482,
      "grad_norm": 3.784040927886963,
      "learning_rate": 7.287878787878788e-05,
      "loss": 0.1521,
      "step": 1360
    },
    {
      "epoch": 11.810344827586206,
      "grad_norm": 1.125501275062561,
      "learning_rate": 7.212121212121213e-05,
      "loss": 0.1163,
      "step": 1370
    },
    {
      "epoch": 11.89655172413793,
      "grad_norm": 2.1989808082580566,
      "learning_rate": 7.136363636363636e-05,
      "loss": 0.1109,
      "step": 1380
    },
    {
      "epoch": 11.982758620689655,
      "grad_norm": 1.1287301778793335,
      "learning_rate": 7.060606060606061e-05,
      "loss": 0.152,
      "step": 1390
    },
    {
      "epoch": 12.068965517241379,
      "grad_norm": 1.538678765296936,
      "learning_rate": 6.984848484848485e-05,
      "loss": 0.098,
      "step": 1400
    },
    {
      "epoch": 12.068965517241379,
      "eval_loss": 0.42302384972572327,
      "eval_runtime": 40.1773,
      "eval_samples_per_second": 33.452,
      "eval_steps_per_second": 33.452,
      "eval_wer": 0.44933078393881454,
      "step": 1400
    },
    {
      "epoch": 12.155172413793103,
      "grad_norm": 1.400772213935852,
      "learning_rate": 6.90909090909091e-05,
      "loss": 0.092,
      "step": 1410
    },
    {
      "epoch": 12.241379310344827,
      "grad_norm": 3.6780846118927,
      "learning_rate": 6.833333333333333e-05,
      "loss": 0.1649,
      "step": 1420
    },
    {
      "epoch": 12.327586206896552,
      "grad_norm": 1.5424057245254517,
      "learning_rate": 6.757575757575758e-05,
      "loss": 0.091,
      "step": 1430
    },
    {
      "epoch": 12.413793103448276,
      "grad_norm": 1.4868180751800537,
      "learning_rate": 6.681818181818183e-05,
      "loss": 0.0869,
      "step": 1440
    },
    {
      "epoch": 12.5,
      "grad_norm": 1.1947145462036133,
      "learning_rate": 6.606060606060607e-05,
      "loss": 0.1499,
      "step": 1450
    },
    {
      "epoch": 12.586206896551724,
      "grad_norm": 1.0430784225463867,
      "learning_rate": 6.530303030303032e-05,
      "loss": 0.0954,
      "step": 1460
    },
    {
      "epoch": 12.672413793103448,
      "grad_norm": 2.4261584281921387,
      "learning_rate": 6.454545454545455e-05,
      "loss": 0.1032,
      "step": 1470
    },
    {
      "epoch": 12.758620689655173,
      "grad_norm": 1.033467411994934,
      "learning_rate": 6.37878787878788e-05,
      "loss": 0.1158,
      "step": 1480
    },
    {
      "epoch": 12.844827586206897,
      "grad_norm": 1.1535651683807373,
      "learning_rate": 6.303030303030302e-05,
      "loss": 0.0864,
      "step": 1490
    },
    {
      "epoch": 12.931034482758621,
      "grad_norm": 1.28826105594635,
      "learning_rate": 6.227272727272727e-05,
      "loss": 0.1219,
      "step": 1500
    },
    {
      "epoch": 12.931034482758621,
      "eval_loss": 0.418023020029068,
      "eval_runtime": 40.2192,
      "eval_samples_per_second": 33.417,
      "eval_steps_per_second": 33.417,
      "eval_wer": 0.44596194118182647,
      "step": 1500
    },
    {
      "epoch": 13.017241379310345,
      "grad_norm": 1.055411458015442,
      "learning_rate": 6.151515151515151e-05,
      "loss": 0.1289,
      "step": 1510
    },
    {
      "epoch": 13.10344827586207,
      "grad_norm": 1.1269094944000244,
      "learning_rate": 6.075757575757576e-05,
      "loss": 0.0776,
      "step": 1520
    },
    {
      "epoch": 13.189655172413794,
      "grad_norm": 1.7149118185043335,
      "learning_rate": 6e-05,
      "loss": 0.0871,
      "step": 1530
    },
    {
      "epoch": 13.275862068965518,
      "grad_norm": 1.7456856966018677,
      "learning_rate": 5.9242424242424244e-05,
      "loss": 0.1087,
      "step": 1540
    },
    {
      "epoch": 13.362068965517242,
      "grad_norm": 1.3434715270996094,
      "learning_rate": 5.848484848484849e-05,
      "loss": 0.0821,
      "step": 1550
    },
    {
      "epoch": 13.448275862068966,
      "grad_norm": 2.103512763977051,
      "learning_rate": 5.772727272727273e-05,
      "loss": 0.0878,
      "step": 1560
    },
    {
      "epoch": 13.53448275862069,
      "grad_norm": 1.240224838256836,
      "learning_rate": 5.696969696969697e-05,
      "loss": 0.1044,
      "step": 1570
    },
    {
      "epoch": 13.620689655172415,
      "grad_norm": 0.7336703538894653,
      "learning_rate": 5.6212121212121215e-05,
      "loss": 0.0753,
      "step": 1580
    },
    {
      "epoch": 13.706896551724139,
      "grad_norm": 2.293342351913452,
      "learning_rate": 5.545454545454546e-05,
      "loss": 0.1059,
      "step": 1590
    },
    {
      "epoch": 13.793103448275861,
      "grad_norm": 1.1853971481323242,
      "learning_rate": 5.46969696969697e-05,
      "loss": 0.1021,
      "step": 1600
    },
    {
      "epoch": 13.793103448275861,
      "eval_loss": 0.41785839200019836,
      "eval_runtime": 40.2906,
      "eval_samples_per_second": 33.358,
      "eval_steps_per_second": 33.358,
      "eval_wer": 0.4405900027314941,
      "step": 1600
    },
    {
      "epoch": 13.879310344827585,
      "grad_norm": 1.331200361251831,
      "learning_rate": 5.393939393939394e-05,
      "loss": 0.0648,
      "step": 1610
    },
    {
      "epoch": 13.96551724137931,
      "grad_norm": 2.28397536277771,
      "learning_rate": 5.3181818181818186e-05,
      "loss": 0.1121,
      "step": 1620
    },
    {
      "epoch": 14.051724137931034,
      "grad_norm": 0.9436893463134766,
      "learning_rate": 5.242424242424243e-05,
      "loss": 0.0725,
      "step": 1630
    },
    {
      "epoch": 14.137931034482758,
      "grad_norm": 1.6113288402557373,
      "learning_rate": 5.166666666666667e-05,
      "loss": 0.0691,
      "step": 1640
    },
    {
      "epoch": 14.224137931034482,
      "grad_norm": 2.479888439178467,
      "learning_rate": 5.090909090909091e-05,
      "loss": 0.0979,
      "step": 1650
    },
    {
      "epoch": 14.310344827586206,
      "grad_norm": 1.006616473197937,
      "learning_rate": 5.015151515151515e-05,
      "loss": 0.0909,
      "step": 1660
    },
    {
      "epoch": 14.39655172413793,
      "grad_norm": 1.4571704864501953,
      "learning_rate": 4.93939393939394e-05,
      "loss": 0.0761,
      "step": 1670
    },
    {
      "epoch": 14.482758620689655,
      "grad_norm": 1.5729875564575195,
      "learning_rate": 4.863636363636364e-05,
      "loss": 0.0862,
      "step": 1680
    },
    {
      "epoch": 14.568965517241379,
      "grad_norm": 1.2180376052856445,
      "learning_rate": 4.787878787878788e-05,
      "loss": 0.0646,
      "step": 1690
    },
    {
      "epoch": 14.655172413793103,
      "grad_norm": 1.7464072704315186,
      "learning_rate": 4.712121212121212e-05,
      "loss": 0.0741,
      "step": 1700
    },
    {
      "epoch": 14.655172413793103,
      "eval_loss": 0.4113341271877289,
      "eval_runtime": 40.2841,
      "eval_samples_per_second": 33.363,
      "eval_steps_per_second": 33.363,
      "eval_wer": 0.4309387234817445,
      "step": 1700
    },
    {
      "epoch": 14.741379310344827,
      "grad_norm": 0.8571386337280273,
      "learning_rate": 4.6439393939393944e-05,
      "loss": 0.1315,
      "step": 1710
    },
    {
      "epoch": 14.827586206896552,
      "grad_norm": 1.331377387046814,
      "learning_rate": 4.5681818181818186e-05,
      "loss": 0.0603,
      "step": 1720
    },
    {
      "epoch": 14.913793103448276,
      "grad_norm": 1.5398732423782349,
      "learning_rate": 4.492424242424242e-05,
      "loss": 0.0796,
      "step": 1730
    },
    {
      "epoch": 15.0,
      "grad_norm": 3.689671754837036,
      "learning_rate": 4.4166666666666665e-05,
      "loss": 0.085,
      "step": 1740
    },
    {
      "epoch": 15.086206896551724,
      "grad_norm": 1.132613182067871,
      "learning_rate": 4.340909090909091e-05,
      "loss": 0.0544,
      "step": 1750
    },
    {
      "epoch": 15.172413793103448,
      "grad_norm": 1.5951859951019287,
      "learning_rate": 4.265151515151515e-05,
      "loss": 0.0601,
      "step": 1760
    },
    {
      "epoch": 15.258620689655173,
      "grad_norm": 0.5179944634437561,
      "learning_rate": 4.189393939393939e-05,
      "loss": 0.097,
      "step": 1770
    },
    {
      "epoch": 15.344827586206897,
      "grad_norm": 0.9744370579719543,
      "learning_rate": 4.113636363636364e-05,
      "loss": 0.0596,
      "step": 1780
    },
    {
      "epoch": 15.431034482758621,
      "grad_norm": 1.8794275522232056,
      "learning_rate": 4.0378787878787885e-05,
      "loss": 0.0677,
      "step": 1790
    },
    {
      "epoch": 15.517241379310345,
      "grad_norm": 0.748386025428772,
      "learning_rate": 3.962121212121213e-05,
      "loss": 0.0896,
      "step": 1800
    },
    {
      "epoch": 15.517241379310345,
      "eval_loss": 0.43920788168907166,
      "eval_runtime": 40.1997,
      "eval_samples_per_second": 33.433,
      "eval_steps_per_second": 33.433,
      "eval_wer": 0.4307566238732587,
      "step": 1800
    },
    {
      "epoch": 15.60344827586207,
      "grad_norm": 0.9639837145805359,
      "learning_rate": 3.8863636363636364e-05,
      "loss": 0.0604,
      "step": 1810
    },
    {
      "epoch": 15.689655172413794,
      "grad_norm": 1.9640839099884033,
      "learning_rate": 3.810606060606061e-05,
      "loss": 0.0711,
      "step": 1820
    },
    {
      "epoch": 15.775862068965518,
      "grad_norm": 1.4438735246658325,
      "learning_rate": 3.734848484848485e-05,
      "loss": 0.0867,
      "step": 1830
    },
    {
      "epoch": 15.862068965517242,
      "grad_norm": 1.0062426328659058,
      "learning_rate": 3.659090909090909e-05,
      "loss": 0.0605,
      "step": 1840
    },
    {
      "epoch": 15.948275862068966,
      "grad_norm": 1.6331523656845093,
      "learning_rate": 3.5833333333333335e-05,
      "loss": 0.0662,
      "step": 1850
    },
    {
      "epoch": 16.03448275862069,
      "grad_norm": 0.8070217370986938,
      "learning_rate": 3.507575757575758e-05,
      "loss": 0.0765,
      "step": 1860
    },
    {
      "epoch": 16.120689655172413,
      "grad_norm": 1.4137670993804932,
      "learning_rate": 3.431818181818182e-05,
      "loss": 0.0537,
      "step": 1870
    },
    {
      "epoch": 16.20689655172414,
      "grad_norm": 1.5437769889831543,
      "learning_rate": 3.356060606060606e-05,
      "loss": 0.0684,
      "step": 1880
    },
    {
      "epoch": 16.29310344827586,
      "grad_norm": 0.90281081199646,
      "learning_rate": 3.2803030303030305e-05,
      "loss": 0.0744,
      "step": 1890
    },
    {
      "epoch": 16.379310344827587,
      "grad_norm": 1.139837622642517,
      "learning_rate": 3.204545454545455e-05,
      "loss": 0.0492,
      "step": 1900
    },
    {
      "epoch": 16.379310344827587,
      "eval_loss": 0.4201890528202057,
      "eval_runtime": 40.1502,
      "eval_samples_per_second": 33.474,
      "eval_steps_per_second": 33.474,
      "eval_wer": 0.4313029226987162,
      "step": 1900
    },
    {
      "epoch": 16.46551724137931,
      "grad_norm": 1.679457426071167,
      "learning_rate": 3.128787878787879e-05,
      "loss": 0.0652,
      "step": 1910
    },
    {
      "epoch": 16.551724137931036,
      "grad_norm": 0.6661111116409302,
      "learning_rate": 3.0530303030303034e-05,
      "loss": 0.0649,
      "step": 1920
    },
    {
      "epoch": 16.637931034482758,
      "grad_norm": 1.1774355173110962,
      "learning_rate": 2.9772727272727273e-05,
      "loss": 0.0469,
      "step": 1930
    },
    {
      "epoch": 16.724137931034484,
      "grad_norm": 1.783923864364624,
      "learning_rate": 2.901515151515152e-05,
      "loss": 0.0752,
      "step": 1940
    },
    {
      "epoch": 16.810344827586206,
      "grad_norm": 1.176321268081665,
      "learning_rate": 2.825757575757576e-05,
      "loss": 0.0519,
      "step": 1950
    },
    {
      "epoch": 16.896551724137932,
      "grad_norm": 1.3150608539581299,
      "learning_rate": 2.7500000000000004e-05,
      "loss": 0.0547,
      "step": 1960
    },
    {
      "epoch": 16.982758620689655,
      "grad_norm": 0.983769953250885,
      "learning_rate": 2.674242424242424e-05,
      "loss": 0.0799,
      "step": 1970
    },
    {
      "epoch": 17.06896551724138,
      "grad_norm": 0.996890127658844,
      "learning_rate": 2.5984848484848483e-05,
      "loss": 0.0577,
      "step": 1980
    },
    {
      "epoch": 17.155172413793103,
      "grad_norm": 2.3034253120422363,
      "learning_rate": 2.5227272727272726e-05,
      "loss": 0.0515,
      "step": 1990
    },
    {
      "epoch": 17.24137931034483,
      "grad_norm": 3.7528610229492188,
      "learning_rate": 2.4469696969696972e-05,
      "loss": 0.0759,
      "step": 2000
    },
    {
      "epoch": 17.24137931034483,
      "eval_loss": 0.43480169773101807,
      "eval_runtime": 40.017,
      "eval_samples_per_second": 33.586,
      "eval_steps_per_second": 33.586,
      "eval_wer": 0.4207411454065374,
      "step": 2000
    },
    {
      "epoch": 17.32758620689655,
      "grad_norm": 0.6646668314933777,
      "learning_rate": 2.3712121212121214e-05,
      "loss": 0.0419,
      "step": 2010
    },
    {
      "epoch": 17.413793103448278,
      "grad_norm": 1.3250740766525269,
      "learning_rate": 2.2954545454545457e-05,
      "loss": 0.0595,
      "step": 2020
    },
    {
      "epoch": 17.5,
      "grad_norm": 0.8094995021820068,
      "learning_rate": 2.21969696969697e-05,
      "loss": 0.0691,
      "step": 2030
    },
    {
      "epoch": 17.586206896551722,
      "grad_norm": 0.846946120262146,
      "learning_rate": 2.143939393939394e-05,
      "loss": 0.052,
      "step": 2040
    },
    {
      "epoch": 17.67241379310345,
      "grad_norm": 1.652417540550232,
      "learning_rate": 2.0681818181818182e-05,
      "loss": 0.0565,
      "step": 2050
    },
    {
      "epoch": 17.75862068965517,
      "grad_norm": 1.0080279111862183,
      "learning_rate": 1.9924242424242425e-05,
      "loss": 0.0745,
      "step": 2060
    },
    {
      "epoch": 17.844827586206897,
      "grad_norm": 0.7252691388130188,
      "learning_rate": 1.9166666666666667e-05,
      "loss": 0.0513,
      "step": 2070
    },
    {
      "epoch": 17.93103448275862,
      "grad_norm": 1.58548903465271,
      "learning_rate": 1.840909090909091e-05,
      "loss": 0.055,
      "step": 2080
    },
    {
      "epoch": 18.017241379310345,
      "grad_norm": 0.6634634733200073,
      "learning_rate": 1.7651515151515153e-05,
      "loss": 0.0658,
      "step": 2090
    },
    {
      "epoch": 18.103448275862068,
      "grad_norm": 1.1495524644851685,
      "learning_rate": 1.6893939393939395e-05,
      "loss": 0.0406,
      "step": 2100
    },
    {
      "epoch": 18.103448275862068,
      "eval_loss": 0.44191813468933105,
      "eval_runtime": 40.0967,
      "eval_samples_per_second": 33.519,
      "eval_steps_per_second": 33.519,
      "eval_wer": 0.42046799599380863,
      "step": 2100
    },
    {
      "epoch": 18.189655172413794,
      "grad_norm": 0.9788354635238647,
      "learning_rate": 1.6136363636363638e-05,
      "loss": 0.0381,
      "step": 2110
    },
    {
      "epoch": 18.275862068965516,
      "grad_norm": 1.093633770942688,
      "learning_rate": 1.5378787878787877e-05,
      "loss": 0.071,
      "step": 2120
    },
    {
      "epoch": 18.362068965517242,
      "grad_norm": 0.7164376974105835,
      "learning_rate": 1.4621212121212122e-05,
      "loss": 0.0439,
      "step": 2130
    },
    {
      "epoch": 18.448275862068964,
      "grad_norm": 0.9887032508850098,
      "learning_rate": 1.3863636363636364e-05,
      "loss": 0.0481,
      "step": 2140
    },
    {
      "epoch": 18.53448275862069,
      "grad_norm": 0.45052286982536316,
      "learning_rate": 1.3106060606060607e-05,
      "loss": 0.0571,
      "step": 2150
    },
    {
      "epoch": 18.620689655172413,
      "grad_norm": 1.167181134223938,
      "learning_rate": 1.234848484848485e-05,
      "loss": 0.0452,
      "step": 2160
    },
    {
      "epoch": 18.70689655172414,
      "grad_norm": 1.378661870956421,
      "learning_rate": 1.159090909090909e-05,
      "loss": 0.0643,
      "step": 2170
    },
    {
      "epoch": 18.79310344827586,
      "grad_norm": 0.854932963848114,
      "learning_rate": 1.0833333333333334e-05,
      "loss": 0.0587,
      "step": 2180
    },
    {
      "epoch": 18.879310344827587,
      "grad_norm": 0.8007526397705078,
      "learning_rate": 1.0075757575757576e-05,
      "loss": 0.0395,
      "step": 2190
    },
    {
      "epoch": 18.96551724137931,
      "grad_norm": 3.317830801010132,
      "learning_rate": 9.318181818181819e-06,
      "loss": 0.074,
      "step": 2200
    },
    {
      "epoch": 18.96551724137931,
      "eval_loss": 0.43061742186546326,
      "eval_runtime": 40.0034,
      "eval_samples_per_second": 33.597,
      "eval_steps_per_second": 33.597,
      "eval_wer": 0.420012746972594,
      "step": 2200
    },
    {
      "epoch": 19.051724137931036,
      "grad_norm": 0.7710875272750854,
      "learning_rate": 8.56060606060606e-06,
      "loss": 0.046,
      "step": 2210
    },
    {
      "epoch": 19.137931034482758,
      "grad_norm": 0.5200530886650085,
      "learning_rate": 7.803030303030304e-06,
      "loss": 0.0394,
      "step": 2220
    },
    {
      "epoch": 19.224137931034484,
      "grad_norm": 1.3544327020645142,
      "learning_rate": 7.045454545454545e-06,
      "loss": 0.0582,
      "step": 2230
    },
    {
      "epoch": 19.310344827586206,
      "grad_norm": 0.8653574585914612,
      "learning_rate": 6.287878787878789e-06,
      "loss": 0.0606,
      "step": 2240
    },
    {
      "epoch": 19.396551724137932,
      "grad_norm": 1.5852700471878052,
      "learning_rate": 5.530303030303031e-06,
      "loss": 0.0367,
      "step": 2250
    },
    {
      "epoch": 19.482758620689655,
      "grad_norm": 2.2167246341705322,
      "learning_rate": 4.772727272727273e-06,
      "loss": 0.0782,
      "step": 2260
    },
    {
      "epoch": 19.56896551724138,
      "grad_norm": 0.5891330242156982,
      "learning_rate": 4.015151515151515e-06,
      "loss": 0.0416,
      "step": 2270
    },
    {
      "epoch": 19.655172413793103,
      "grad_norm": 1.1137330532073975,
      "learning_rate": 3.257575757575758e-06,
      "loss": 0.0515,
      "step": 2280
    },
    {
      "epoch": 19.74137931034483,
      "grad_norm": 0.8132285475730896,
      "learning_rate": 2.5e-06,
      "loss": 0.0512,
      "step": 2290
    },
    {
      "epoch": 19.82758620689655,
      "grad_norm": 0.7994781136512756,
      "learning_rate": 1.7424242424242427e-06,
      "loss": 0.0378,
      "step": 2300
    },
    {
      "epoch": 19.82758620689655,
      "eval_loss": 0.4273350238800049,
      "eval_runtime": 40.0934,
      "eval_samples_per_second": 33.522,
      "eval_steps_per_second": 33.522,
      "eval_wer": 0.41728125284530637,
      "step": 2300
    },
    {
      "epoch": 19.913793103448278,
      "grad_norm": 0.9775754809379578,
      "learning_rate": 9.848484848484847e-07,
      "loss": 0.0489,
      "step": 2310
    },
    {
      "epoch": 20.0,
      "grad_norm": 0.8857516050338745,
      "learning_rate": 2.2727272727272726e-07,
      "loss": 0.0554,
      "step": 2320
    },
    {
      "epoch": 20.0,
      "step": 2320,
      "total_flos": 2.1476719263248095e+18,
      "train_loss": 0.8618391515622879,
      "train_runtime": 3159.4128,
      "train_samples_per_second": 23.397,
      "train_steps_per_second": 0.734
    }
  ],
  "logging_steps": 10,
  "max_steps": 2320,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 20,
  "save_steps": 400,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.1476719263248095e+18,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}