{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "global_step": 1784,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 0.0,
      "loss": 4.8754,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 0.0,
      "loss": 5.0156,
      "step": 2
    },
    {
      "epoch": 0.0,
      "learning_rate": 0.0,
      "loss": 5.1101,
      "step": 3
    },
    {
      "epoch": 0.0,
      "learning_rate": 2e-08,
      "loss": 4.642,
      "step": 4
    },
    {
      "epoch": 0.0,
      "learning_rate": 4e-08,
      "loss": 4.7542,
      "step": 5
    },
    {
      "epoch": 0.0,
      "learning_rate": 6.000000000000001e-08,
      "loss": 4.7984,
      "step": 6
    },
    {
      "epoch": 0.0,
      "learning_rate": 8e-08,
      "loss": 4.6632,
      "step": 7
    },
    {
      "epoch": 0.0,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 4.8177,
      "step": 8
    },
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.2000000000000002e-07, |
|
"loss": 5.0408, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.4e-07, |
|
"loss": 4.734, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.6e-07, |
|
"loss": 4.792, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.8e-07, |
|
"loss": 4.7571, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.0000000000000002e-07, |
|
"loss": 4.9706, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.2e-07, |
|
"loss": 4.5855, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.4000000000000003e-07, |
|
"loss": 4.73, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.6e-07, |
|
"loss": 4.8459, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.8e-07, |
|
"loss": 4.8809, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.0000000000000004e-07, |
|
"loss": 4.7637, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.2e-07, |
|
"loss": 4.7415, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.4000000000000003e-07, |
|
"loss": 5.0442, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6e-07, |
|
"loss": 4.7489, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8e-07, |
|
"loss": 5.0836, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0000000000000003e-07, |
|
"loss": 4.9406, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2000000000000006e-07, |
|
"loss": 4.6989, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4e-07, |
|
"loss": 4.8695, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.6000000000000004e-07, |
|
"loss": 4.964, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.800000000000001e-07, |
|
"loss": 4.8097, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 5.000000000000001e-07, |
|
"loss": 4.5301, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 5.2e-07, |
|
"loss": 4.9042, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 5.4e-07, |
|
"loss": 4.7286, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 5.6e-07, |
|
"loss": 4.629, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 5.800000000000001e-07, |
|
"loss": 5.0076, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 6.000000000000001e-07, |
|
"loss": 4.7134, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 6.200000000000001e-07, |
|
"loss": 4.6128, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 6.4e-07, |
|
"loss": 4.8588, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 6.6e-07, |
|
"loss": 5.102, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 6.800000000000001e-07, |
|
"loss": 4.7614, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.000000000000001e-07, |
|
"loss": 4.844, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.2e-07, |
|
"loss": 4.9797, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.4e-07, |
|
"loss": 4.9386, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.6e-07, |
|
"loss": 4.7569, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.8e-07, |
|
"loss": 4.56, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.000000000000001e-07, |
|
"loss": 4.8855, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.200000000000001e-07, |
|
"loss": 4.704, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.400000000000001e-07, |
|
"loss": 4.9959, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.6e-07, |
|
"loss": 5.0012, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.8e-07, |
|
"loss": 5.1449, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.000000000000001e-07, |
|
"loss": 5.5571, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.200000000000001e-07, |
|
"loss": 5.5908, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.400000000000001e-07, |
|
"loss": 5.6941, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.600000000000001e-07, |
|
"loss": 4.5698, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.800000000000001e-07, |
|
"loss": 4.7156, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 4.6848, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.02e-06, |
|
"loss": 4.5674, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.04e-06, |
|
"loss": 4.5958, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.06e-06, |
|
"loss": 4.7005, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.08e-06, |
|
"loss": 4.6157, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.1e-06, |
|
"loss": 4.5708, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.12e-06, |
|
"loss": 4.5839, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.14e-06, |
|
"loss": 4.5712, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.1600000000000001e-06, |
|
"loss": 4.5919, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.1800000000000001e-06, |
|
"loss": 4.5206, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 4.8714, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.2200000000000002e-06, |
|
"loss": 4.3526, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.2400000000000002e-06, |
|
"loss": 4.6172, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.26e-06, |
|
"loss": 4.6604, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.28e-06, |
|
"loss": 4.7316, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.3e-06, |
|
"loss": 4.708, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.32e-06, |
|
"loss": 4.5678, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.34e-06, |
|
"loss": 4.521, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.3600000000000001e-06, |
|
"loss": 4.631, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.3800000000000001e-06, |
|
"loss": 4.391, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.4000000000000001e-06, |
|
"loss": 4.5851, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.42e-06, |
|
"loss": 4.3633, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.44e-06, |
|
"loss": 4.9486, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.46e-06, |
|
"loss": 4.7078, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.48e-06, |
|
"loss": 4.5117, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.5e-06, |
|
"loss": 4.4401, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.52e-06, |
|
"loss": 4.4528, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.54e-06, |
|
"loss": 4.767, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.56e-06, |
|
"loss": 4.4202, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.5800000000000001e-06, |
|
"loss": 4.5276, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.6000000000000001e-06, |
|
"loss": 4.4805, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.6200000000000002e-06, |
|
"loss": 4.4425, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.6400000000000002e-06, |
|
"loss": 4.3663, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.6600000000000002e-06, |
|
"loss": 4.4145, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.6800000000000002e-06, |
|
"loss": 4.7599, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.7000000000000002e-06, |
|
"loss": 4.3985, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.72e-06, |
|
"loss": 4.7782, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.74e-06, |
|
"loss": 4.28, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.76e-06, |
|
"loss": 4.7098, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.7800000000000001e-06, |
|
"loss": 4.404, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.8000000000000001e-06, |
|
"loss": 4.2367, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.8200000000000002e-06, |
|
"loss": 4.7195, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.8400000000000002e-06, |
|
"loss": 4.8983, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.8600000000000002e-06, |
|
"loss": 4.5096, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.8800000000000002e-06, |
|
"loss": 4.6397, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.9000000000000002e-06, |
|
"loss": 4.7396, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9200000000000003e-06, |
|
"loss": 5.09, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9200000000000003e-06, |
|
"loss": 5.0107, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.94e-06, |
|
"loss": 4.4992, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9600000000000003e-06, |
|
"loss": 4.2818, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.98e-06, |
|
"loss": 4.3748, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 4.3442, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2.02e-06, |
|
"loss": 4.4177, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2.04e-06, |
|
"loss": 4.3646, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2.06e-06, |
|
"loss": 4.3934, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2.08e-06, |
|
"loss": 4.5075, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2.1000000000000002e-06, |
|
"loss": 4.3691, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2.12e-06, |
|
"loss": 4.432, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2.1400000000000003e-06, |
|
"loss": 4.3411, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2.16e-06, |
|
"loss": 4.2653, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2.1800000000000003e-06, |
|
"loss": 4.4188, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2.2e-06, |
|
"loss": 4.1122, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2.2200000000000003e-06, |
|
"loss": 4.1511, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.24e-06, |
|
"loss": 4.3185, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.2600000000000004e-06, |
|
"loss": 4.5063, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.28e-06, |
|
"loss": 4.3419, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.3000000000000004e-06, |
|
"loss": 4.3951, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.3200000000000002e-06, |
|
"loss": 4.4755, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.3400000000000005e-06, |
|
"loss": 4.5269, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.3600000000000003e-06, |
|
"loss": 4.3495, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.38e-06, |
|
"loss": 4.5442, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 4.4358, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.42e-06, |
|
"loss": 4.095, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.4400000000000004e-06, |
|
"loss": 4.7694, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.46e-06, |
|
"loss": 4.4072, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.4800000000000004e-06, |
|
"loss": 4.391, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.5e-06, |
|
"loss": 4.2837, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.52e-06, |
|
"loss": 4.3096, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.5400000000000002e-06, |
|
"loss": 4.625, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.56e-06, |
|
"loss": 4.3118, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.5800000000000003e-06, |
|
"loss": 4.4058, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.6e-06, |
|
"loss": 4.3754, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.6200000000000003e-06, |
|
"loss": 4.3458, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.64e-06, |
|
"loss": 4.46, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.6600000000000004e-06, |
|
"loss": 4.2079, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.68e-06, |
|
"loss": 4.4193, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.7000000000000004e-06, |
|
"loss": 4.5579, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.7200000000000002e-06, |
|
"loss": 4.2272, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.7400000000000004e-06, |
|
"loss": 4.4489, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.7600000000000003e-06, |
|
"loss": 4.7045, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.7800000000000005e-06, |
|
"loss": 4.3534, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.8000000000000003e-06, |
|
"loss": 4.495, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.82e-06, |
|
"loss": 4.7132, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.84e-06, |
|
"loss": 4.5738, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.86e-06, |
|
"loss": 4.5508, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.88e-06, |
|
"loss": 4.7028, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.9e-06, |
|
"loss": 4.8526, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.92e-06, |
|
"loss": 4.4473, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.9400000000000002e-06, |
|
"loss": 4.0961, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2.96e-06, |
|
"loss": 4.4223, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2.9800000000000003e-06, |
|
"loss": 4.0474, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 3e-06, |
|
"loss": 4.5267, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 3.0200000000000003e-06, |
|
"loss": 4.3652, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 3.04e-06, |
|
"loss": 4.5826, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 3.0600000000000003e-06, |
|
"loss": 4.3799, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 3.08e-06, |
|
"loss": 4.5317, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 3.1000000000000004e-06, |
|
"loss": 4.3339, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 3.12e-06, |
|
"loss": 4.3134, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 3.1400000000000004e-06, |
|
"loss": 4.323, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 3.1600000000000002e-06, |
|
"loss": 4.49, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 3.1800000000000005e-06, |
|
"loss": 4.2963, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 3.2000000000000003e-06, |
|
"loss": 4.5581, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 3.2200000000000005e-06, |
|
"loss": 4.3317, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 3.2400000000000003e-06, |
|
"loss": 4.5466, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 3.2600000000000006e-06, |
|
"loss": 4.403, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 3.2800000000000004e-06, |
|
"loss": 4.0828, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 3.3000000000000006e-06, |
|
"loss": 4.1971, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.3200000000000004e-06, |
|
"loss": 4.3395, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.3400000000000006e-06, |
|
"loss": 4.429, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.3600000000000004e-06, |
|
"loss": 4.3238, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.3800000000000007e-06, |
|
"loss": 4.3723, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.4000000000000005e-06, |
|
"loss": 4.2269, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.4200000000000007e-06, |
|
"loss": 4.4658, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.44e-06, |
|
"loss": 4.1989, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.46e-06, |
|
"loss": 4.2858, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.48e-06, |
|
"loss": 4.2614, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.5e-06, |
|
"loss": 4.0298, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.52e-06, |
|
"loss": 4.4752, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.54e-06, |
|
"loss": 4.3312, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.5600000000000002e-06, |
|
"loss": 4.2905, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.58e-06, |
|
"loss": 4.3776, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.6000000000000003e-06, |
|
"loss": 4.4234, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.62e-06, |
|
"loss": 4.3858, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.6400000000000003e-06, |
|
"loss": 4.2338, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.66e-06, |
|
"loss": 4.2798, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.6800000000000003e-06, |
|
"loss": 4.5202, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.7e-06, |
|
"loss": 4.2347, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.7200000000000004e-06, |
|
"loss": 4.2765, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.74e-06, |
|
"loss": 4.4797, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.7600000000000004e-06, |
|
"loss": 4.2418, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.7800000000000002e-06, |
|
"loss": 4.1586, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.8000000000000005e-06, |
|
"loss": 4.3386, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.820000000000001e-06, |
|
"loss": 4.5951, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.8400000000000005e-06, |
|
"loss": 4.4478, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.86e-06, |
|
"loss": 4.3826, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.88e-06, |
|
"loss": 4.3964, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.900000000000001e-06, |
|
"loss": 4.504, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.920000000000001e-06, |
|
"loss": 4.3849, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.94e-06, |
|
"loss": 4.3331, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.96e-06, |
|
"loss": 4.229, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.980000000000001e-06, |
|
"loss": 4.4101, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 4.091, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.0200000000000005e-06, |
|
"loss": 4.2569, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.04e-06, |
|
"loss": 4.34, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.060000000000001e-06, |
|
"loss": 4.4065, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.08e-06, |
|
"loss": 4.0967, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.1e-06, |
|
"loss": 4.1561, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.12e-06, |
|
"loss": 4.3188, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.14e-06, |
|
"loss": 4.3371, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.16e-06, |
|
"loss": 4.2419, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.18e-06, |
|
"loss": 4.2604, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.2000000000000004e-06, |
|
"loss": 4.0518, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.22e-06, |
|
"loss": 4.4396, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.24e-06, |
|
"loss": 3.9193, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.26e-06, |
|
"loss": 4.17, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.2800000000000005e-06, |
|
"loss": 4.2655, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.3e-06, |
|
"loss": 4.2176, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.32e-06, |
|
"loss": 4.5007, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.34e-06, |
|
"loss": 4.3872, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.360000000000001e-06, |
|
"loss": 4.3805, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.38e-06, |
|
"loss": 4.1781, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.4e-06, |
|
"loss": 4.3381, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.42e-06, |
|
"loss": 4.3505, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.440000000000001e-06, |
|
"loss": 4.4967, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.4600000000000005e-06, |
|
"loss": 4.4029, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.48e-06, |
|
"loss": 4.0799, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.5e-06, |
|
"loss": 4.2794, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.520000000000001e-06, |
|
"loss": 4.2607, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.540000000000001e-06, |
|
"loss": 4.2756, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.56e-06, |
|
"loss": 4.2805, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.58e-06, |
|
"loss": 4.4483, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.600000000000001e-06, |
|
"loss": 4.3671, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.620000000000001e-06, |
|
"loss": 4.1405, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.6400000000000005e-06, |
|
"loss": 4.6412, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.66e-06, |
|
"loss": 4.3655, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.680000000000001e-06, |
|
"loss": 4.3365, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.7e-06, |
|
"loss": 4.3313, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.7200000000000005e-06, |
|
"loss": 4.2741, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.74e-06, |
|
"loss": 4.5547, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.76e-06, |
|
"loss": 4.2917, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.78e-06, |
|
"loss": 4.3679, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 4.515, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.8200000000000004e-06, |
|
"loss": 4.6678, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.84e-06, |
|
"loss": 4.5752, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.86e-06, |
|
"loss": 4.4202, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.880000000000001e-06, |
|
"loss": 4.441, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.9000000000000005e-06, |
|
"loss": 4.1848, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.92e-06, |
|
"loss": 4.2539, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.94e-06, |
|
"loss": 4.2355, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.960000000000001e-06, |
|
"loss": 4.1455, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.980000000000001e-06, |
|
"loss": 4.2115, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 5e-06, |
|
"loss": 4.3223, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 5.02e-06, |
|
"loss": 4.2708, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 5.04e-06, |
|
"loss": 4.1997, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 5.060000000000001e-06, |
|
"loss": 4.2386, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 5.0800000000000005e-06, |
|
"loss": 4.3548, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.1e-06, |
|
"loss": 4.4429, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.12e-06, |
|
"loss": 3.9973, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.140000000000001e-06, |
|
"loss": 4.1186, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.1600000000000006e-06, |
|
"loss": 4.349, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.18e-06, |
|
"loss": 4.2661, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.2e-06, |
|
"loss": 4.1105, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.220000000000001e-06, |
|
"loss": 4.1908, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.240000000000001e-06, |
|
"loss": 4.0259, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.2600000000000005e-06, |
|
"loss": 4.3268, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.28e-06, |
|
"loss": 4.2158, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.300000000000001e-06, |
|
"loss": 4.4676, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.320000000000001e-06, |
|
"loss": 4.0898, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.3400000000000005e-06, |
|
"loss": 4.3542, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.36e-06, |
|
"loss": 4.1318, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.380000000000001e-06, |
|
"loss": 4.0918, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.400000000000001e-06, |
|
"loss": 4.3307, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.420000000000001e-06, |
|
"loss": 4.3337, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.4400000000000004e-06, |
|
"loss": 4.2588, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.460000000000001e-06, |
|
"loss": 4.3187, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.480000000000001e-06, |
|
"loss": 4.3678, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.500000000000001e-06, |
|
"loss": 4.2244, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.5200000000000005e-06, |
|
"loss": 4.211, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.540000000000001e-06, |
|
"loss": 4.3139, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.560000000000001e-06, |
|
"loss": 4.3719, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.580000000000001e-06, |
|
"loss": 4.3627, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.600000000000001e-06, |
|
"loss": 4.1325, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.620000000000001e-06, |
|
"loss": 4.2898, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.64e-06, |
|
"loss": 4.0398, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.66e-06, |
|
"loss": 4.2724, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.68e-06, |
|
"loss": 4.5272, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.7e-06, |
|
"loss": 4.2288, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.72e-06, |
|
"loss": 4.344, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.74e-06, |
|
"loss": 4.2517, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.76e-06, |
|
"loss": 4.6186, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.78e-06, |
|
"loss": 4.436, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 5.8e-06, |
|
"loss": 4.5283, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 5.82e-06, |
|
"loss": 4.4409, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 5.84e-06, |
|
"loss": 4.3245, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 5.86e-06, |
|
"loss": 4.3295, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 5.8800000000000005e-06, |
|
"loss": 4.4948, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 5.9e-06, |
|
"loss": 4.2845, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 5.92e-06, |
|
"loss": 4.5504, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 5.94e-06, |
|
"loss": 4.2967, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 5.9600000000000005e-06, |
|
"loss": 4.1238, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 5.98e-06, |
|
"loss": 4.3354, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 6e-06, |
|
"loss": 4.1704, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 6.02e-06, |
|
"loss": 4.3119, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 6.040000000000001e-06, |
|
"loss": 4.2196, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 6.0600000000000004e-06, |
|
"loss": 4.4256, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 6.08e-06, |
|
"loss": 4.4929, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 6.1e-06, |
|
"loss": 4.4737, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 6.120000000000001e-06, |
|
"loss": 4.2571, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 6.1400000000000005e-06, |
|
"loss": 4.2761, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 6.16e-06, |
|
"loss": 4.2316, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.18e-06, |
|
"loss": 4.2501, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.200000000000001e-06, |
|
"loss": 4.3589, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.220000000000001e-06, |
|
"loss": 4.4174, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.24e-06, |
|
"loss": 4.1764, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.26e-06, |
|
"loss": 4.223, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.280000000000001e-06, |
|
"loss": 3.9413, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.300000000000001e-06, |
|
"loss": 4.1358, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.3200000000000005e-06, |
|
"loss": 4.3021, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.34e-06, |
|
"loss": 4.5006, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.360000000000001e-06, |
|
"loss": 4.2984, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.380000000000001e-06, |
|
"loss": 4.3466, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.4000000000000006e-06, |
|
"loss": 4.2219, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.42e-06, |
|
"loss": 4.1972, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.440000000000001e-06, |
|
"loss": 4.1798, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.460000000000001e-06, |
|
"loss": 4.0998, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.480000000000001e-06, |
|
"loss": 4.223, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.5000000000000004e-06, |
|
"loss": 4.1664, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.520000000000001e-06, |
|
"loss": 4.4633, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6.540000000000001e-06, |
|
"loss": 4.3068, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6.560000000000001e-06, |
|
"loss": 4.3922, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6.5800000000000005e-06, |
|
"loss": 4.4014, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6.600000000000001e-06, |
|
"loss": 4.3897, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6.620000000000001e-06, |
|
"loss": 4.3967, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6.640000000000001e-06, |
|
"loss": 4.1708, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6.660000000000001e-06, |
|
"loss": 4.2822, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6.680000000000001e-06, |
|
"loss": 4.1643, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6.700000000000001e-06, |
|
"loss": 4.2395, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6.720000000000001e-06, |
|
"loss": 4.2107, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6.740000000000001e-06, |
|
"loss": 4.1998, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6.760000000000001e-06, |
|
"loss": 4.3972, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6.780000000000001e-06, |
|
"loss": 4.4682, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6.800000000000001e-06, |
|
"loss": 4.0262, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6.820000000000001e-06, |
|
"loss": 4.2869, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6.8400000000000014e-06, |
|
"loss": 4.2299, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 6.860000000000001e-06, |
|
"loss": 4.709, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 6.88e-06, |
|
"loss": 4.5604, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 6.9e-06, |
|
"loss": 4.2687, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 6.92e-06, |
|
"loss": 4.2409, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 6.9400000000000005e-06, |
|
"loss": 4.3607, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 6.96e-06, |
|
"loss": 4.4031, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 6.98e-06, |
|
"loss": 4.0405, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 7e-06, |
|
"loss": 4.2234, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 7.0200000000000006e-06, |
|
"loss": 3.8863, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 7.04e-06, |
|
"loss": 4.1162, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 7.06e-06, |
|
"loss": 4.3389, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 7.08e-06, |
|
"loss": 4.321, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 7.100000000000001e-06, |
|
"loss": 4.4627, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 7.1200000000000004e-06, |
|
"loss": 4.2116, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 7.14e-06, |
|
"loss": 4.2391, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 7.16e-06, |
|
"loss": 4.0248, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 7.180000000000001e-06, |
|
"loss": 4.305, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 7.2000000000000005e-06, |
|
"loss": 4.318, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 7.22e-06, |
|
"loss": 4.2532, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.24e-06, |
|
"loss": 4.3919, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.260000000000001e-06, |
|
"loss": 4.2219, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.280000000000001e-06, |
|
"loss": 4.1523, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.3e-06, |
|
"loss": 4.0451, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.32e-06, |
|
"loss": 4.1412, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.340000000000001e-06, |
|
"loss": 4.2046, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.360000000000001e-06, |
|
"loss": 4.5805, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.3800000000000005e-06, |
|
"loss": 4.2065, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.4e-06, |
|
"loss": 4.0656, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.420000000000001e-06, |
|
"loss": 4.0191, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.440000000000001e-06, |
|
"loss": 4.1177, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.4600000000000006e-06, |
|
"loss": 4.1314, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.48e-06, |
|
"loss": 4.0029, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.500000000000001e-06, |
|
"loss": 4.1751, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.520000000000001e-06, |
|
"loss": 4.3463, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.540000000000001e-06, |
|
"loss": 3.9544, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.5600000000000005e-06, |
|
"loss": 4.1137, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 7.58e-06, |
|
"loss": 4.2628, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.600000000000001e-06, |
|
"loss": 4.0839, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.620000000000001e-06, |
|
"loss": 4.367, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.640000000000001e-06, |
|
"loss": 4.3549, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.660000000000001e-06, |
|
"loss": 4.3025, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.680000000000001e-06, |
|
"loss": 4.0627, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.7e-06, |
|
"loss": 4.3837, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.72e-06, |
|
"loss": 4.5904, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.74e-06, |
|
"loss": 4.1365, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.76e-06, |
|
"loss": 4.6022, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.78e-06, |
|
"loss": 4.6247, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.800000000000002e-06, |
|
"loss": 4.2958, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.820000000000001e-06, |
|
"loss": 4.4249, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.840000000000001e-06, |
|
"loss": 4.3033, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.860000000000001e-06, |
|
"loss": 4.3635, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.88e-06, |
|
"loss": 4.7299, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.9e-06, |
|
"loss": 5.0037, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.92e-06, |
|
"loss": 4.9575, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 7.94e-06, |
|
"loss": 4.3872, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 7.960000000000002e-06, |
|
"loss": 4.0742, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 7.980000000000002e-06, |
|
"loss": 4.1041, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 4.2468, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.020000000000001e-06, |
|
"loss": 4.0194, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.040000000000001e-06, |
|
"loss": 4.21, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.06e-06, |
|
"loss": 4.1145, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.08e-06, |
|
"loss": 4.2654, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.1e-06, |
|
"loss": 4.3368, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.120000000000002e-06, |
|
"loss": 4.2675, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.14e-06, |
|
"loss": 4.2297, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.16e-06, |
|
"loss": 4.2588, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.18e-06, |
|
"loss": 4.3025, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.2e-06, |
|
"loss": 4.2689, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.220000000000001e-06, |
|
"loss": 4.2883, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.24e-06, |
|
"loss": 4.1599, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.26e-06, |
|
"loss": 4.2517, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.28e-06, |
|
"loss": 4.2504, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.3e-06, |
|
"loss": 4.2981, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.32e-06, |
|
"loss": 4.2624, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.34e-06, |
|
"loss": 4.139, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.36e-06, |
|
"loss": 4.4604, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.380000000000001e-06, |
|
"loss": 4.0497, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.400000000000001e-06, |
|
"loss": 4.5089, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.42e-06, |
|
"loss": 4.4225, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.44e-06, |
|
"loss": 4.2752, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.46e-06, |
|
"loss": 4.0643, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.48e-06, |
|
"loss": 4.0671, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.5e-06, |
|
"loss": 4.1966, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.52e-06, |
|
"loss": 4.2951, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.540000000000001e-06, |
|
"loss": 4.1551, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.560000000000001e-06, |
|
"loss": 4.316, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.580000000000001e-06, |
|
"loss": 4.2471, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.6e-06, |
|
"loss": 4.3994, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.62e-06, |
|
"loss": 4.3803, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.64e-06, |
|
"loss": 4.0281, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 8.66e-06, |
|
"loss": 4.227, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.68e-06, |
|
"loss": 4.3516, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.700000000000001e-06, |
|
"loss": 4.1701, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.720000000000001e-06, |
|
"loss": 4.4076, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.740000000000001e-06, |
|
"loss": 4.3809, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.76e-06, |
|
"loss": 4.3276, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.78e-06, |
|
"loss": 4.304, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.8e-06, |
|
"loss": 4.5344, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.82e-06, |
|
"loss": 4.3241, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.84e-06, |
|
"loss": 4.9005, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.860000000000002e-06, |
|
"loss": 4.5209, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.880000000000001e-06, |
|
"loss": 5.0882, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.900000000000001e-06, |
|
"loss": 4.4276, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.920000000000001e-06, |
|
"loss": 4.4119, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.94e-06, |
|
"loss": 3.9568, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.96e-06, |
|
"loss": 4.0944, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 8.98e-06, |
|
"loss": 4.245, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9e-06, |
|
"loss": 3.9442, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.020000000000002e-06, |
|
"loss": 3.8766, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.040000000000002e-06, |
|
"loss": 4.1665, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.060000000000001e-06, |
|
"loss": 4.0158, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.080000000000001e-06, |
|
"loss": 3.9961, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.100000000000001e-06, |
|
"loss": 3.8735, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.12e-06, |
|
"loss": 4.1623, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.14e-06, |
|
"loss": 4.0337, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.16e-06, |
|
"loss": 4.0507, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.180000000000002e-06, |
|
"loss": 4.1257, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.200000000000002e-06, |
|
"loss": 4.1256, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.220000000000002e-06, |
|
"loss": 4.0574, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.240000000000001e-06, |
|
"loss": 4.2874, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.260000000000001e-06, |
|
"loss": 4.1188, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.280000000000001e-06, |
|
"loss": 4.2122, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.3e-06, |
|
"loss": 4.0582, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.32e-06, |
|
"loss": 4.1683, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.340000000000002e-06, |
|
"loss": 4.3343, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 9.360000000000002e-06, |
|
"loss": 4.1305, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.38e-06, |
|
"loss": 4.1566, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.4e-06, |
|
"loss": 4.2579, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.42e-06, |
|
"loss": 4.2334, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.440000000000001e-06, |
|
"loss": 4.3682, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.460000000000001e-06, |
|
"loss": 4.3548, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.48e-06, |
|
"loss": 4.2328, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.5e-06, |
|
"loss": 4.2952, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.52e-06, |
|
"loss": 4.1688, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.54e-06, |
|
"loss": 4.0214, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.56e-06, |
|
"loss": 4.1373, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.58e-06, |
|
"loss": 4.4781, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.600000000000001e-06, |
|
"loss": 4.042, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.620000000000001e-06, |
|
"loss": 4.3743, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.640000000000001e-06, |
|
"loss": 4.2504, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.66e-06, |
|
"loss": 4.3684, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.68e-06, |
|
"loss": 4.5044, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.7e-06, |
|
"loss": 4.4635, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.72e-06, |
|
"loss": 4.2962, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.74e-06, |
|
"loss": 4.1692, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.760000000000001e-06, |
|
"loss": 4.4739, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.780000000000001e-06, |
|
"loss": 4.3998, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.800000000000001e-06, |
|
"loss": 4.4301, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.820000000000001e-06, |
|
"loss": 4.4232, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.84e-06, |
|
"loss": 4.3393, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.86e-06, |
|
"loss": 4.1971, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.88e-06, |
|
"loss": 4.6193, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.9e-06, |
|
"loss": 4.7138, |
|
"step": 499 |
|
}, |
|
    {
      "epoch": 0.28,
      "learning_rate": 9.920000000000002e-06,
      "loss": 4.1474,
      "step": 500
    },
    {
      "epoch": 0.28,
      "eval_loss": 4.255368709564209,
      "eval_runtime": 967.0827,
      "eval_samples_per_second": 2.732,
      "eval_steps_per_second": 0.342,
      "eval_wer": 1.9570606902023007,
      "step": 500
    },
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.940000000000001e-06, |
|
"loss": 4.2678, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.960000000000001e-06, |
|
"loss": 4.0115, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.980000000000001e-06, |
|
"loss": 4.0922, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1e-05, |
|
"loss": 4.2905, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.992211838006231e-06, |
|
"loss": 3.9885, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.984423676012462e-06, |
|
"loss": 4.3115, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.976635514018693e-06, |
|
"loss": 4.1528, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 9.968847352024923e-06, |
|
"loss": 4.1064, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.961059190031154e-06, |
|
"loss": 4.2746, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.953271028037384e-06, |
|
"loss": 4.3826, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.945482866043615e-06, |
|
"loss": 4.2451, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.937694704049845e-06, |
|
"loss": 4.0663, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.929906542056076e-06, |
|
"loss": 4.08, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.922118380062306e-06, |
|
"loss": 4.0253, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.914330218068537e-06, |
|
"loss": 4.1789, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.906542056074768e-06, |
|
"loss": 4.3094, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.898753894080998e-06, |
|
"loss": 4.2065, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.890965732087229e-06, |
|
"loss": 4.578, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.883177570093458e-06, |
|
"loss": 4.3166, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.87538940809969e-06, |
|
"loss": 3.9214, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.86760124610592e-06, |
|
"loss": 3.9699, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.859813084112151e-06, |
|
"loss": 4.0507, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.85202492211838e-06, |
|
"loss": 4.0245, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.844236760124612e-06, |
|
"loss": 4.2919, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.836448598130843e-06, |
|
"loss": 4.3836, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.828660436137073e-06, |
|
"loss": 4.2799, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.820872274143302e-06, |
|
"loss": 4.3833, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.813084112149533e-06, |
|
"loss": 3.9954, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.805295950155765e-06, |
|
"loss": 4.3376, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.797507788161996e-06, |
|
"loss": 4.2853, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.789719626168224e-06, |
|
"loss": 4.1192, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.781931464174455e-06, |
|
"loss": 3.958, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.774143302180686e-06, |
|
"loss": 4.0275, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.766355140186918e-06, |
|
"loss": 3.9442, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.758566978193147e-06, |
|
"loss": 4.4104, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.750778816199377e-06, |
|
"loss": 4.2646, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.742990654205608e-06, |
|
"loss": 4.1178, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.73520249221184e-06, |
|
"loss": 4.3798, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.727414330218069e-06, |
|
"loss": 4.298, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.7196261682243e-06, |
|
"loss": 4.3442, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.71183800623053e-06, |
|
"loss": 4.369, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.70404984423676e-06, |
|
"loss": 4.3828, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.696261682242991e-06, |
|
"loss": 4.1793, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 9.688473520249222e-06, |
|
"loss": 4.3259, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 9.680685358255452e-06, |
|
"loss": 4.1225, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 9.672897196261683e-06, |
|
"loss": 4.2375, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 9.665109034267914e-06, |
|
"loss": 4.7295, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 9.657320872274144e-06, |
|
"loss": 4.4102, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 9.649532710280375e-06, |
|
"loss": 4.8559, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 9.641744548286605e-06, |
|
"loss": 4.6998, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 9.633956386292836e-06, |
|
"loss": 4.446, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 9.626168224299066e-06, |
|
"loss": 4.3452, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 9.618380062305297e-06, |
|
"loss": 4.1782, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 9.610591900311527e-06, |
|
"loss": 4.184, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 9.602803738317758e-06, |
|
"loss": 4.0768, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 9.595015576323989e-06, |
|
"loss": 4.1894, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 9.58722741433022e-06, |
|
"loss": 4.2925, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 9.57943925233645e-06, |
|
"loss": 4.123, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 9.57165109034268e-06, |
|
"loss": 4.0135, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 9.563862928348911e-06, |
|
"loss": 4.0661, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 9.556074766355141e-06, |
|
"loss": 4.3426, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.548286604361372e-06, |
|
"loss": 4.0666, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.540498442367601e-06, |
|
"loss": 3.972, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.532710280373833e-06, |
|
"loss": 4.4293, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.524922118380064e-06, |
|
"loss": 4.3269, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.517133956386294e-06, |
|
"loss": 4.1747, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.509345794392523e-06, |
|
"loss": 4.0616, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.501557632398755e-06, |
|
"loss": 4.2889, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.493769470404986e-06, |
|
"loss": 4.2129, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.485981308411217e-06, |
|
"loss": 4.4106, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.478193146417445e-06, |
|
"loss": 4.1228, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.470404984423676e-06, |
|
"loss": 4.3631, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.462616822429908e-06, |
|
"loss": 4.0569, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.454828660436139e-06, |
|
"loss": 4.021, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.447040498442368e-06, |
|
"loss": 4.105, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.439252336448598e-06, |
|
"loss": 4.1015, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.431464174454829e-06, |
|
"loss": 4.1865, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.423676012461061e-06, |
|
"loss": 4.0787, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.41588785046729e-06, |
|
"loss": 4.1433, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.40809968847352e-06, |
|
"loss": 4.3971, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.400311526479751e-06, |
|
"loss": 4.2507, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.392523364485983e-06, |
|
"loss": 4.2991, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.384735202492212e-06, |
|
"loss": 3.979, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.376947040498443e-06, |
|
"loss": 4.4656, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.369158878504673e-06, |
|
"loss": 4.2632, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.361370716510904e-06, |
|
"loss": 4.138, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.353582554517135e-06, |
|
"loss": 4.3153, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.345794392523365e-06, |
|
"loss": 4.3056, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.338006230529596e-06, |
|
"loss": 4.3856, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.330218068535826e-06, |
|
"loss": 4.3454, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.322429906542057e-06, |
|
"loss": 4.1934, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.314641744548287e-06, |
|
"loss": 4.4161, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.306853582554518e-06, |
|
"loss": 4.1336, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.299065420560748e-06, |
|
"loss": 4.4124, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.291277258566979e-06, |
|
"loss": 4.3579, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.28348909657321e-06, |
|
"loss": 4.5212, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.27570093457944e-06, |
|
"loss": 4.4774, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.26791277258567e-06, |
|
"loss": 4.2913, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.260124610591901e-06, |
|
"loss": 4.7102, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.252336448598132e-06, |
|
"loss": 4.8834, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.244548286604362e-06, |
|
"loss": 3.9224, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.236760124610593e-06, |
|
"loss": 4.249, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.228971962616824e-06, |
|
"loss": 4.1371, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.221183800623054e-06, |
|
"loss": 4.2153, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.213395638629285e-06, |
|
"loss": 4.0769, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.205607476635515e-06, |
|
"loss": 4.034, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.197819314641744e-06, |
|
"loss": 4.1589, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.190031152647976e-06, |
|
"loss": 4.1378, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.182242990654207e-06, |
|
"loss": 4.2825, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.174454828660438e-06, |
|
"loss": 4.0852, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.166666666666666e-06, |
|
"loss": 4.4104, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.158878504672899e-06, |
|
"loss": 4.1579, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.15109034267913e-06, |
|
"loss": 4.283, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.14330218068536e-06, |
|
"loss": 4.1766, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.135514018691589e-06, |
|
"loss": 4.26, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.12772585669782e-06, |
|
"loss": 4.0639, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.119937694704052e-06, |
|
"loss": 4.0882, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.112149532710282e-06, |
|
"loss": 4.1306, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.104361370716511e-06, |
|
"loss": 4.0774, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.096573208722742e-06, |
|
"loss": 4.1765, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.088785046728972e-06, |
|
"loss": 4.2682, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.080996884735204e-06, |
|
"loss": 4.1364, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.073208722741433e-06, |
|
"loss": 4.2616, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.065420560747664e-06, |
|
"loss": 4.4306, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.057632398753894e-06, |
|
"loss": 4.3394, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.049844236760127e-06, |
|
"loss": 4.1804, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.042056074766356e-06, |
|
"loss": 4.1363, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.034267912772586e-06, |
|
"loss": 4.4194, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.026479750778817e-06, |
|
"loss": 4.4591, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.018691588785047e-06, |
|
"loss": 4.1743, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.010903426791278e-06, |
|
"loss": 4.2064, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.003115264797508e-06, |
|
"loss": 4.2854, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 8.995327102803739e-06, |
|
"loss": 4.1342, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.98753894080997e-06, |
|
"loss": 4.2015, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.9797507788162e-06, |
|
"loss": 4.2676, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.97196261682243e-06, |
|
"loss": 4.0735, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.964174454828661e-06, |
|
"loss": 4.2914, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.956386292834892e-06, |
|
"loss": 3.9361, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.948598130841122e-06, |
|
"loss": 4.3117, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.940809968847353e-06, |
|
"loss": 4.5528, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.933021806853583e-06, |
|
"loss": 4.0742, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.925233644859814e-06, |
|
"loss": 4.3152, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.917445482866045e-06, |
|
"loss": 4.2238, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.909657320872275e-06, |
|
"loss": 4.3923, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.901869158878506e-06, |
|
"loss": 4.2882, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.894080996884736e-06, |
|
"loss": 4.4868, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.886292834890967e-06, |
|
"loss": 4.6624, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.878504672897197e-06, |
|
"loss": 4.8206, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.870716510903428e-06, |
|
"loss": 4.7514, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.862928348909659e-06, |
|
"loss": 4.6551, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.855140186915887e-06, |
|
"loss": 4.1182, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.84735202492212e-06, |
|
"loss": 4.0949, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.83956386292835e-06, |
|
"loss": 4.1354, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.83177570093458e-06, |
|
"loss": 4.1571, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.82398753894081e-06, |
|
"loss": 4.1177, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.81619937694704e-06, |
|
"loss": 4.1068, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.808411214953273e-06, |
|
"loss": 4.1315, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.800623052959503e-06, |
|
"loss": 4.3469, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.792834890965732e-06, |
|
"loss": 4.3669, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.785046728971963e-06, |
|
"loss": 4.199, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.777258566978195e-06, |
|
"loss": 4.2819, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.769470404984425e-06, |
|
"loss": 4.2564, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.761682242990654e-06, |
|
"loss": 4.2846, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.753894080996885e-06, |
|
"loss": 4.2476, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.746105919003115e-06, |
|
"loss": 4.0793, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.738317757009348e-06, |
|
"loss": 4.172, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.730529595015576e-06, |
|
"loss": 4.1642, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.722741433021807e-06, |
|
"loss": 4.0363, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.714953271028038e-06, |
|
"loss": 4.108, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.707165109034268e-06, |
|
"loss": 4.33, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.699376947040499e-06, |
|
"loss": 4.2905, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.69158878504673e-06, |
|
"loss": 4.3326, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.68380062305296e-06, |
|
"loss": 4.1442, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.67601246105919e-06, |
|
"loss": 4.1272, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.668224299065421e-06, |
|
"loss": 4.0058, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.660436137071652e-06, |
|
"loss": 4.2193, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.652647975077882e-06, |
|
"loss": 4.1684, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.644859813084113e-06, |
|
"loss": 4.2177, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.637071651090343e-06, |
|
"loss": 4.0278, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.629283489096574e-06, |
|
"loss": 4.198, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.621495327102804e-06, |
|
"loss": 4.1532, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.613707165109035e-06, |
|
"loss": 4.2937, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.605919003115266e-06, |
|
"loss": 4.3373, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.598130841121496e-06, |
|
"loss": 4.2961, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.590342679127727e-06, |
|
"loss": 4.409, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.582554517133957e-06, |
|
"loss": 4.2937, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.574766355140188e-06, |
|
"loss": 3.9739, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.566978193146418e-06, |
|
"loss": 4.0866, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.559190031152649e-06, |
|
"loss": 4.2485, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.55140186915888e-06, |
|
"loss": 4.2759, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.54361370716511e-06, |
|
"loss": 4.1216, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.53582554517134e-06, |
|
"loss": 4.3502, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.528037383177571e-06, |
|
"loss": 4.199, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.520249221183802e-06, |
|
"loss": 4.3211, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.51246105919003e-06, |
|
"loss": 4.4308, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.504672897196263e-06, |
|
"loss": 4.5234, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.496884735202494e-06, |
|
"loss": 3.9568, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.489096573208724e-06, |
|
"loss": 4.3052, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.481308411214953e-06, |
|
"loss": 4.2503, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.473520249221184e-06, |
|
"loss": 4.3216, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.465732087227416e-06, |
|
"loss": 4.042, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.457943925233646e-06, |
|
"loss": 4.1536, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.450155763239875e-06, |
|
"loss": 4.1513, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.442367601246106e-06, |
|
"loss": 4.3788, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.434579439252338e-06, |
|
"loss": 4.0126, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.426791277258569e-06, |
|
"loss": 4.1083, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.419003115264797e-06, |
|
"loss": 3.9839, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.411214953271028e-06, |
|
"loss": 4.1265, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.403426791277259e-06, |
|
"loss": 3.99, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.395638629283491e-06, |
|
"loss": 4.222, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.38785046728972e-06, |
|
"loss": 4.2499, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.38006230529595e-06, |
|
"loss": 4.0893, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.372274143302181e-06, |
|
"loss": 4.1096, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.364485981308411e-06, |
|
"loss": 4.512, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.356697819314642e-06, |
|
"loss": 4.1642, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.348909657320873e-06, |
|
"loss": 3.9761, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.341121495327103e-06, |
|
"loss": 4.1783, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.333333333333334e-06, |
|
"loss": 4.3789, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.325545171339564e-06, |
|
"loss": 4.0808, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.317757009345795e-06, |
|
"loss": 4.2484, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.309968847352025e-06, |
|
"loss": 4.3198, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.302180685358256e-06, |
|
"loss": 4.2864, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.294392523364487e-06, |
|
"loss": 4.1671, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.286604361370717e-06, |
|
"loss": 4.4239, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.278816199376948e-06, |
|
"loss": 4.2569, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.271028037383178e-06, |
|
"loss": 4.1546, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.263239875389409e-06, |
|
"loss": 3.8699, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.25545171339564e-06, |
|
"loss": 4.2788, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.24766355140187e-06, |
|
"loss": 4.1884, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.2398753894081e-06, |
|
"loss": 4.1335, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.232087227414331e-06, |
|
"loss": 4.1907, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.224299065420562e-06, |
|
"loss": 4.2156, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.216510903426792e-06, |
|
"loss": 4.2277, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.208722741433023e-06, |
|
"loss": 4.2226, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.200934579439253e-06, |
|
"loss": 4.2642, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.193146417445484e-06, |
|
"loss": 4.1598, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.185358255451715e-06, |
|
"loss": 4.1206, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.177570093457945e-06, |
|
"loss": 3.9079, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.169781931464174e-06, |
|
"loss": 4.0799, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.161993769470406e-06, |
|
"loss": 4.1442, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.154205607476637e-06, |
|
"loss": 4.3579, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.146417445482867e-06, |
|
"loss": 4.0243, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.138629283489096e-06, |
|
"loss": 4.1691, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.130841121495327e-06, |
|
"loss": 4.142, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.123052959501559e-06, |
|
"loss": 4.4914, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.11526479750779e-06, |
|
"loss": 4.113, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.107476635514018e-06, |
|
"loss": 4.4744, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.099688473520249e-06, |
|
"loss": 4.7256, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.091900311526481e-06, |
|
"loss": 4.2353, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.084112149532712e-06, |
|
"loss": 4.5858, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.07632398753894e-06, |
|
"loss": 4.2206, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.068535825545171e-06, |
|
"loss": 4.2157, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.060747663551402e-06, |
|
"loss": 4.2556, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.052959501557634e-06, |
|
"loss": 3.9242, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.045171339563863e-06, |
|
"loss": 4.0884, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.037383177570094e-06, |
|
"loss": 4.3085, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.029595015576324e-06, |
|
"loss": 3.9887, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.021806853582555e-06, |
|
"loss": 4.1841, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 8.014018691588785e-06, |
|
"loss": 4.2353, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 8.006230529595016e-06, |
|
"loss": 4.1268, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.998442367601246e-06, |
|
"loss": 4.4207, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.990654205607477e-06, |
|
"loss": 3.953, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.982866043613708e-06, |
|
"loss": 4.2151, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.975077881619938e-06, |
|
"loss": 3.9338, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.967289719626169e-06, |
|
"loss": 3.9276, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.9595015576324e-06, |
|
"loss": 4.0294, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.95171339563863e-06, |
|
"loss": 4.274, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.94392523364486e-06, |
|
"loss": 3.9668, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.936137071651091e-06, |
|
"loss": 4.4891, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.928348909657322e-06, |
|
"loss": 4.2079, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.920560747663552e-06, |
|
"loss": 4.089, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.912772585669783e-06, |
|
"loss": 4.1559, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.904984423676013e-06, |
|
"loss": 4.071, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.897196261682244e-06, |
|
"loss": 4.1898, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.889408099688474e-06, |
|
"loss": 4.0508, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.881619937694705e-06, |
|
"loss": 4.1177, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.873831775700936e-06, |
|
"loss": 3.9818, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.866043613707166e-06, |
|
"loss": 4.3853, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.858255451713395e-06, |
|
"loss": 4.2732, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.850467289719627e-06, |
|
"loss": 4.341, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.842679127725858e-06, |
|
"loss": 4.2909, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.834890965732088e-06, |
|
"loss": 4.322, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.827102803738317e-06, |
|
"loss": 4.2221, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.81931464174455e-06, |
|
"loss": 4.1536, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.81152647975078e-06, |
|
"loss": 3.9918, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.80373831775701e-06, |
|
"loss": 4.2978, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.79595015576324e-06, |
|
"loss": 3.7971, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.78816199376947e-06, |
|
"loss": 4.294, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.780373831775702e-06, |
|
"loss": 4.2495, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.772585669781933e-06, |
|
"loss": 4.2841, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.764797507788162e-06, |
|
"loss": 4.4303, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.757009345794392e-06, |
|
"loss": 4.0014, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.749221183800623e-06, |
|
"loss": 4.2482, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.741433021806855e-06, |
|
"loss": 4.1339, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.733644859813084e-06, |
|
"loss": 4.5166, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.725856697819315e-06, |
|
"loss": 4.2732, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.718068535825545e-06, |
|
"loss": 4.0227, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.710280373831777e-06, |
|
"loss": 4.6596, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.702492211838006e-06, |
|
"loss": 4.3819, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.694704049844237e-06, |
|
"loss": 4.4056, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.686915887850467e-06, |
|
"loss": 4.219, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.679127725856698e-06, |
|
"loss": 4.1491, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.671339563862929e-06, |
|
"loss": 4.175, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.663551401869159e-06, |
|
"loss": 4.1422, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.65576323987539e-06, |
|
"loss": 4.2448, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.64797507788162e-06, |
|
"loss": 4.2871, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.64018691588785e-06, |
|
"loss": 4.1088, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.632398753894081e-06, |
|
"loss": 3.8695, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.624610591900312e-06, |
|
"loss": 4.3049, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.616822429906543e-06, |
|
"loss": 4.2783, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.609034267912772e-06, |
|
"loss": 4.2466, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.601246105919004e-06, |
|
"loss": 4.1957, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.593457943925234e-06, |
|
"loss": 4.1461, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.585669781931465e-06, |
|
"loss": 4.0176, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.5778816199376945e-06, |
|
"loss": 4.1662, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.570093457943926e-06, |
|
"loss": 4.1299, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.5623052959501565e-06, |
|
"loss": 4.0579, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.554517133956387e-06, |
|
"loss": 4.2202, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.546728971962617e-06, |
|
"loss": 4.1788, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.538940809968847e-06, |
|
"loss": 4.1489, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.531152647975079e-06, |
|
"loss": 4.3573, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.523364485981309e-06, |
|
"loss": 4.0956, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.515576323987539e-06, |
|
"loss": 4.2922, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.50778816199377e-06, |
|
"loss": 4.2779, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.500000000000001e-06, |
|
"loss": 4.1494, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.492211838006232e-06, |
|
"loss": 4.2543, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.484423676012462e-06, |
|
"loss": 4.309, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.476635514018692e-06, |
|
"loss": 4.0422, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.4688473520249225e-06, |
|
"loss": 4.0777, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.461059190031154e-06, |
|
"loss": 4.0835, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.4532710280373844e-06, |
|
"loss": 4.4428, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.445482866043614e-06, |
|
"loss": 4.461, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.437694704049845e-06, |
|
"loss": 4.2621, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.429906542056075e-06, |
|
"loss": 3.9154, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.422118380062307e-06, |
|
"loss": 3.9634, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.4143302180685364e-06, |
|
"loss": 4.115, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.406542056074767e-06, |
|
"loss": 4.3052, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.3987538940809976e-06, |
|
"loss": 4.131, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.390965732087229e-06, |
|
"loss": 4.0391, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.383177570093458e-06, |
|
"loss": 4.1468, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.375389408099689e-06, |
|
"loss": 4.1885, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.36760124610592e-06, |
|
"loss": 4.1732, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.35981308411215e-06, |
|
"loss": 4.1645, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.35202492211838e-06, |
|
"loss": 4.2706, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.3442367601246115e-06, |
|
"loss": 4.0413, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.336448598130842e-06, |
|
"loss": 4.1605, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.328660436137073e-06, |
|
"loss": 4.0483, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.320872274143302e-06, |
|
"loss": 4.4353, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.313084112149533e-06, |
|
"loss": 4.6819, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.305295950155764e-06, |
|
"loss": 4.736, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.297507788161995e-06, |
|
"loss": 4.2232, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.289719626168225e-06, |
|
"loss": 4.0883, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.281931464174455e-06, |
|
"loss": 4.1672, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.274143302180686e-06, |
|
"loss": 4.2185, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.266355140186917e-06, |
|
"loss": 4.2033, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.258566978193147e-06, |
|
"loss": 4.0836, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.2507788161993775e-06, |
|
"loss": 4.1439, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.242990654205608e-06, |
|
"loss": 4.4477, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.2352024922118395e-06, |
|
"loss": 4.152, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.227414330218069e-06, |
|
"loss": 4.2131, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.2196261682243e-06, |
|
"loss": 3.8407, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.21183800623053e-06, |
|
"loss": 4.0907, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.204049844236761e-06, |
|
"loss": 4.1275, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.196261682242991e-06, |
|
"loss": 4.0594, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.188473520249222e-06, |
|
"loss": 4.0733, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.180685358255453e-06, |
|
"loss": 4.2694, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.172897196261683e-06, |
|
"loss": 4.182, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.165109034267913e-06, |
|
"loss": 4.2203, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.1573208722741435e-06, |
|
"loss": 4.207, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.149532710280375e-06, |
|
"loss": 4.1295, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.1417445482866054e-06, |
|
"loss": 4.1752, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.133956386292835e-06, |
|
"loss": 4.2087, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.126168224299066e-06, |
|
"loss": 4.171, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.118380062305297e-06, |
|
"loss": 4.1026, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.110591900311528e-06, |
|
"loss": 4.3376, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.1028037383177574e-06, |
|
"loss": 4.1961, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.095015576323988e-06, |
|
"loss": 4.5628, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.0872274143302186e-06, |
|
"loss": 4.2224, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.07943925233645e-06, |
|
"loss": 4.1327, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.07165109034268e-06, |
|
"loss": 4.2913, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.06386292834891e-06, |
|
"loss": 4.3579, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.056074766355141e-06, |
|
"loss": 3.9194, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.048286604361371e-06, |
|
"loss": 4.2405, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 7.040498442367601e-06, |
|
"loss": 4.3274, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 7.0327102803738325e-06, |
|
"loss": 4.3691, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 7.024922118380063e-06, |
|
"loss": 4.4093, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 7.017133956386294e-06, |
|
"loss": 4.518, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 7.009345794392523e-06, |
|
"loss": 4.0578, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 7.001557632398755e-06, |
|
"loss": 3.9511, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 6.993769470404985e-06, |
|
"loss": 4.3573, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 6.985981308411216e-06, |
|
"loss": 4.2739, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 6.978193146417446e-06, |
|
"loss": 4.3667, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 6.970404984423676e-06, |
|
"loss": 4.5281, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 6.962616822429908e-06, |
|
"loss": 4.3019, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 6.954828660436138e-06, |
|
"loss": 4.2797, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 6.947040498442368e-06, |
|
"loss": 4.0694, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 6.9392523364485985e-06, |
|
"loss": 4.2843, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 6.931464174454829e-06, |
|
"loss": 4.4027, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 6.9236760124610605e-06, |
|
"loss": 4.0206, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 6.91588785046729e-06, |
|
"loss": 4.7399, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.908099688473521e-06, |
|
"loss": 4.2177, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.900311526479751e-06, |
|
"loss": 4.0385, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.892523364485983e-06, |
|
"loss": 4.4444, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.8847352024922125e-06, |
|
"loss": 4.0726, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.876947040498443e-06, |
|
"loss": 4.235, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.869158878504674e-06, |
|
"loss": 4.3494, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.861370716510904e-06, |
|
"loss": 4.2204, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.853582554517134e-06, |
|
"loss": 4.1544, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.845794392523365e-06, |
|
"loss": 4.3043, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.838006230529596e-06, |
|
"loss": 4.1277, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.8302180685358264e-06, |
|
"loss": 4.2267, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.822429906542056e-06, |
|
"loss": 4.0569, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.814641744548287e-06, |
|
"loss": 4.1306, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.806853582554518e-06, |
|
"loss": 4.1544, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.799065420560749e-06, |
|
"loss": 4.163, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.791277258566978e-06, |
|
"loss": 4.1422, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.783489096573209e-06, |
|
"loss": 4.0082, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.77570093457944e-06, |
|
"loss": 4.1074, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.767912772585671e-06, |
|
"loss": 4.2203, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.760124610591901e-06, |
|
"loss": 3.9526, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.752336448598131e-06, |
|
"loss": 4.1534, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.744548286604362e-06, |
|
"loss": 3.8842, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.736760124610593e-06, |
|
"loss": 4.0548, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.728971962616823e-06, |
|
"loss": 4.1211, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.7211838006230535e-06, |
|
"loss": 4.3269, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.713395638629284e-06, |
|
"loss": 4.2704, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.705607476635515e-06, |
|
"loss": 4.1032, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.697819314641744e-06, |
|
"loss": 4.0135, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.690031152647976e-06, |
|
"loss": 4.0584, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.682242990654206e-06, |
|
"loss": 4.4133, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.674454828660437e-06, |
|
"loss": 4.2614, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 4.1323, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.658878504672898e-06, |
|
"loss": 4.0976, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.651090342679129e-06, |
|
"loss": 4.3011, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.643302180685359e-06, |
|
"loss": 4.2862, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.635514018691589e-06, |
|
"loss": 4.3075, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.6277258566978195e-06, |
|
"loss": 4.0608, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.619937694704051e-06, |
|
"loss": 3.9009, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.6121495327102815e-06, |
|
"loss": 3.9526, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.604361370716511e-06, |
|
"loss": 4.2061, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.596573208722742e-06, |
|
"loss": 4.2188, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.588785046728972e-06, |
|
"loss": 4.2515, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.580996884735204e-06, |
|
"loss": 4.3102, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.5732087227414335e-06, |
|
"loss": 4.1677, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.565420560747664e-06, |
|
"loss": 4.3255, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.557632398753895e-06, |
|
"loss": 4.3894, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.549844236760126e-06, |
|
"loss": 3.8381, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.542056074766355e-06, |
|
"loss": 4.3644, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.534267912772586e-06, |
|
"loss": 5.1024, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.526479750778817e-06, |
|
"loss": 3.9585, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.5186915887850474e-06, |
|
"loss": 4.1528, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.510903426791277e-06, |
|
"loss": 4.2389, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.5031152647975086e-06, |
|
"loss": 4.2541, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.495327102803739e-06, |
|
"loss": 3.9615, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.48753894080997e-06, |
|
"loss": 4.3285, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.479750778816199e-06, |
|
"loss": 4.0789, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.47196261682243e-06, |
|
"loss": 4.2419, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.464174454828661e-06, |
|
"loss": 3.964, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.456386292834892e-06, |
|
"loss": 4.2719, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.448598130841122e-06, |
|
"loss": 4.0885, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.440809968847352e-06, |
|
"loss": 3.9977, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.433021806853583e-06, |
|
"loss": 4.2037, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.425233644859814e-06, |
|
"loss": 4.0194, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.417445482866044e-06, |
|
"loss": 4.0991, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.4096573208722745e-06, |
|
"loss": 4.1482, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.401869158878505e-06, |
|
"loss": 4.0335, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.3940809968847365e-06, |
|
"loss": 4.1876, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.386292834890966e-06, |
|
"loss": 4.1106, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.378504672897197e-06, |
|
"loss": 4.119, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.370716510903427e-06, |
|
"loss": 4.055, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.362928348909658e-06, |
|
"loss": 4.1879, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.355140186915888e-06, |
|
"loss": 4.0615, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.347352024922119e-06, |
|
"loss": 4.2151, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.33956386292835e-06, |
|
"loss": 4.2856, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.33177570093458e-06, |
|
"loss": 4.2555, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.32398753894081e-06, |
|
"loss": 4.1419, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.3161993769470405e-06, |
|
"loss": 4.4471, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.308411214953272e-06, |
|
"loss": 4.1127, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.3006230529595025e-06, |
|
"loss": 3.8977, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.292834890965732e-06, |
|
"loss": 4.0435, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.285046728971963e-06, |
|
"loss": 3.8639, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.277258566978194e-06, |
|
"loss": 3.8992, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.269470404984425e-06, |
|
"loss": 4.2917, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.2616822429906544e-06, |
|
"loss": 4.2252, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.253894080996885e-06, |
|
"loss": 4.0874, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.246105919003116e-06, |
|
"loss": 4.0801, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.238317757009347e-06, |
|
"loss": 4.2029, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.230529595015577e-06, |
|
"loss": 4.1829, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.222741433021807e-06, |
|
"loss": 4.3305, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.214953271028038e-06, |
|
"loss": 4.0888, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.207165109034268e-06, |
|
"loss": 4.1067, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.199376947040498e-06, |
|
"loss": 4.6241, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.1915887850467296e-06, |
|
"loss": 4.32, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.18380062305296e-06, |
|
"loss": 4.2275, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.176012461059191e-06, |
|
"loss": 4.1988, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.16822429906542e-06, |
|
"loss": 3.833, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.160436137071652e-06, |
|
"loss": 4.2188, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.152647975077882e-06, |
|
"loss": 4.8919, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.144859813084113e-06, |
|
"loss": 4.4446, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.137071651090343e-06, |
|
"loss": 4.5516, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"eval_loss": 4.193282127380371, |
|
"eval_runtime": 967.5229, |
|
"eval_samples_per_second": 2.731, |
|
"eval_steps_per_second": 0.342, |
|
"eval_wer": 1.9631495438318127, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.129283489096573e-06, |
|
"loss": 4.3788, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.121495327102805e-06, |
|
"loss": 4.1797, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.113707165109035e-06, |
|
"loss": 4.2658, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.105919003115265e-06, |
|
"loss": 4.1873, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.0981308411214955e-06, |
|
"loss": 4.1803, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.090342679127726e-06, |
|
"loss": 4.2236, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.0825545171339575e-06, |
|
"loss": 4.0233, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.074766355140187e-06, |
|
"loss": 4.256, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.066978193146418e-06, |
|
"loss": 4.1934, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.059190031152648e-06, |
|
"loss": 4.0831, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.05140186915888e-06, |
|
"loss": 4.2089, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.0436137071651095e-06, |
|
"loss": 4.1421, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.03582554517134e-06, |
|
"loss": 4.2325, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.028037383177571e-06, |
|
"loss": 4.2433, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.020249221183801e-06, |
|
"loss": 4.4532, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.012461059190031e-06, |
|
"loss": 4.1758, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.004672897196262e-06, |
|
"loss": 4.3027, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 5.996884735202493e-06, |
|
"loss": 4.2042, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 5.9890965732087235e-06, |
|
"loss": 4.0865, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 5.981308411214953e-06, |
|
"loss": 4.1832, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 5.973520249221184e-06, |
|
"loss": 4.1642, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 5.965732087227415e-06, |
|
"loss": 4.2137, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 5.957943925233646e-06, |
|
"loss": 4.1632, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 5.9501557632398754e-06, |
|
"loss": 4.1937, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 5.942367601246106e-06, |
|
"loss": 4.0946, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.9345794392523374e-06, |
|
"loss": 3.8531, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.926791277258568e-06, |
|
"loss": 4.2643, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.919003115264798e-06, |
|
"loss": 4.1425, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.911214953271028e-06, |
|
"loss": 4.1611, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.903426791277259e-06, |
|
"loss": 4.2984, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.89563862928349e-06, |
|
"loss": 4.1692, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.88785046728972e-06, |
|
"loss": 4.0564, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.8800623052959505e-06, |
|
"loss": 4.2466, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.872274143302181e-06, |
|
"loss": 3.9432, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.864485981308412e-06, |
|
"loss": 4.129, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.856697819314641e-06, |
|
"loss": 4.1524, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.848909657320873e-06, |
|
"loss": 4.1989, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.841121495327103e-06, |
|
"loss": 4.2409, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.833333333333334e-06, |
|
"loss": 4.2857, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.825545171339564e-06, |
|
"loss": 4.3158, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.817757009345795e-06, |
|
"loss": 4.4024, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.809968847352026e-06, |
|
"loss": 4.4, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.802180685358256e-06, |
|
"loss": 4.4728, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.794392523364486e-06, |
|
"loss": 4.2102, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.7866043613707165e-06, |
|
"loss": 4.3307, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.778816199376948e-06, |
|
"loss": 4.2995, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.7710280373831785e-06, |
|
"loss": 4.2172, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.763239875389408e-06, |
|
"loss": 4.134, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.755451713395639e-06, |
|
"loss": 4.3218, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.747663551401869e-06, |
|
"loss": 5.1369, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.739875389408101e-06, |
|
"loss": 4.4825, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.7320872274143305e-06, |
|
"loss": 4.136, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.724299065420561e-06, |
|
"loss": 4.066, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.716510903426792e-06, |
|
"loss": 3.9173, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.708722741433023e-06, |
|
"loss": 4.0663, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.700934579439253e-06, |
|
"loss": 4.1526, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.693146417445483e-06, |
|
"loss": 4.3578, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.685358255451714e-06, |
|
"loss": 4.1278, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.6775700934579444e-06, |
|
"loss": 4.1886, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.669781931464174e-06, |
|
"loss": 4.0427, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.661993769470406e-06, |
|
"loss": 4.0473, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.654205607476636e-06, |
|
"loss": 4.2464, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.646417445482867e-06, |
|
"loss": 4.3286, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.6386292834890964e-06, |
|
"loss": 3.8247, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.630841121495327e-06, |
|
"loss": 4.1617, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.623052959501558e-06, |
|
"loss": 3.852, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.615264797507789e-06, |
|
"loss": 4.2365, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.607476635514019e-06, |
|
"loss": 4.06, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.599688473520249e-06, |
|
"loss": 4.358, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.591900311526481e-06, |
|
"loss": 3.9511, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.584112149532711e-06, |
|
"loss": 4.1684, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.576323987538941e-06, |
|
"loss": 3.9133, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.5685358255451715e-06, |
|
"loss": 4.0064, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.560747663551402e-06, |
|
"loss": 4.1022, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.5529595015576335e-06, |
|
"loss": 4.1316, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.545171339563863e-06, |
|
"loss": 4.2525, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.537383177570094e-06, |
|
"loss": 4.147, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.529595015576324e-06, |
|
"loss": 4.1609, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.521806853582555e-06, |
|
"loss": 4.133, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.514018691588785e-06, |
|
"loss": 4.3225, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.506230529595016e-06, |
|
"loss": 4.1003, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.498442367601247e-06, |
|
"loss": 4.1658, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.490654205607477e-06, |
|
"loss": 3.9802, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.482866043613707e-06, |
|
"loss": 4.2052, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.4750778816199375e-06, |
|
"loss": 4.0707, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.467289719626169e-06, |
|
"loss": 4.2826, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.4595015576323995e-06, |
|
"loss": 4.379, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.451713395638629e-06, |
|
"loss": 4.2049, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.44392523364486e-06, |
|
"loss": 4.3123, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.436137071651091e-06, |
|
"loss": 4.1278, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.428348909657322e-06, |
|
"loss": 4.0756, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.4205607476635515e-06, |
|
"loss": 4.2862, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.412772585669782e-06, |
|
"loss": 4.3933, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.404984423676013e-06, |
|
"loss": 4.2723, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.397196261682244e-06, |
|
"loss": 4.3168, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.389408099688474e-06, |
|
"loss": 4.6031, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.381619937694704e-06, |
|
"loss": 4.3307, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.373831775700935e-06, |
|
"loss": 4.9645, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.3660436137071654e-06, |
|
"loss": 4.4141, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.358255451713395e-06, |
|
"loss": 4.7846, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.350467289719627e-06, |
|
"loss": 3.7951, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.342679127725857e-06, |
|
"loss": 4.3364, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.334890965732088e-06, |
|
"loss": 4.2048, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.3271028037383174e-06, |
|
"loss": 3.9806, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.319314641744549e-06, |
|
"loss": 3.9552, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.311526479750779e-06, |
|
"loss": 4.0356, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.30373831775701e-06, |
|
"loss": 3.8279, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.29595015576324e-06, |
|
"loss": 4.0355, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.28816199376947e-06, |
|
"loss": 4.2657, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.280373831775702e-06, |
|
"loss": 4.1918, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.272585669781932e-06, |
|
"loss": 3.9591, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.264797507788162e-06, |
|
"loss": 4.3648, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.2570093457943925e-06, |
|
"loss": 3.8308, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.249221183800623e-06, |
|
"loss": 4.2053, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.2414330218068545e-06, |
|
"loss": 4.3564, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.233644859813084e-06, |
|
"loss": 4.2272, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.225856697819315e-06, |
|
"loss": 4.1514, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.218068535825545e-06, |
|
"loss": 4.1553, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.210280373831777e-06, |
|
"loss": 4.1104, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.2024922118380065e-06, |
|
"loss": 4.0083, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.194704049844237e-06, |
|
"loss": 4.2151, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.186915887850468e-06, |
|
"loss": 4.1123, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.179127725856698e-06, |
|
"loss": 3.9473, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.171339563862928e-06, |
|
"loss": 4.0857, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.163551401869159e-06, |
|
"loss": 4.3375, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.15576323987539e-06, |
|
"loss": 3.9099, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.1479750778816205e-06, |
|
"loss": 4.0526, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.14018691588785e-06, |
|
"loss": 4.4883, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.132398753894081e-06, |
|
"loss": 3.8129, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.124610591900312e-06, |
|
"loss": 4.4434, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.116822429906543e-06, |
|
"loss": 4.3206, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.1090342679127725e-06, |
|
"loss": 4.1311, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.101246105919003e-06, |
|
"loss": 3.9958, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.0934579439252344e-06, |
|
"loss": 4.2079, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.085669781931465e-06, |
|
"loss": 4.4248, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.077881619937695e-06, |
|
"loss": 4.1655, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.070093457943925e-06, |
|
"loss": 4.1402, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.062305295950156e-06, |
|
"loss": 4.3322, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.054517133956387e-06, |
|
"loss": 4.0982, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.046728971962617e-06, |
|
"loss": 4.1337, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.0389408099688476e-06, |
|
"loss": 4.1508, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.031152647975078e-06, |
|
"loss": 4.5215, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.023364485981309e-06, |
|
"loss": 4.3198, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.0155763239875384e-06, |
|
"loss": 4.4469, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.00778816199377e-06, |
|
"loss": 4.223, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5e-06, |
|
"loss": 4.312, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.992211838006231e-06, |
|
"loss": 4.2913, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.9844236760124615e-06, |
|
"loss": 4.0841, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.976635514018692e-06, |
|
"loss": 4.0537, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.968847352024923e-06, |
|
"loss": 4.2846, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.961059190031153e-06, |
|
"loss": 4.0292, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.953271028037384e-06, |
|
"loss": 4.1775, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.945482866043614e-06, |
|
"loss": 4.1541, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.937694704049845e-06, |
|
"loss": 4.3, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.9299065420560755e-06, |
|
"loss": 4.3025, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.922118380062306e-06, |
|
"loss": 4.2158, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.914330218068537e-06, |
|
"loss": 4.1281, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.906542056074766e-06, |
|
"loss": 4.1585, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.898753894080998e-06, |
|
"loss": 4.0229, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.8909657320872275e-06, |
|
"loss": 3.938, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.883177570093459e-06, |
|
"loss": 4.3063, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.875389408099689e-06, |
|
"loss": 4.1413, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.86760124610592e-06, |
|
"loss": 4.1543, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.85981308411215e-06, |
|
"loss": 4.0323, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.85202492211838e-06, |
|
"loss": 4.0991, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.844236760124611e-06, |
|
"loss": 4.2563, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.8364485981308415e-06, |
|
"loss": 4.0773, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.828660436137072e-06, |
|
"loss": 3.885, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.820872274143303e-06, |
|
"loss": 4.2737, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.813084112149533e-06, |
|
"loss": 4.3106, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.805295950155764e-06, |
|
"loss": 3.8121, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.797507788161994e-06, |
|
"loss": 3.9958, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.789719626168225e-06, |
|
"loss": 4.3303, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.7819314641744554e-06, |
|
"loss": 4.0122, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.774143302180686e-06, |
|
"loss": 3.9509, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.766355140186917e-06, |
|
"loss": 4.2171, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.758566978193147e-06, |
|
"loss": 4.2888, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.750778816199378e-06, |
|
"loss": 4.1918, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.742990654205608e-06, |
|
"loss": 3.8042, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.735202492211838e-06, |
|
"loss": 4.1654, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.727414330218069e-06, |
|
"loss": 4.0167, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.719626168224299e-06, |
|
"loss": 4.061, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.7118380062305305e-06, |
|
"loss": 4.0102, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.70404984423676e-06, |
|
"loss": 4.0291, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.696261682242992e-06, |
|
"loss": 4.5174, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.688473520249221e-06, |
|
"loss": 3.9473, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.680685358255452e-06, |
|
"loss": 4.2058, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.6728971962616825e-06, |
|
"loss": 4.0955, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.665109034267913e-06, |
|
"loss": 3.9993, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.657320872274144e-06, |
|
"loss": 3.8747, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.649532710280374e-06, |
|
"loss": 4.248, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.641744548286605e-06, |
|
"loss": 4.2143, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.633956386292835e-06, |
|
"loss": 4.6084, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.626168224299066e-06, |
|
"loss": 4.2167, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.6183800623052965e-06, |
|
"loss": 4.271, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.610591900311527e-06, |
|
"loss": 4.0273, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.602803738317758e-06, |
|
"loss": 4.1442, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.595015576323988e-06, |
|
"loss": 4.4688, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.587227414330219e-06, |
|
"loss": 4.3755, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.579439252336449e-06, |
|
"loss": 4.0521, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.57165109034268e-06, |
|
"loss": 4.0854, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.56386292834891e-06, |
|
"loss": 4.0831, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.556074766355141e-06, |
|
"loss": 4.0433, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.548286604361371e-06, |
|
"loss": 3.9098, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.540498442367602e-06, |
|
"loss": 3.9166, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.532710280373832e-06, |
|
"loss": 4.1898, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.524922118380063e-06, |
|
"loss": 4.3004, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.517133956386293e-06, |
|
"loss": 4.0949, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.509345794392524e-06, |
|
"loss": 4.026, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.501557632398754e-06, |
|
"loss": 3.9044, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.493769470404985e-06, |
|
"loss": 4.2022, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.485981308411215e-06, |
|
"loss": 4.1752, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.478193146417446e-06, |
|
"loss": 4.2165, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.4704049844236764e-06, |
|
"loss": 4.0323, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.462616822429907e-06, |
|
"loss": 4.1113, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.4548286604361376e-06, |
|
"loss": 4.1311, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.447040498442368e-06, |
|
"loss": 4.0043, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.439252336448599e-06, |
|
"loss": 3.9933, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.431464174454829e-06, |
|
"loss": 3.9222, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.42367601246106e-06, |
|
"loss": 4.2463, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.41588785046729e-06, |
|
"loss": 4.1395, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.40809968847352e-06, |
|
"loss": 3.9082, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.4003115264797515e-06, |
|
"loss": 4.2575, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.392523364485981e-06, |
|
"loss": 4.0382, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.384735202492213e-06, |
|
"loss": 4.178, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.376947040498442e-06, |
|
"loss": 4.2366, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.369158878504674e-06, |
|
"loss": 4.1046, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.3613707165109035e-06, |
|
"loss": 4.0908, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.353582554517134e-06, |
|
"loss": 4.1297, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.345794392523365e-06, |
|
"loss": 4.1482, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.338006230529595e-06, |
|
"loss": 4.1762, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.330218068535826e-06, |
|
"loss": 4.0023, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.322429906542056e-06, |
|
"loss": 4.1147, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.314641744548287e-06, |
|
"loss": 4.0022, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.3068535825545175e-06, |
|
"loss": 4.4228, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.299065420560748e-06, |
|
"loss": 4.1197, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.291277258566979e-06, |
|
"loss": 4.0508, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.283489096573209e-06, |
|
"loss": 3.9716, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.27570093457944e-06, |
|
"loss": 4.2264, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.26791277258567e-06, |
|
"loss": 4.2202, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.260124610591901e-06, |
|
"loss": 4.2557, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.2523364485981315e-06, |
|
"loss": 4.5929, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.244548286604362e-06, |
|
"loss": 4.1721, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.236760124610592e-06, |
|
"loss": 4.2718, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.228971962616823e-06, |
|
"loss": 3.9251, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.221183800623053e-06, |
|
"loss": 4.4249, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.213395638629284e-06, |
|
"loss": 4.3426, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.205607476635514e-06, |
|
"loss": 4.2281, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.1978193146417454e-06, |
|
"loss": 4.4998, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.190031152647975e-06, |
|
"loss": 4.7019, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.182242990654206e-06, |
|
"loss": 4.2562, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.174454828660436e-06, |
|
"loss": 3.9828, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.166666666666667e-06, |
|
"loss": 4.2231, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.1588785046728974e-06, |
|
"loss": 4.2046, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.151090342679128e-06, |
|
"loss": 4.3099, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.1433021806853586e-06, |
|
"loss": 4.1074, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.135514018691589e-06, |
|
"loss": 4.0279, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.12772585669782e-06, |
|
"loss": 3.8578, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.11993769470405e-06, |
|
"loss": 4.114, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.112149532710281e-06, |
|
"loss": 4.4181, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.104361370716511e-06, |
|
"loss": 3.9594, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.096573208722742e-06, |
|
"loss": 4.2227, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.0887850467289725e-06, |
|
"loss": 4.282, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.080996884735203e-06, |
|
"loss": 4.1333, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.073208722741434e-06, |
|
"loss": 4.2538, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.065420560747663e-06, |
|
"loss": 4.2075, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.057632398753895e-06, |
|
"loss": 3.9943, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.0498442367601245e-06, |
|
"loss": 4.0465, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.042056074766356e-06, |
|
"loss": 4.25, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.034267912772586e-06, |
|
"loss": 4.0239, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.026479750778817e-06, |
|
"loss": 4.0169, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.018691588785047e-06, |
|
"loss": 4.0392, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.010903426791277e-06, |
|
"loss": 4.3259, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.003115264797508e-06, |
|
"loss": 4.0951, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.9953271028037385e-06, |
|
"loss": 4.1215, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.987538940809969e-06, |
|
"loss": 4.3889, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.9797507788162e-06, |
|
"loss": 4.2716, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.97196261682243e-06, |
|
"loss": 4.1846, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.964174454828661e-06, |
|
"loss": 4.3184, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.956386292834891e-06, |
|
"loss": 4.2463, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.948598130841122e-06, |
|
"loss": 4.2335, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.9408099688473525e-06, |
|
"loss": 4.1308, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.933021806853583e-06, |
|
"loss": 4.3037, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.925233644859814e-06, |
|
"loss": 4.1217, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.917445482866044e-06, |
|
"loss": 4.1258, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.909657320872275e-06, |
|
"loss": 4.2059, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.901869158878505e-06, |
|
"loss": 4.1726, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.894080996884735e-06, |
|
"loss": 4.1503, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.8862928348909664e-06, |
|
"loss": 4.1097, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.878504672897196e-06, |
|
"loss": 3.8494, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.8707165109034276e-06, |
|
"loss": 4.2896, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.862928348909657e-06, |
|
"loss": 4.2214, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.855140186915889e-06, |
|
"loss": 4.5536, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.8473520249221184e-06, |
|
"loss": 4.0409, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.839563862928349e-06, |
|
"loss": 4.0086, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.8317757009345796e-06, |
|
"loss": 3.9713, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.82398753894081e-06, |
|
"loss": 4.0987, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.816199376947041e-06, |
|
"loss": 3.8121, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.8084112149532717e-06, |
|
"loss": 4.605, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.800623052959502e-06, |
|
"loss": 3.9685, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.7928348909657324e-06, |
|
"loss": 4.1792, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.785046728971963e-06, |
|
"loss": 4.3046, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.7772585669781935e-06, |
|
"loss": 4.1729, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.7694704049844237e-06, |
|
"loss": 4.1799, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.7616822429906547e-06, |
|
"loss": 4.0123, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.753894080996885e-06, |
|
"loss": 4.1203, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.746105919003116e-06, |
|
"loss": 4.1308, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.738317757009346e-06, |
|
"loss": 3.9784, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.730529595015577e-06, |
|
"loss": 4.1335, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.722741433021807e-06, |
|
"loss": 3.9524, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.7149532710280376e-06, |
|
"loss": 4.0933, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.7071651090342682e-06, |
|
"loss": 4.2002, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.6993769470404988e-06, |
|
"loss": 4.13, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.691588785046729e-06, |
|
"loss": 3.9745, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.68380062305296e-06, |
|
"loss": 4.3192, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.67601246105919e-06, |
|
"loss": 4.0568, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.668224299065421e-06, |
|
"loss": 4.0708, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.660436137071651e-06, |
|
"loss": 4.4502, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.652647975077882e-06, |
|
"loss": 4.0721, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.6448598130841123e-06, |
|
"loss": 3.8307, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.637071651090343e-06, |
|
"loss": 3.8618, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.6292834890965735e-06, |
|
"loss": 4.0824, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.621495327102804e-06, |
|
"loss": 4.032, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.6137071651090346e-06, |
|
"loss": 4.1028, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.605919003115265e-06, |
|
"loss": 4.0867, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.5981308411214953e-06, |
|
"loss": 3.9956, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.5903426791277263e-06, |
|
"loss": 3.8514, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.5825545171339564e-06, |
|
"loss": 3.9808, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.5747663551401874e-06, |
|
"loss": 4.2346, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.5669781931464176e-06, |
|
"loss": 4.1826, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.5591900311526486e-06, |
|
"loss": 3.9369, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.5514018691588787e-06, |
|
"loss": 3.992, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.5436137071651093e-06, |
|
"loss": 4.1111, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.53582554517134e-06, |
|
"loss": 4.168, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.5280373831775704e-06, |
|
"loss": 3.9429, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.5202492211838006e-06, |
|
"loss": 4.0715, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.5124610591900315e-06, |
|
"loss": 4.2575, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.5046728971962617e-06, |
|
"loss": 4.3182, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.4968847352024927e-06, |
|
"loss": 4.3777, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.489096573208723e-06, |
|
"loss": 4.3152, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.481308411214954e-06, |
|
"loss": 4.351, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.473520249221184e-06, |
|
"loss": 4.0735, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.4657320872274145e-06, |
|
"loss": 4.3591, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.457943925233645e-06, |
|
"loss": 4.0272, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.4501557632398757e-06, |
|
"loss": 4.2471, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.4423676012461062e-06, |
|
"loss": 4.3355, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.434579439252337e-06, |
|
"loss": 4.5643, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.426791277258567e-06, |
|
"loss": 4.1789, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.419003115264798e-06, |
|
"loss": 4.1271, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.411214953271028e-06, |
|
"loss": 4.6198, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.403426791277259e-06, |
|
"loss": 4.3066, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.395638629283489e-06, |
|
"loss": 4.2441, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.38785046728972e-06, |
|
"loss": 4.1849, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.3800623052959503e-06, |
|
"loss": 4.2089, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.372274143302181e-06, |
|
"loss": 4.0637, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.3644859813084115e-06, |
|
"loss": 4.3941, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.356697819314642e-06, |
|
"loss": 3.9033, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.348909657320872e-06, |
|
"loss": 4.2305, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.341121495327103e-06, |
|
"loss": 4.2735, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 4.5399, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.3255451713395643e-06, |
|
"loss": 4.1047, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.3177570093457945e-06, |
|
"loss": 4.01, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.3099688473520254e-06, |
|
"loss": 4.0298, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.3021806853582556e-06, |
|
"loss": 4.4198, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.294392523364486e-06, |
|
"loss": 4.0963, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.2866043613707167e-06, |
|
"loss": 3.8943, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.2788161993769473e-06, |
|
"loss": 4.1648, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.2710280373831774e-06, |
|
"loss": 3.9851, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.2632398753894084e-06, |
|
"loss": 4.3469, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.2554517133956386e-06, |
|
"loss": 4.3285, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.2476635514018696e-06, |
|
"loss": 4.2392, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.2398753894080997e-06, |
|
"loss": 3.9897, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.2320872274143307e-06, |
|
"loss": 4.1456, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.224299065420561e-06, |
|
"loss": 4.2401, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.2165109034267914e-06, |
|
"loss": 3.8848, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.208722741433022e-06, |
|
"loss": 3.9093, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.2009345794392525e-06, |
|
"loss": 4.0863, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.193146417445483e-06, |
|
"loss": 4.0817, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.1853582554517137e-06, |
|
"loss": 4.1446, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.177570093457944e-06, |
|
"loss": 4.0396, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.169781931464175e-06, |
|
"loss": 3.8341, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.161993769470405e-06, |
|
"loss": 4.3515, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.154205607476636e-06, |
|
"loss": 4.0265, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.146417445482866e-06, |
|
"loss": 4.2841, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.138629283489097e-06, |
|
"loss": 4.3325, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.1308411214953272e-06, |
|
"loss": 4.2112, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.123052959501558e-06, |
|
"loss": 4.2354, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.1152647975077884e-06, |
|
"loss": 4.3062, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.107476635514019e-06, |
|
"loss": 4.1306, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.099688473520249e-06, |
|
"loss": 4.1757, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.09190031152648e-06, |
|
"loss": 3.9582, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.08411214953271e-06, |
|
"loss": 4.1137, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.076323987538941e-06, |
|
"loss": 4.2392, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.0685358255451713e-06, |
|
"loss": 4.4224, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.0607476635514023e-06, |
|
"loss": 4.2239, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.0529595015576325e-06, |
|
"loss": 4.2015, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.045171339563863e-06, |
|
"loss": 4.2652, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.0373831775700936e-06, |
|
"loss": 4.218, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.029595015576324e-06, |
|
"loss": 4.3943, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.0218068535825547e-06, |
|
"loss": 4.4681, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.0140186915887853e-06, |
|
"loss": 4.0222, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.0062305295950155e-06, |
|
"loss": 4.1626, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.9984423676012464e-06, |
|
"loss": 4.046, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.9906542056074766e-06, |
|
"loss": 3.968, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.9828660436137076e-06, |
|
"loss": 3.9713, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.9750778816199377e-06, |
|
"loss": 4.3331, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.9672897196261687e-06, |
|
"loss": 4.1618, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.959501557632399e-06, |
|
"loss": 3.9632, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.9517133956386294e-06, |
|
"loss": 4.0474, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.94392523364486e-06, |
|
"loss": 3.9065, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.9361370716510906e-06, |
|
"loss": 3.9736, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.9283489096573207e-06, |
|
"loss": 4.0034, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.9205607476635517e-06, |
|
"loss": 4.2439, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.912772585669782e-06, |
|
"loss": 4.169, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.904984423676013e-06, |
|
"loss": 4.2551, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.897196261682243e-06, |
|
"loss": 4.3709, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.889408099688474e-06, |
|
"loss": 4.1193, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.881619937694704e-06, |
|
"loss": 4.11, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.8738317757009347e-06, |
|
"loss": 4.1751, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.8660436137071652e-06, |
|
"loss": 3.9381, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.858255451713396e-06, |
|
"loss": 4.1451, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.8504672897196264e-06, |
|
"loss": 4.2336, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.842679127725857e-06, |
|
"loss": 4.1845, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.834890965732087e-06, |
|
"loss": 4.0748, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.827102803738318e-06, |
|
"loss": 4.2568, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.8193146417445482e-06, |
|
"loss": 4.2908, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.811526479750779e-06, |
|
"loss": 4.0898, |
|
"step": 1427 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.8037383177570094e-06, |
|
"loss": 4.0615, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.7959501557632403e-06, |
|
"loss": 4.2304, |
|
"step": 1429 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.7881619937694705e-06, |
|
"loss": 4.3306, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.780373831775701e-06, |
|
"loss": 4.1953, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.7725856697819316e-06, |
|
"loss": 4.2448, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.764797507788162e-06, |
|
"loss": 4.3706, |
|
"step": 1433 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.7570093457943923e-06, |
|
"loss": 4.0029, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.7492211838006233e-06, |
|
"loss": 3.9979, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.7414330218068535e-06, |
|
"loss": 3.9537, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.7336448598130845e-06, |
|
"loss": 4.3353, |
|
"step": 1437 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.7258566978193146e-06, |
|
"loss": 4.0604, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.7180685358255456e-06, |
|
"loss": 4.2371, |
|
"step": 1439 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.7102803738317757e-06, |
|
"loss": 4.1904, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.7024922118380063e-06, |
|
"loss": 4.1709, |
|
"step": 1441 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.694704049844237e-06, |
|
"loss": 4.0803, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.6869158878504674e-06, |
|
"loss": 4.0348, |
|
"step": 1443 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.6791277258566976e-06, |
|
"loss": 4.1705, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.6713395638629286e-06, |
|
"loss": 4.2714, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.6635514018691587e-06, |
|
"loss": 4.2779, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.6557632398753897e-06, |
|
"loss": 4.2695, |
|
"step": 1447 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.64797507788162e-06, |
|
"loss": 4.4568, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.640186915887851e-06, |
|
"loss": 4.4036, |
|
"step": 1449 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.632398753894081e-06, |
|
"loss": 4.2484, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.6246105919003116e-06, |
|
"loss": 4.1209, |
|
"step": 1451 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.616822429906542e-06, |
|
"loss": 4.1262, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.6090342679127727e-06, |
|
"loss": 3.9908, |
|
"step": 1453 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.6012461059190033e-06, |
|
"loss": 3.9344, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.593457943925234e-06, |
|
"loss": 3.9987, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.585669781931464e-06, |
|
"loss": 4.0693, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.577881619937695e-06, |
|
"loss": 4.33, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.570093457943925e-06, |
|
"loss": 4.135, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.562305295950156e-06, |
|
"loss": 4.0135, |
|
"step": 1459 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.5545171339563862e-06, |
|
"loss": 4.1845, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.5467289719626172e-06, |
|
"loss": 4.0899, |
|
"step": 1461 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.5389408099688474e-06, |
|
"loss": 4.2124, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.531152647975078e-06, |
|
"loss": 4.1841, |
|
"step": 1463 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.5233644859813085e-06, |
|
"loss": 4.1286, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.515576323987539e-06, |
|
"loss": 4.3402, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.5077881619937692e-06, |
|
"loss": 3.8669, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.5e-06, |
|
"loss": 4.0975, |
|
"step": 1467 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.4922118380062308e-06, |
|
"loss": 4.1772, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.4844236760124613e-06, |
|
"loss": 3.8224, |
|
"step": 1469 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.476635514018692e-06, |
|
"loss": 3.9468, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.4688473520249225e-06, |
|
"loss": 4.0886, |
|
"step": 1471 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.461059190031153e-06, |
|
"loss": 3.8841, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.453271028037383e-06, |
|
"loss": 4.0842, |
|
"step": 1473 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.4454828660436138e-06, |
|
"loss": 3.8286, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.4376947040498443e-06, |
|
"loss": 4.157, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.429906542056075e-06, |
|
"loss": 4.1427, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.4221183800623055e-06, |
|
"loss": 4.2129, |
|
"step": 1477 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.414330218068536e-06, |
|
"loss": 3.8774, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.4065420560747666e-06, |
|
"loss": 4.1796, |
|
"step": 1479 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.398753894080997e-06, |
|
"loss": 3.8954, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.3909657320872277e-06, |
|
"loss": 4.086, |
|
"step": 1481 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.3831775700934583e-06, |
|
"loss": 4.1096, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.375389408099689e-06, |
|
"loss": 4.1745, |
|
"step": 1483 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.367601246105919e-06, |
|
"loss": 3.7705, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.3598130841121496e-06, |
|
"loss": 4.1379, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.35202492211838e-06, |
|
"loss": 4.2426, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.3442367601246107e-06, |
|
"loss": 4.3024, |
|
"step": 1487 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.3364485981308413e-06, |
|
"loss": 4.2027, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.328660436137072e-06, |
|
"loss": 4.2042, |
|
"step": 1489 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.3208722741433024e-06, |
|
"loss": 3.9994, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.313084112149533e-06, |
|
"loss": 4.4417, |
|
"step": 1491 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.3052959501557635e-06, |
|
"loss": 4.1388, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.297507788161994e-06, |
|
"loss": 3.8407, |
|
"step": 1493 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.2897196261682247e-06, |
|
"loss": 3.9508, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.281931464174455e-06, |
|
"loss": 4.1427, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.2741433021806854e-06, |
|
"loss": 4.5171, |
|
"step": 1496 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.266355140186916e-06, |
|
"loss": 4.2111, |
|
"step": 1497 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.2585669781931465e-06, |
|
"loss": 4.5228, |
|
"step": 1498 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.250778816199377e-06, |
|
"loss": 4.0727, |
|
"step": 1499 |
|
}, |
|
{
"epoch": 0.84,
"learning_rate": 2.2429906542056077e-06,
"loss": 4.3244,
"step": 1500
},
{
"epoch": 0.84,
"eval_loss": 4.163667678833008,
"eval_runtime": 937.5679,
"eval_samples_per_second": 2.818,
"eval_steps_per_second": 0.353,
"eval_wer": 1.9677310591035304,
"step": 1500
},
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.2352024922118382e-06, |
|
"loss": 4.2225, |
|
"step": 1501 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.2274143302180688e-06, |
|
"loss": 4.1976, |
|
"step": 1502 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.2196261682242994e-06, |
|
"loss": 4.1452, |
|
"step": 1503 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.21183800623053e-06, |
|
"loss": 4.0126, |
|
"step": 1504 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.20404984423676e-06, |
|
"loss": 4.2435, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.1962616822429906e-06, |
|
"loss": 4.0823, |
|
"step": 1506 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.188473520249221e-06, |
|
"loss": 4.0037, |
|
"step": 1507 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.1806853582554518e-06, |
|
"loss": 4.1042, |
|
"step": 1508 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.1728971962616823e-06, |
|
"loss": 4.1585, |
|
"step": 1509 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.165109034267913e-06, |
|
"loss": 4.115, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.1573208722741435e-06, |
|
"loss": 3.9339, |
|
"step": 1511 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.149532710280374e-06, |
|
"loss": 4.2524, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.1417445482866046e-06, |
|
"loss": 3.9215, |
|
"step": 1513 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.133956386292835e-06, |
|
"loss": 4.0955, |
|
"step": 1514 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.1261682242990657e-06, |
|
"loss": 4.0697, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.118380062305296e-06, |
|
"loss": 4.3513, |
|
"step": 1516 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.1105919003115264e-06, |
|
"loss": 4.0497, |
|
"step": 1517 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.102803738317757e-06, |
|
"loss": 4.202, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.0950155763239876e-06, |
|
"loss": 4.3147, |
|
"step": 1519 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.087227414330218e-06, |
|
"loss": 4.0343, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.0794392523364487e-06, |
|
"loss": 4.0309, |
|
"step": 1521 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.0716510903426793e-06, |
|
"loss": 4.0231, |
|
"step": 1522 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.06386292834891e-06, |
|
"loss": 4.2339, |
|
"step": 1523 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.0560747663551404e-06, |
|
"loss": 4.0068, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.048286604361371e-06, |
|
"loss": 4.0892, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.0404984423676016e-06, |
|
"loss": 3.954, |
|
"step": 1526 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.0327102803738317e-06, |
|
"loss": 4.0673, |
|
"step": 1527 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.0249221183800623e-06, |
|
"loss": 4.1037, |
|
"step": 1528 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.017133956386293e-06, |
|
"loss": 3.6814, |
|
"step": 1529 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.0093457943925234e-06, |
|
"loss": 3.9665, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.001557632398754e-06, |
|
"loss": 3.9757, |
|
"step": 1531 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.9937694704049845e-06, |
|
"loss": 3.881, |
|
"step": 1532 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.985981308411215e-06, |
|
"loss": 3.987, |
|
"step": 1533 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.9781931464174457e-06, |
|
"loss": 4.1228, |
|
"step": 1534 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.9704049844236762e-06, |
|
"loss": 4.0719, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.962616822429907e-06, |
|
"loss": 3.8651, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.9548286604361374e-06, |
|
"loss": 3.9827, |
|
"step": 1537 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.9470404984423675e-06, |
|
"loss": 4.3563, |
|
"step": 1538 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.939252336448598e-06, |
|
"loss": 4.0539, |
|
"step": 1539 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.9314641744548286e-06, |
|
"loss": 4.1672, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.9236760124610592e-06, |
|
"loss": 4.2428, |
|
"step": 1541 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.9158878504672898e-06, |
|
"loss": 3.8434, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.9080996884735203e-06, |
|
"loss": 4.2846, |
|
"step": 1543 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.900311526479751e-06, |
|
"loss": 4.1367, |
|
"step": 1544 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8925233644859815e-06, |
|
"loss": 4.483, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8847352024922118e-06, |
|
"loss": 3.9643, |
|
"step": 1546 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8769470404984424e-06, |
|
"loss": 4.3356, |
|
"step": 1547 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.869158878504673e-06, |
|
"loss": 4.2803, |
|
"step": 1548 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8613707165109035e-06, |
|
"loss": 4.5188, |
|
"step": 1549 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8535825545171341e-06, |
|
"loss": 3.9971, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8457943925233645e-06, |
|
"loss": 4.2894, |
|
"step": 1551 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.838006230529595e-06, |
|
"loss": 4.1488, |
|
"step": 1552 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8302180685358256e-06, |
|
"loss": 4.0819, |
|
"step": 1553 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8224299065420562e-06, |
|
"loss": 3.9617, |
|
"step": 1554 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8146417445482867e-06, |
|
"loss": 4.2379, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8068535825545173e-06, |
|
"loss": 4.4305, |
|
"step": 1556 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.7990654205607477e-06, |
|
"loss": 4.2211, |
|
"step": 1557 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.7912772585669782e-06, |
|
"loss": 4.2781, |
|
"step": 1558 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.7834890965732088e-06, |
|
"loss": 3.9446, |
|
"step": 1559 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.7757009345794394e-06, |
|
"loss": 4.1816, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.76791277258567e-06, |
|
"loss": 4.212, |
|
"step": 1561 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.7601246105919003e-06, |
|
"loss": 4.1423, |
|
"step": 1562 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.7523364485981308e-06, |
|
"loss": 4.2742, |
|
"step": 1563 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.7445482866043614e-06, |
|
"loss": 4.0014, |
|
"step": 1564 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.736760124610592e-06, |
|
"loss": 4.1963, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.7289719626168225e-06, |
|
"loss": 4.0056, |
|
"step": 1566 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.7211838006230531e-06, |
|
"loss": 3.8707, |
|
"step": 1567 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.7133956386292835e-06, |
|
"loss": 4.1321, |
|
"step": 1568 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.705607476635514e-06, |
|
"loss": 4.1964, |
|
"step": 1569 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6978193146417446e-06, |
|
"loss": 4.005, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6900311526479752e-06, |
|
"loss": 4.1227, |
|
"step": 1571 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6822429906542057e-06, |
|
"loss": 4.3301, |
|
"step": 1572 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.674454828660436e-06, |
|
"loss": 4.2632, |
|
"step": 1573 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6666666666666667e-06, |
|
"loss": 3.9618, |
|
"step": 1574 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6588785046728972e-06, |
|
"loss": 3.9232, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6510903426791278e-06, |
|
"loss": 4.0514, |
|
"step": 1576 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6433021806853584e-06, |
|
"loss": 3.9364, |
|
"step": 1577 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6355140186915887e-06, |
|
"loss": 4.1753, |
|
"step": 1578 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6277258566978193e-06, |
|
"loss": 4.1342, |
|
"step": 1579 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6199376947040499e-06, |
|
"loss": 4.296, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6121495327102804e-06, |
|
"loss": 4.2323, |
|
"step": 1581 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.604361370716511e-06, |
|
"loss": 4.2442, |
|
"step": 1582 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.5965732087227416e-06, |
|
"loss": 4.1366, |
|
"step": 1583 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.588785046728972e-06, |
|
"loss": 4.0628, |
|
"step": 1584 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.5809968847352025e-06, |
|
"loss": 4.023, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.573208722741433e-06, |
|
"loss": 4.0632, |
|
"step": 1586 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.5654205607476636e-06, |
|
"loss": 4.0573, |
|
"step": 1587 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.5576323987538942e-06, |
|
"loss": 4.3207, |
|
"step": 1588 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.5498442367601245e-06, |
|
"loss": 4.0209, |
|
"step": 1589 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.542056074766355e-06, |
|
"loss": 3.9834, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.5342679127725857e-06, |
|
"loss": 4.1624, |
|
"step": 1591 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.5264797507788162e-06, |
|
"loss": 4.0292, |
|
"step": 1592 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.5186915887850468e-06, |
|
"loss": 4.0384, |
|
"step": 1593 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.5109034267912774e-06, |
|
"loss": 4.2088, |
|
"step": 1594 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.5031152647975077e-06, |
|
"loss": 4.1387, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.4953271028037383e-06, |
|
"loss": 4.3971, |
|
"step": 1596 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.4875389408099689e-06, |
|
"loss": 4.2672, |
|
"step": 1597 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.4797507788161994e-06, |
|
"loss": 4.4688, |
|
"step": 1598 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.47196261682243e-06, |
|
"loss": 5.0842, |
|
"step": 1599 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.4641744548286604e-06, |
|
"loss": 4.7529, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.456386292834891e-06, |
|
"loss": 4.0651, |
|
"step": 1601 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.4485981308411215e-06, |
|
"loss": 3.9421, |
|
"step": 1602 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.440809968847352e-06, |
|
"loss": 4.0441, |
|
"step": 1603 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.4330218068535826e-06, |
|
"loss": 4.1879, |
|
"step": 1604 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.4252336448598132e-06, |
|
"loss": 3.9251, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.4174454828660435e-06, |
|
"loss": 4.18, |
|
"step": 1606 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.4096573208722741e-06, |
|
"loss": 4.0795, |
|
"step": 1607 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.4018691588785047e-06, |
|
"loss": 3.987, |
|
"step": 1608 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.3940809968847352e-06, |
|
"loss": 4.3028, |
|
"step": 1609 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.3862928348909658e-06, |
|
"loss": 3.9616, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.3785046728971962e-06, |
|
"loss": 4.3072, |
|
"step": 1611 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.3707165109034267e-06, |
|
"loss": 4.0095, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.3629283489096573e-06, |
|
"loss": 4.0879, |
|
"step": 1613 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.3551401869158879e-06, |
|
"loss": 3.9143, |
|
"step": 1614 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.3473520249221184e-06, |
|
"loss": 4.1465, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.3395638629283488e-06, |
|
"loss": 4.0208, |
|
"step": 1616 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.3317757009345794e-06, |
|
"loss": 4.1202, |
|
"step": 1617 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.32398753894081e-06, |
|
"loss": 4.0834, |
|
"step": 1618 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.3161993769470405e-06, |
|
"loss": 4.1028, |
|
"step": 1619 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.308411214953271e-06, |
|
"loss": 4.1615, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.3006230529595016e-06, |
|
"loss": 4.156, |
|
"step": 1621 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.292834890965732e-06, |
|
"loss": 3.9108, |
|
"step": 1622 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.2850467289719625e-06, |
|
"loss": 3.9513, |
|
"step": 1623 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.2772585669781931e-06, |
|
"loss": 3.9722, |
|
"step": 1624 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.2694704049844237e-06, |
|
"loss": 4.0898, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.2616822429906543e-06, |
|
"loss": 4.2171, |
|
"step": 1626 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.2538940809968846e-06, |
|
"loss": 4.1579, |
|
"step": 1627 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.2461059190031154e-06, |
|
"loss": 3.9798, |
|
"step": 1628 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.238317757009346e-06, |
|
"loss": 4.2394, |
|
"step": 1629 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.2305295950155765e-06, |
|
"loss": 3.9483, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.2227414330218069e-06, |
|
"loss": 4.1873, |
|
"step": 1631 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.2149532710280374e-06, |
|
"loss": 4.1605, |
|
"step": 1632 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.207165109034268e-06, |
|
"loss": 4.1107, |
|
"step": 1633 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1993769470404986e-06, |
|
"loss": 4.0791, |
|
"step": 1634 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1915887850467291e-06, |
|
"loss": 4.1572, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1838006230529595e-06, |
|
"loss": 4.0076, |
|
"step": 1636 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.17601246105919e-06, |
|
"loss": 4.1151, |
|
"step": 1637 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1682242990654206e-06, |
|
"loss": 4.1603, |
|
"step": 1638 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1604361370716512e-06, |
|
"loss": 3.9975, |
|
"step": 1639 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1526479750778818e-06, |
|
"loss": 4.1501, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1448598130841123e-06, |
|
"loss": 4.2186, |
|
"step": 1641 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1370716510903427e-06, |
|
"loss": 4.2127, |
|
"step": 1642 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1292834890965733e-06, |
|
"loss": 4.0611, |
|
"step": 1643 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1214953271028038e-06, |
|
"loss": 4.2406, |
|
"step": 1644 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1137071651090344e-06, |
|
"loss": 4.3732, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.105919003115265e-06, |
|
"loss": 4.5542, |
|
"step": 1646 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.0981308411214953e-06, |
|
"loss": 3.9714, |
|
"step": 1647 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.0903426791277259e-06, |
|
"loss": 4.6388, |
|
"step": 1648 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.0825545171339565e-06, |
|
"loss": 4.4492, |
|
"step": 1649 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.074766355140187e-06, |
|
"loss": 4.0948, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.0669781931464176e-06, |
|
"loss": 4.235, |
|
"step": 1651 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.059190031152648e-06, |
|
"loss": 4.1225, |
|
"step": 1652 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.0514018691588785e-06, |
|
"loss": 4.087, |
|
"step": 1653 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.043613707165109e-06, |
|
"loss": 4.0605, |
|
"step": 1654 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.0358255451713396e-06, |
|
"loss": 3.933, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.0280373831775702e-06, |
|
"loss": 4.0393, |
|
"step": 1656 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.0202492211838008e-06, |
|
"loss": 4.2088, |
|
"step": 1657 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.0124610591900311e-06, |
|
"loss": 4.1376, |
|
"step": 1658 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.0046728971962617e-06, |
|
"loss": 3.9999, |
|
"step": 1659 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.968847352024923e-07, |
|
"loss": 4.1607, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.890965732087228e-07, |
|
"loss": 4.2408, |
|
"step": 1661 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.813084112149534e-07, |
|
"loss": 4.2424, |
|
"step": 1662 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.735202492211838e-07, |
|
"loss": 3.8438, |
|
"step": 1663 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.657320872274143e-07, |
|
"loss": 4.1803, |
|
"step": 1664 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.579439252336449e-07, |
|
"loss": 3.9555, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.501557632398755e-07, |
|
"loss": 4.0786, |
|
"step": 1666 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.423676012461059e-07, |
|
"loss": 3.9794, |
|
"step": 1667 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.345794392523365e-07, |
|
"loss": 4.0234, |
|
"step": 1668 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.267912772585671e-07, |
|
"loss": 4.0982, |
|
"step": 1669 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.190031152647975e-07, |
|
"loss": 4.0215, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.112149532710281e-07, |
|
"loss": 4.0057, |
|
"step": 1671 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.034267912772586e-07, |
|
"loss": 4.186, |
|
"step": 1672 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 8.956386292834891e-07, |
|
"loss": 4.011, |
|
"step": 1673 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 8.878504672897197e-07, |
|
"loss": 4.1485, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 8.800623052959501e-07, |
|
"loss": 4.3454, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 8.722741433021807e-07, |
|
"loss": 4.0517, |
|
"step": 1676 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 8.644859813084113e-07, |
|
"loss": 3.8949, |
|
"step": 1677 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 8.566978193146417e-07, |
|
"loss": 4.1181, |
|
"step": 1678 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 8.489096573208723e-07, |
|
"loss": 4.2358, |
|
"step": 1679 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 8.411214953271029e-07, |
|
"loss": 4.2429, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 8.333333333333333e-07, |
|
"loss": 3.8293, |
|
"step": 1681 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 8.255451713395639e-07, |
|
"loss": 3.8545, |
|
"step": 1682 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 8.177570093457944e-07, |
|
"loss": 4.3273, |
|
"step": 1683 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 8.099688473520249e-07, |
|
"loss": 3.906, |
|
"step": 1684 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 8.021806853582555e-07, |
|
"loss": 4.2075, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.94392523364486e-07, |
|
"loss": 4.1783, |
|
"step": 1686 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.866043613707165e-07, |
|
"loss": 4.0099, |
|
"step": 1687 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.788161993769471e-07, |
|
"loss": 4.25, |
|
"step": 1688 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.710280373831776e-07, |
|
"loss": 4.1893, |
|
"step": 1689 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.632398753894081e-07, |
|
"loss": 3.9988, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.554517133956387e-07, |
|
"loss": 4.3922, |
|
"step": 1691 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.476635514018691e-07, |
|
"loss": 4.187, |
|
"step": 1692 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.398753894080997e-07, |
|
"loss": 4.4851, |
|
"step": 1693 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.320872274143302e-07, |
|
"loss": 4.4534, |
|
"step": 1694 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.242990654205607e-07, |
|
"loss": 4.4251, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.165109034267913e-07, |
|
"loss": 4.3853, |
|
"step": 1696 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.087227414330218e-07, |
|
"loss": 4.2489, |
|
"step": 1697 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.009345794392523e-07, |
|
"loss": 4.6089, |
|
"step": 1698 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 6.931464174454829e-07, |
|
"loss": 4.637, |
|
"step": 1699 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 6.853582554517134e-07, |
|
"loss": 4.5167, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 6.775700934579439e-07, |
|
"loss": 4.2051, |
|
"step": 1701 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 6.697819314641744e-07, |
|
"loss": 3.988, |
|
"step": 1702 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 6.61993769470405e-07, |
|
"loss": 3.9624, |
|
"step": 1703 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.542056074766355e-07, |
|
"loss": 3.9765, |
|
"step": 1704 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.46417445482866e-07, |
|
"loss": 4.0011, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.386292834890966e-07, |
|
"loss": 3.9486, |
|
"step": 1706 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.308411214953271e-07, |
|
"loss": 3.997, |
|
"step": 1707 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.230529595015577e-07, |
|
"loss": 4.1335, |
|
"step": 1708 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.152647975077883e-07, |
|
"loss": 4.2859, |
|
"step": 1709 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.074766355140187e-07, |
|
"loss": 4.2705, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.996884735202493e-07, |
|
"loss": 3.9762, |
|
"step": 1711 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.919003115264798e-07, |
|
"loss": 4.1652, |
|
"step": 1712 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.841121495327103e-07, |
|
"loss": 4.3689, |
|
"step": 1713 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.763239875389409e-07, |
|
"loss": 4.0613, |
|
"step": 1714 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.685358255451713e-07, |
|
"loss": 4.0976, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.607476635514019e-07, |
|
"loss": 4.1041, |
|
"step": 1716 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.529595015576325e-07, |
|
"loss": 4.4388, |
|
"step": 1717 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.451713395638629e-07, |
|
"loss": 4.3779, |
|
"step": 1718 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.373831775700935e-07, |
|
"loss": 3.809, |
|
"step": 1719 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.29595015576324e-07, |
|
"loss": 3.9718, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.218068535825545e-07, |
|
"loss": 4.1004, |
|
"step": 1721 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.140186915887851e-07, |
|
"loss": 4.0846, |
|
"step": 1722 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.062305295950156e-07, |
|
"loss": 4.2538, |
|
"step": 1723 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.984423676012461e-07, |
|
"loss": 4.2074, |
|
"step": 1724 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.906542056074767e-07, |
|
"loss": 4.1505, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.828660436137072e-07, |
|
"loss": 4.1144, |
|
"step": 1726 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.7507788161993773e-07, |
|
"loss": 4.1776, |
|
"step": 1727 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.6728971962616824e-07, |
|
"loss": 4.3059, |
|
"step": 1728 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.5950155763239876e-07, |
|
"loss": 4.3329, |
|
"step": 1729 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.517133956386293e-07, |
|
"loss": 4.1531, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.4392523364485984e-07, |
|
"loss": 3.8592, |
|
"step": 1731 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.3613707165109035e-07, |
|
"loss": 4.0838, |
|
"step": 1732 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.2834890965732087e-07, |
|
"loss": 4.0868, |
|
"step": 1733 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.2056074766355143e-07, |
|
"loss": 4.2534, |
|
"step": 1734 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.1277258566978195e-07, |
|
"loss": 4.5321, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.0498442367601246e-07, |
|
"loss": 4.5844, |
|
"step": 1736 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.97196261682243e-07, |
|
"loss": 3.8844, |
|
"step": 1737 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.8940809968847354e-07, |
|
"loss": 4.1078, |
|
"step": 1738 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.8161993769470406e-07, |
|
"loss": 4.1129, |
|
"step": 1739 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.7383177570093457e-07, |
|
"loss": 3.9785, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.660436137071651e-07, |
|
"loss": 4.1421, |
|
"step": 1741 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.5825545171339565e-07, |
|
"loss": 4.1732, |
|
"step": 1742 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.5046728971962617e-07, |
|
"loss": 3.9707, |
|
"step": 1743 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.426791277258567e-07, |
|
"loss": 3.9777, |
|
"step": 1744 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.348909657320872e-07, |
|
"loss": 4.1549, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.2710280373831776e-07, |
|
"loss": 4.3472, |
|
"step": 1746 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.193146417445483e-07, |
|
"loss": 4.2759, |
|
"step": 1747 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.1152647975077885e-07, |
|
"loss": 4.1722, |
|
"step": 1748 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.0373831775700936e-07, |
|
"loss": 4.3045, |
|
"step": 1749 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.959501557632399e-07, |
|
"loss": 4.2202, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.8816199376947044e-07, |
|
"loss": 3.9183, |
|
"step": 1751 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.8037383177570096e-07, |
|
"loss": 4.0868, |
|
"step": 1752 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.7258566978193147e-07, |
|
"loss": 4.2776, |
|
"step": 1753 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.64797507788162e-07, |
|
"loss": 4.0122, |
|
"step": 1754 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.5700934579439255e-07, |
|
"loss": 4.2619, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.4922118380062307e-07, |
|
"loss": 3.8762, |
|
"step": 1756 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.414330218068536e-07, |
|
"loss": 4.0124, |
|
"step": 1757 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.3364485981308412e-07, |
|
"loss": 4.066, |
|
"step": 1758 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.2585669781931466e-07, |
|
"loss": 4.0679, |
|
"step": 1759 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.1806853582554518e-07, |
|
"loss": 3.9699, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.1028037383177572e-07, |
|
"loss": 4.214, |
|
"step": 1761 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.0249221183800623e-07, |
|
"loss": 3.9071, |
|
"step": 1762 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.9470404984423677e-07, |
|
"loss": 4.036, |
|
"step": 1763 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.8691588785046729e-07, |
|
"loss": 4.2717, |
|
"step": 1764 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.7912772585669783e-07, |
|
"loss": 3.8852, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.7133956386292834e-07, |
|
"loss": 4.0428, |
|
"step": 1766 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.6355140186915888e-07, |
|
"loss": 4.0236, |
|
"step": 1767 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5576323987538942e-07, |
|
"loss": 3.9906, |
|
"step": 1768 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.4797507788161994e-07, |
|
"loss": 4.1054, |
|
"step": 1769 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.4018691588785048e-07, |
|
"loss": 4.0593, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.32398753894081e-07, |
|
"loss": 4.0713, |
|
"step": 1771 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.2461059190031153e-07, |
|
"loss": 4.0799, |
|
"step": 1772 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.1682242990654206e-07, |
|
"loss": 4.0986, |
|
"step": 1773 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0903426791277259e-07, |
|
"loss": 4.1712, |
|
"step": 1774 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0124610591900312e-07, |
|
"loss": 4.0225, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.345794392523364e-08, |
|
"loss": 4.1809, |
|
"step": 1776 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 8.566978193146417e-08, |
|
"loss": 4.0027, |
|
"step": 1777 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 7.788161993769471e-08, |
|
"loss": 4.1786, |
|
"step": 1778 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 7.009345794392524e-08, |
|
"loss": 4.3278, |
|
"step": 1779 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 6.230529595015577e-08, |
|
"loss": 4.1014, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 5.4517133956386294e-08, |
|
"loss": 4.4837, |
|
"step": 1781 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.672897196261682e-08, |
|
"loss": 4.0499, |
|
"step": 1782 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.8940809968847356e-08, |
|
"loss": 4.4883, |
|
"step": 1783 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.1152647975077883e-08, |
|
"loss": 3.5648, |
|
"step": 1784 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 1784, |
|
"total_flos": 0.0, |
|
"train_loss": 4.238124672874742, |
|
"train_runtime": 8582.3717, |
|
"train_samples_per_second": 3.325, |
|
"train_steps_per_second": 0.208 |
|
} |
|
], |
|
"max_steps": 1784, |
|
"num_train_epochs": 1, |
|
"total_flos": 0.0, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |