{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9987389659520807,
  "global_step": 594,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 0.0,
      "loss": 4.8209,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 4.9975,
      "step": 2
    },
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0000000000000003e-07, |
|
"loss": 4.8762, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.000000000000001e-07, |
|
"loss": 4.8257, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 8.000000000000001e-07, |
|
"loss": 4.7808, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 4.795, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 4.8669, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.4000000000000001e-06, |
|
"loss": 4.7709, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.6000000000000001e-06, |
|
"loss": 4.8758, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.6000000000000001e-06, |
|
"loss": 4.7759, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.8e-06, |
|
"loss": 4.7793, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 4.8164, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.2e-06, |
|
"loss": 4.7334, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 4.6811, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.6e-06, |
|
"loss": 4.5624, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.8000000000000003e-06, |
|
"loss": 4.7251, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 3e-06, |
|
"loss": 4.7181, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 3.2000000000000003e-06, |
|
"loss": 4.7752, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 3.4000000000000005e-06, |
|
"loss": 4.5284, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 3.6e-06, |
|
"loss": 4.7388, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 3.8e-06, |
|
"loss": 4.679, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 4.6217, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.2000000000000004e-06, |
|
"loss": 4.6101, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.4e-06, |
|
"loss": 4.6706, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.6e-06, |
|
"loss": 4.4344, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 4.8303, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 5e-06, |
|
"loss": 4.4799, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 5.2e-06, |
|
"loss": 4.3854, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 5.4e-06, |
|
"loss": 4.3895, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 5.600000000000001e-06, |
|
"loss": 4.5382, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 5.8e-06, |
|
"loss": 4.5426, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 6e-06, |
|
"loss": 4.4101, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 6.2e-06, |
|
"loss": 4.3962, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 6.4000000000000006e-06, |
|
"loss": 4.3527, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 6.6e-06, |
|
"loss": 4.3979, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 6.800000000000001e-06, |
|
"loss": 4.5459, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 7.000000000000001e-06, |
|
"loss": 4.3364, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 7.2e-06, |
|
"loss": 4.4963, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 7.4e-06, |
|
"loss": 4.6584, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 7.6e-06, |
|
"loss": 4.3229, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 7.8e-06, |
|
"loss": 4.3119, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 4.5691, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 8.200000000000001e-06, |
|
"loss": 4.298, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 8.400000000000001e-06, |
|
"loss": 4.4279, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 8.599999999999999e-06, |
|
"loss": 4.5293, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 8.8e-06, |
|
"loss": 4.564, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9e-06, |
|
"loss": 4.4752, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.2e-06, |
|
"loss": 4.8316, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.4e-06, |
|
"loss": 4.9229, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.600000000000001e-06, |
|
"loss": 4.5942, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.800000000000001e-06, |
|
"loss": 4.2587, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1e-05, |
|
"loss": 4.2727, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.02e-05, |
|
"loss": 4.2882, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.04e-05, |
|
"loss": 4.3738, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.06e-05, |
|
"loss": 4.2194, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.08e-05, |
|
"loss": 4.4177, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.1000000000000001e-05, |
|
"loss": 4.2308, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.1200000000000001e-05, |
|
"loss": 4.5297, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.1400000000000001e-05, |
|
"loss": 4.2934, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.16e-05, |
|
"loss": 4.072, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.18e-05, |
|
"loss": 4.3357, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.2e-05, |
|
"loss": 4.3799, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.22e-05, |
|
"loss": 4.2646, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.24e-05, |
|
"loss": 4.2315, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.2600000000000001e-05, |
|
"loss": 4.2819, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.2800000000000001e-05, |
|
"loss": 4.2281, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.3000000000000001e-05, |
|
"loss": 4.1835, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.32e-05, |
|
"loss": 4.1648, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.3400000000000002e-05, |
|
"loss": 4.2733, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.3600000000000002e-05, |
|
"loss": 4.2692, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.3800000000000002e-05, |
|
"loss": 4.347, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.4000000000000001e-05, |
|
"loss": 4.3248, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.42e-05, |
|
"loss": 4.1086, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.44e-05, |
|
"loss": 4.323, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.4599999999999999e-05, |
|
"loss": 4.2992, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.48e-05, |
|
"loss": 4.2775, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.5e-05, |
|
"loss": 4.2624, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.52e-05, |
|
"loss": 4.2317, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.54e-05, |
|
"loss": 4.1565, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.56e-05, |
|
"loss": 4.3455, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.58e-05, |
|
"loss": 4.2909, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 4.2639, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.62e-05, |
|
"loss": 4.3352, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.6400000000000002e-05, |
|
"loss": 4.3157, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.66e-05, |
|
"loss": 4.2394, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.6800000000000002e-05, |
|
"loss": 4.1759, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.7000000000000003e-05, |
|
"loss": 4.4326, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.7199999999999998e-05, |
|
"loss": 4.2897, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.74e-05, |
|
"loss": 4.2653, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.76e-05, |
|
"loss": 4.3216, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.78e-05, |
|
"loss": 4.2713, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.8e-05, |
|
"loss": 4.3816, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.8200000000000002e-05, |
|
"loss": 4.2577, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.84e-05, |
|
"loss": 4.4847, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.86e-05, |
|
"loss": 4.3693, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.88e-05, |
|
"loss": 4.4311, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9e-05, |
|
"loss": 4.4079, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9200000000000003e-05, |
|
"loss": 4.2798, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.94e-05, |
|
"loss": 4.5225, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9600000000000002e-05, |
|
"loss": 4.2098, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9800000000000004e-05, |
|
"loss": 4.3498, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 2e-05, |
|
"loss": 4.1786, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 2.0200000000000003e-05, |
|
"loss": 4.1771, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 2.04e-05, |
|
"loss": 4.1286, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 2.06e-05, |
|
"loss": 4.1734, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 2.08e-05, |
|
"loss": 4.1577, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 2.1e-05, |
|
"loss": 4.2961, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 2.12e-05, |
|
"loss": 4.3128, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 2.1400000000000002e-05, |
|
"loss": 4.395, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 2.16e-05, |
|
"loss": 4.207, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 2.18e-05, |
|
"loss": 4.199, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 2.2000000000000003e-05, |
|
"loss": 4.2547, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 2.22e-05, |
|
"loss": 4.2414, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 2.2400000000000002e-05, |
|
"loss": 4.2794, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 2.26e-05, |
|
"loss": 4.2513, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 2.2800000000000002e-05, |
|
"loss": 4.2407, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 2.3000000000000003e-05, |
|
"loss": 4.3321, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 2.32e-05, |
|
"loss": 4.1167, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 2.3400000000000003e-05, |
|
"loss": 4.108, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 2.36e-05, |
|
"loss": 4.102, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 2.38e-05, |
|
"loss": 4.3165, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 2.4e-05, |
|
"loss": 4.4299, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 2.4200000000000002e-05, |
|
"loss": 4.1823, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 2.44e-05, |
|
"loss": 4.2409, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 2.46e-05, |
|
"loss": 4.2589, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 2.48e-05, |
|
"loss": 4.0746, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 2.5e-05, |
|
"loss": 4.1253, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 2.5200000000000003e-05, |
|
"loss": 4.1079, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 2.54e-05, |
|
"loss": 4.237, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 2.5600000000000002e-05, |
|
"loss": 4.2734, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 2.58e-05, |
|
"loss": 4.2196, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 2.6000000000000002e-05, |
|
"loss": 4.1854, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 2.6200000000000003e-05, |
|
"loss": 4.253, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 2.64e-05, |
|
"loss": 4.3073, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 2.6600000000000003e-05, |
|
"loss": 4.3313, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 2.6800000000000004e-05, |
|
"loss": 4.1854, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 2.7000000000000002e-05, |
|
"loss": 4.1861, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 2.7200000000000004e-05, |
|
"loss": 4.2034, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 2.7400000000000002e-05, |
|
"loss": 4.217, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 2.7600000000000003e-05, |
|
"loss": 4.3816, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 2.7800000000000005e-05, |
|
"loss": 4.2944, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 2.8000000000000003e-05, |
|
"loss": 4.3615, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 2.8199999999999998e-05, |
|
"loss": 4.4018, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 2.84e-05, |
|
"loss": 4.2217, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 2.86e-05, |
|
"loss": 4.2796, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 2.88e-05, |
|
"loss": 4.612, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 2.9e-05, |
|
"loss": 4.5694, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 2.9199999999999998e-05, |
|
"loss": 4.5402, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 2.94e-05, |
|
"loss": 4.5132, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 2.96e-05, |
|
"loss": 4.564, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 2.98e-05, |
|
"loss": 4.217, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3e-05, |
|
"loss": 4.1436, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.02e-05, |
|
"loss": 4.1186, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.04e-05, |
|
"loss": 4.0641, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.06e-05, |
|
"loss": 4.0898, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.08e-05, |
|
"loss": 4.1023, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.1e-05, |
|
"loss": 4.2444, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.12e-05, |
|
"loss": 4.0765, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.1400000000000004e-05, |
|
"loss": 4.0442, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.16e-05, |
|
"loss": 4.1485, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.18e-05, |
|
"loss": 4.1854, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.2000000000000005e-05, |
|
"loss": 4.0798, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.2200000000000003e-05, |
|
"loss": 4.1013, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.24e-05, |
|
"loss": 4.1308, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.26e-05, |
|
"loss": 4.1663, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.2800000000000004e-05, |
|
"loss": 4.2826, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.3e-05, |
|
"loss": 4.12, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.32e-05, |
|
"loss": 4.3403, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.3400000000000005e-05, |
|
"loss": 4.1641, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.3600000000000004e-05, |
|
"loss": 4.2228, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.38e-05, |
|
"loss": 4.0751, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.4000000000000007e-05, |
|
"loss": 4.1022, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.4200000000000005e-05, |
|
"loss": 4.1089, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.4399999999999996e-05, |
|
"loss": 4.1186, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.46e-05, |
|
"loss": 4.239, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.48e-05, |
|
"loss": 4.2725, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.5e-05, |
|
"loss": 4.2117, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.52e-05, |
|
"loss": 4.1026, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.54e-05, |
|
"loss": 4.2162, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.56e-05, |
|
"loss": 4.094, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.58e-05, |
|
"loss": 4.1863, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.6e-05, |
|
"loss": 4.1617, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.62e-05, |
|
"loss": 4.1582, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.6400000000000004e-05, |
|
"loss": 4.0647, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.66e-05, |
|
"loss": 4.4047, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.68e-05, |
|
"loss": 4.1967, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.7e-05, |
|
"loss": 4.2453, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.72e-05, |
|
"loss": 4.2804, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.74e-05, |
|
"loss": 4.426, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.76e-05, |
|
"loss": 4.1321, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.7800000000000004e-05, |
|
"loss": 4.4128, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.8e-05, |
|
"loss": 4.3332, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.82e-05, |
|
"loss": 4.329, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.8400000000000005e-05, |
|
"loss": 4.3248, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.86e-05, |
|
"loss": 4.2567, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.88e-05, |
|
"loss": 4.1483, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.9000000000000006e-05, |
|
"loss": 4.5865, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.9200000000000004e-05, |
|
"loss": 4.5307, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.94e-05, |
|
"loss": 4.5087, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.960000000000001e-05, |
|
"loss": 4.5711, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.9800000000000005e-05, |
|
"loss": 4.0395, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4e-05, |
|
"loss": 4.1998, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.02e-05, |
|
"loss": 4.2598, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.0400000000000006e-05, |
|
"loss": 4.0994, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.0600000000000004e-05, |
|
"loss": 4.1703, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.08e-05, |
|
"loss": 4.1169, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.1e-05, |
|
"loss": 4.1565, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.12e-05, |
|
"loss": 4.1541, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.14e-05, |
|
"loss": 4.1809, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.16e-05, |
|
"loss": 4.2621, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.18e-05, |
|
"loss": 4.3034, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.2e-05, |
|
"loss": 4.2205, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.22e-05, |
|
"loss": 4.1836, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.24e-05, |
|
"loss": 4.2609, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.26e-05, |
|
"loss": 4.1626, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.2800000000000004e-05, |
|
"loss": 4.1637, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.3e-05, |
|
"loss": 4.0815, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.32e-05, |
|
"loss": 4.1643, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.3400000000000005e-05, |
|
"loss": 4.1492, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.36e-05, |
|
"loss": 4.2274, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.38e-05, |
|
"loss": 4.2597, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.4000000000000006e-05, |
|
"loss": 4.2746, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.4200000000000004e-05, |
|
"loss": 4.0996, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.44e-05, |
|
"loss": 4.2349, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.46e-05, |
|
"loss": 4.4081, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.4800000000000005e-05, |
|
"loss": 4.1651, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.5e-05, |
|
"loss": 4.097, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.52e-05, |
|
"loss": 4.1481, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.5400000000000006e-05, |
|
"loss": 4.3052, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.5600000000000004e-05, |
|
"loss": 4.2498, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.58e-05, |
|
"loss": 4.1329, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.600000000000001e-05, |
|
"loss": 4.2091, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.6200000000000005e-05, |
|
"loss": 4.3565, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.64e-05, |
|
"loss": 4.1954, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.660000000000001e-05, |
|
"loss": 4.3483, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.6800000000000006e-05, |
|
"loss": 4.1283, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.7e-05, |
|
"loss": 4.0929, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.72e-05, |
|
"loss": 4.1404, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.74e-05, |
|
"loss": 4.0962, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.76e-05, |
|
"loss": 4.2646, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.78e-05, |
|
"loss": 4.186, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.8e-05, |
|
"loss": 4.2541, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.82e-05, |
|
"loss": 4.1949, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.8400000000000004e-05, |
|
"loss": 4.3293, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.86e-05, |
|
"loss": 4.477, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.88e-05, |
|
"loss": 4.4003, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.9e-05, |
|
"loss": 4.5343, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.92e-05, |
|
"loss": 4.5222, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.94e-05, |
|
"loss": 4.4392, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.96e-05, |
|
"loss": 4.5092, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.9800000000000004e-05, |
|
"loss": 4.2232, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 5e-05, |
|
"loss": 4.2222, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.02e-05, |
|
"loss": 4.2069, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.0400000000000005e-05, |
|
"loss": 4.1118, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.0600000000000003e-05, |
|
"loss": 4.2737, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.08e-05, |
|
"loss": 4.1708, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.1000000000000006e-05, |
|
"loss": 4.1226, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 5.1200000000000004e-05, |
|
"loss": 4.1544, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.14e-05, |
|
"loss": 4.2259, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.16e-05, |
|
"loss": 4.2689, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.1800000000000005e-05, |
|
"loss": 4.172, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.2000000000000004e-05, |
|
"loss": 4.1095, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.22e-05, |
|
"loss": 4.2901, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 5.2400000000000007e-05, |
|
"loss": 3.9607, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.2600000000000005e-05, |
|
"loss": 4.0989, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.28e-05, |
|
"loss": 4.1631, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.300000000000001e-05, |
|
"loss": 4.1817, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.3200000000000006e-05, |
|
"loss": 4.0973, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.3400000000000004e-05, |
|
"loss": 4.2258, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 5.360000000000001e-05, |
|
"loss": 4.2652, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.380000000000001e-05, |
|
"loss": 4.114, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.4000000000000005e-05, |
|
"loss": 4.2871, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.420000000000001e-05, |
|
"loss": 4.1757, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.440000000000001e-05, |
|
"loss": 4.1485, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.4600000000000006e-05, |
|
"loss": 4.2358, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 5.4800000000000004e-05, |
|
"loss": 4.1717, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.500000000000001e-05, |
|
"loss": 4.281, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.520000000000001e-05, |
|
"loss": 4.1168, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.5400000000000005e-05, |
|
"loss": 4.301, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.560000000000001e-05, |
|
"loss": 4.3667, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.580000000000001e-05, |
|
"loss": 4.3479, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 5.6000000000000006e-05, |
|
"loss": 4.2431, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.620000000000001e-05, |
|
"loss": 4.1512, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.6399999999999995e-05, |
|
"loss": 4.1295, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.66e-05, |
|
"loss": 4.3472, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.68e-05, |
|
"loss": 4.0996, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.6999999999999996e-05, |
|
"loss": 4.5036, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 5.72e-05, |
|
"loss": 3.9666, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.74e-05, |
|
"loss": 4.2155, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.76e-05, |
|
"loss": 4.2013, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.7799999999999995e-05, |
|
"loss": 4.3886, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.8e-05, |
|
"loss": 4.0919, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.82e-05, |
|
"loss": 4.2544, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 5.8399999999999997e-05, |
|
"loss": 4.3885, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.86e-05, |
|
"loss": 4.3546, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.88e-05, |
|
"loss": 4.1183, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.9e-05, |
|
"loss": 4.4789, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.92e-05, |
|
"loss": 4.6137, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.94e-05, |
|
"loss": 4.5089, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 5.96e-05, |
|
"loss": 4.6724, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 5.9800000000000003e-05, |
|
"loss": 4.3125, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6e-05, |
|
"loss": 4.2696, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.02e-05, |
|
"loss": 4.3593, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.04e-05, |
|
"loss": 4.0906, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.06e-05, |
|
"loss": 4.2907, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.08e-05, |
|
"loss": 4.3188, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.1e-05, |
|
"loss": 4.0912, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.12e-05, |
|
"loss": 4.284, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.14e-05, |
|
"loss": 4.2691, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.16e-05, |
|
"loss": 4.2903, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.18e-05, |
|
"loss": 4.1323, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.2e-05, |
|
"loss": 4.1224, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.220000000000001e-05, |
|
"loss": 4.2004, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.24e-05, |
|
"loss": 4.2083, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.26e-05, |
|
"loss": 4.1996, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.280000000000001e-05, |
|
"loss": 4.2615, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.3e-05, |
|
"loss": 4.1322, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.32e-05, |
|
"loss": 4.2435, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.340000000000001e-05, |
|
"loss": 4.1421, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.36e-05, |
|
"loss": 4.2892, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.38e-05, |
|
"loss": 4.0954, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.400000000000001e-05, |
|
"loss": 4.1498, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.42e-05, |
|
"loss": 4.2609, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.440000000000001e-05, |
|
"loss": 4.2246, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.460000000000001e-05, |
|
"loss": 4.1842, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.48e-05, |
|
"loss": 4.1417, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.500000000000001e-05, |
|
"loss": 4.4157, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.52e-05, |
|
"loss": 4.1353, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.54e-05, |
|
"loss": 4.0712, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.560000000000001e-05, |
|
"loss": 4.2765, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.58e-05, |
|
"loss": 4.263, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.6e-05, |
|
"loss": 4.0798, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.620000000000001e-05, |
|
"loss": 4.328, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.64e-05, |
|
"loss": 4.2533, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.66e-05, |
|
"loss": 4.2726, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.680000000000001e-05, |
|
"loss": 4.1403, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.7e-05, |
|
"loss": 4.1282, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.720000000000001e-05, |
|
"loss": 4.2455, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.740000000000001e-05, |
|
"loss": 4.1952, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.76e-05, |
|
"loss": 4.3327, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.780000000000001e-05, |
|
"loss": 4.2905, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 6.800000000000001e-05, |
|
"loss": 4.374, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 6.82e-05, |
|
"loss": 4.487, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 6.840000000000001e-05, |
|
"loss": 4.3049, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 6.860000000000001e-05, |
|
"loss": 4.4144, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 6.879999999999999e-05, |
|
"loss": 4.1528, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 6.9e-05, |
|
"loss": 4.4872, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 6.92e-05, |
|
"loss": 4.5556, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 6.939999999999999e-05, |
|
"loss": 4.5082, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 6.96e-05, |
|
"loss": 4.6211, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 6.98e-05, |
|
"loss": 4.3283, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7e-05, |
|
"loss": 4.2621, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.02e-05, |
|
"loss": 4.3229, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.04e-05, |
|
"loss": 4.1606, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.06e-05, |
|
"loss": 4.224, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.08e-05, |
|
"loss": 4.2125, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.1e-05, |
|
"loss": 4.2523, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.12e-05, |
|
"loss": 4.1371, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.14e-05, |
|
"loss": 4.1853, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.16e-05, |
|
"loss": 4.1967, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.18e-05, |
|
"loss": 4.3175, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.2e-05, |
|
"loss": 4.1987, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.22e-05, |
|
"loss": 4.375, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.24e-05, |
|
"loss": 4.1402, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.26e-05, |
|
"loss": 4.2573, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 7.280000000000001e-05, |
|
"loss": 4.1376, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 7.3e-05, |
|
"loss": 4.2101, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 7.32e-05, |
|
"loss": 4.1182, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 7.340000000000001e-05, |
|
"loss": 4.2608, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 7.36e-05, |
|
"loss": 4.2736, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 7.38e-05, |
|
"loss": 4.1054, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 7.4e-05, |
|
"loss": 4.2215, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 7.42e-05, |
|
"loss": 4.0778, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 7.44e-05, |
|
"loss": 4.1373, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 7.46e-05, |
|
"loss": 4.3252, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 7.48e-05, |
|
"loss": 4.316, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 4.2559, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 7.52e-05, |
|
"loss": 4.2918, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 7.54e-05, |
|
"loss": 4.0799, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 7.560000000000001e-05, |
|
"loss": 4.3441, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 7.58e-05, |
|
"loss": 4.2877, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 7.6e-05, |
|
"loss": 4.1236, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 7.620000000000001e-05, |
|
"loss": 4.1902, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 7.64e-05, |
|
"loss": 4.2293, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 7.66e-05, |
|
"loss": 4.5028, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 7.680000000000001e-05, |
|
"loss": 4.2033, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 7.7e-05, |
|
"loss": 4.3762, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 7.72e-05, |
|
"loss": 4.318, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 7.740000000000001e-05, |
|
"loss": 4.2075, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 7.76e-05, |
|
"loss": 4.1818, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 7.780000000000001e-05, |
|
"loss": 4.3408, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 7.800000000000001e-05, |
|
"loss": 4.3702, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 7.82e-05, |
|
"loss": 4.5948, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 7.840000000000001e-05, |
|
"loss": 4.2956, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 7.860000000000001e-05, |
|
"loss": 4.3763, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 7.88e-05, |
|
"loss": 4.3044, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 7.900000000000001e-05, |
|
"loss": 4.3955, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 7.920000000000001e-05, |
|
"loss": 4.7559, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 7.94e-05, |
|
"loss": 4.4716, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 7.960000000000001e-05, |
|
"loss": 4.7723, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 7.98e-05, |
|
"loss": 4.3635, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 8e-05, |
|
"loss": 4.2703, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 8.020000000000001e-05, |
|
"loss": 4.3672, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 8.04e-05, |
|
"loss": 4.3438, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 8.060000000000001e-05, |
|
"loss": 4.1643, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 8.080000000000001e-05, |
|
"loss": 4.2507, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 8.1e-05, |
|
"loss": 4.3051, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 8.120000000000001e-05, |
|
"loss": 4.1477, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 8.14e-05, |
|
"loss": 4.1829, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 8.16e-05, |
|
"loss": 4.1823, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 8.18e-05, |
|
"loss": 4.1373, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 8.2e-05, |
|
"loss": 4.3487, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 8.22e-05, |
|
"loss": 4.2404, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 8.24e-05, |
|
"loss": 4.3134, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 8.26e-05, |
|
"loss": 4.2718, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 8.28e-05, |
|
"loss": 4.1852, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 8.3e-05, |
|
"loss": 4.2251, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 8.32e-05, |
|
"loss": 4.0132, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 8.34e-05, |
|
"loss": 4.4129, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.36e-05, |
|
"loss": 4.2629, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.38e-05, |
|
"loss": 4.0219, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.4e-05, |
|
"loss": 4.1473, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.42e-05, |
|
"loss": 4.3076, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.44e-05, |
|
"loss": 4.2272, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 8.46e-05, |
|
"loss": 4.2579, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 8.48e-05, |
|
"loss": 4.3549, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 8.5e-05, |
|
"loss": 4.1541, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 8.52e-05, |
|
"loss": 4.1218, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 8.54e-05, |
|
"loss": 4.2926, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 8.560000000000001e-05, |
|
"loss": 4.2178, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 8.58e-05, |
|
"loss": 4.341, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.6e-05, |
|
"loss": 4.0324, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.620000000000001e-05, |
|
"loss": 4.295, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.64e-05, |
|
"loss": 4.3282, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.66e-05, |
|
"loss": 4.5126, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.680000000000001e-05, |
|
"loss": 4.2555, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.7e-05, |
|
"loss": 4.1247, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 8.72e-05, |
|
"loss": 4.2901, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 8.740000000000001e-05, |
|
"loss": 4.2579, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 8.76e-05, |
|
"loss": 4.4895, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 8.78e-05, |
|
"loss": 4.373, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 8.800000000000001e-05, |
|
"loss": 4.6938, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 8.82e-05, |
|
"loss": 4.5459, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 8.840000000000001e-05, |
|
"loss": 4.333, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 8.840000000000001e-05, |
|
"loss": 4.2817, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 8.86e-05, |
|
"loss": 4.4313, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 8.88e-05, |
|
"loss": 4.4515, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 8.900000000000001e-05, |
|
"loss": 4.3566, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 8.92e-05, |
|
"loss": 4.583, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 8.94e-05, |
|
"loss": 4.3883, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 8.960000000000001e-05, |
|
"loss": 4.308, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 8.98e-05, |
|
"loss": 4.4201, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 9e-05, |
|
"loss": 4.2154, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 9.020000000000001e-05, |
|
"loss": 4.1942, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.04e-05, |
|
"loss": 4.2917, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.06e-05, |
|
"loss": 4.2434, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.080000000000001e-05, |
|
"loss": 4.4964, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.1e-05, |
|
"loss": 4.3874, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.120000000000001e-05, |
|
"loss": 4.3414, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.140000000000001e-05, |
|
"loss": 4.3112, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.16e-05, |
|
"loss": 4.2885, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.180000000000001e-05, |
|
"loss": 4.1744, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.200000000000001e-05, |
|
"loss": 4.174, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.22e-05, |
|
"loss": 4.3217, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.22e-05, |
|
"loss": 4.3614, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 9.240000000000001e-05, |
|
"loss": 4.2587, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 9.260000000000001e-05, |
|
"loss": 4.2522, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 9.28e-05, |
|
"loss": 4.2726, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 9.300000000000001e-05, |
|
"loss": 4.2295, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 9.320000000000002e-05, |
|
"loss": 4.194, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 9.340000000000001e-05, |
|
"loss": 4.1531, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 9.360000000000001e-05, |
|
"loss": 4.2138, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.38e-05, |
|
"loss": 4.2742, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.4e-05, |
|
"loss": 4.1743, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.42e-05, |
|
"loss": 4.1392, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.44e-05, |
|
"loss": 4.3719, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.46e-05, |
|
"loss": 4.2289, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.48e-05, |
|
"loss": 4.2521, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.5e-05, |
|
"loss": 4.2699, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.52e-05, |
|
"loss": 4.0737, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.54e-05, |
|
"loss": 4.488, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.56e-05, |
|
"loss": 4.3975, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.58e-05, |
|
"loss": 4.4541, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.6e-05, |
|
"loss": 4.392, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.620000000000001e-05, |
|
"loss": 4.258, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.64e-05, |
|
"loss": 4.2439, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.66e-05, |
|
"loss": 4.3562, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.680000000000001e-05, |
|
"loss": 4.3051, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.7e-05, |
|
"loss": 4.3883, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.72e-05, |
|
"loss": 4.1472, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 9.74e-05, |
|
"loss": 4.4604, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 9.76e-05, |
|
"loss": 4.351, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 9.78e-05, |
|
"loss": 4.0272, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 9.8e-05, |
|
"loss": 4.2968, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 9.82e-05, |
|
"loss": 4.5583, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 9.84e-05, |
|
"loss": 4.5452, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 9.86e-05, |
|
"loss": 4.3781, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 9.88e-05, |
|
"loss": 4.3979, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 9.900000000000001e-05, |
|
"loss": 4.6125, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 9.92e-05, |
|
"loss": 4.3519, |
|
"step": 500 |
|
}, |
|
    {
      "epoch": 0.84,
      "eval_loss": 4.385700225830078,
      "eval_runtime": 755.5833,
      "eval_samples_per_second": 3.497,
      "eval_steps_per_second": 0.292,
      "eval_wer": 1.9208647362157873,
      "step": 500
    },
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 9.94e-05, |
|
"loss": 4.331, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 9.960000000000001e-05, |
|
"loss": 4.1535, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 9.98e-05, |
|
"loss": 4.2661, |
|
"step": 503 |
|
}, |
|
    {
      "epoch": 0.85,
      "learning_rate": 0.0001,
      "loss": 4.3242,
      "step": 504
    },
    {
      "epoch": 0.85,
      "learning_rate": 9.893617021276596e-05,
      "loss": 4.3194,
      "step": 505
    },
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 9.787234042553192e-05, |
|
"loss": 4.4679, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 9.680851063829788e-05, |
|
"loss": 4.2307, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 9.574468085106384e-05, |
|
"loss": 4.2865, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.468085106382978e-05, |
|
"loss": 4.0963, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.361702127659576e-05, |
|
"loss": 4.2769, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.25531914893617e-05, |
|
"loss": 4.3236, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.148936170212766e-05, |
|
"loss": 4.1472, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.042553191489363e-05, |
|
"loss": 4.1778, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 8.936170212765958e-05, |
|
"loss": 4.2354, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.829787234042553e-05, |
|
"loss": 4.253, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.723404255319149e-05, |
|
"loss": 4.2079, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.617021276595745e-05, |
|
"loss": 4.3176, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.510638297872341e-05, |
|
"loss": 4.3198, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.404255319148937e-05, |
|
"loss": 4.121, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.297872340425533e-05, |
|
"loss": 4.1787, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 8.191489361702128e-05, |
|
"loss": 4.265, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 8.085106382978723e-05, |
|
"loss": 4.4652, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.978723404255319e-05, |
|
"loss": 4.1396, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.872340425531916e-05, |
|
"loss": 3.9964, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.76595744680851e-05, |
|
"loss": 4.2279, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.659574468085106e-05, |
|
"loss": 4.1907, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 7.553191489361703e-05, |
|
"loss": 4.3998, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 7.446808510638298e-05, |
|
"loss": 4.1426, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 7.340425531914894e-05, |
|
"loss": 4.1125, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 7.23404255319149e-05, |
|
"loss": 4.1092, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 7.127659574468085e-05, |
|
"loss": 4.1359, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 7.021276595744681e-05, |
|
"loss": 4.4409, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 6.914893617021277e-05, |
|
"loss": 4.2359, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 6.808510638297873e-05, |
|
"loss": 4.2104, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 6.702127659574469e-05, |
|
"loss": 4.1657, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 6.595744680851063e-05, |
|
"loss": 4.1756, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 6.489361702127659e-05, |
|
"loss": 4.237, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 6.382978723404256e-05, |
|
"loss": 4.2265, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 6.276595744680851e-05, |
|
"loss": 4.1348, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 6.170212765957447e-05, |
|
"loss": 4.1533, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 6.063829787234043e-05, |
|
"loss": 4.1785, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 5.9574468085106384e-05, |
|
"loss": 4.2902, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 5.851063829787234e-05, |
|
"loss": 4.2662, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 5.744680851063831e-05, |
|
"loss": 4.2221, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 5.638297872340426e-05, |
|
"loss": 4.3467, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 5.531914893617022e-05, |
|
"loss": 4.5082, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 5.425531914893617e-05, |
|
"loss": 4.2303, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 5.319148936170213e-05, |
|
"loss": 4.4299, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 5.212765957446809e-05, |
|
"loss": 4.9461, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 5.1063829787234044e-05, |
|
"loss": 4.3165, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 5e-05, |
|
"loss": 4.2883, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.893617021276596e-05, |
|
"loss": 4.2367, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.787234042553192e-05, |
|
"loss": 4.1961, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.680851063829788e-05, |
|
"loss": 4.1235, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.574468085106383e-05, |
|
"loss": 4.1092, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.468085106382979e-05, |
|
"loss": 4.216, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.3617021276595746e-05, |
|
"loss": 4.3598, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.2553191489361704e-05, |
|
"loss": 4.2637, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.148936170212766e-05, |
|
"loss": 4.1133, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.0425531914893614e-05, |
|
"loss": 4.1171, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.936170212765958e-05, |
|
"loss": 4.4514, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.829787234042553e-05, |
|
"loss": 4.0989, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.723404255319149e-05, |
|
"loss": 4.1127, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.617021276595745e-05, |
|
"loss": 4.402, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.5106382978723407e-05, |
|
"loss": 4.2184, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.4042553191489365e-05, |
|
"loss": 4.2015, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.2978723404255317e-05, |
|
"loss": 4.2155, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.191489361702128e-05, |
|
"loss": 4.0767, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.085106382978723e-05, |
|
"loss": 4.0548, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.9787234042553192e-05, |
|
"loss": 4.2154, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.8723404255319154e-05, |
|
"loss": 4.1975, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.765957446808511e-05, |
|
"loss": 4.2138, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.6595744680851064e-05, |
|
"loss": 4.3058, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.5531914893617022e-05, |
|
"loss": 4.1456, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.446808510638298e-05, |
|
"loss": 4.2585, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.340425531914894e-05, |
|
"loss": 4.1975, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.2340425531914894e-05, |
|
"loss": 4.2476, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.1276595744680852e-05, |
|
"loss": 4.0191, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.0212765957446807e-05, |
|
"loss": 4.0424, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.9148936170212766e-05, |
|
"loss": 4.2502, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.8085106382978724e-05, |
|
"loss": 4.4058, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.7021276595744682e-05, |
|
"loss": 4.269, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.595744680851064e-05, |
|
"loss": 4.0582, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.4893617021276596e-05, |
|
"loss": 4.1872, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.3829787234042554e-05, |
|
"loss": 4.156, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.2765957446808511e-05, |
|
"loss": 4.0976, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.170212765957447e-05, |
|
"loss": 4.2902, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0638297872340426e-05, |
|
"loss": 4.2762, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 9.574468085106383e-06, |
|
"loss": 4.2949, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 8.510638297872341e-06, |
|
"loss": 4.2574, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 7.446808510638298e-06, |
|
"loss": 4.4847, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 6.3829787234042555e-06, |
|
"loss": 4.579, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 5.319148936170213e-06, |
|
"loss": 4.6057, |
|
"step": 593 |
|
}, |
|
    {
      "epoch": 1.0,
      "learning_rate": 4.255319148936171e-06,
      "loss": 4.4414,
      "step": 594
    },
    {
      "epoch": 1.0,
      "step": 594,
      "total_flos": 0.0,
      "train_loss": 4.292364594912288,
      "train_runtime": 5438.2563,
      "train_samples_per_second": 5.248,
      "train_steps_per_second": 0.109
    }
  ],
  "max_steps": 594,
  "num_train_epochs": 1,
  "total_flos": 0.0,
  "trial_name": null,
  "trial_params": null
}