|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9995796553173603, |
|
"global_step": 1189, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 9.3247, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 8.6635, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 9.0379, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 2.0000000000000002e-07, |
|
"loss": 9.0547, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.0000000000000003e-07, |
|
"loss": 8.8855, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.000000000000001e-07, |
|
"loss": 8.8753, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 8.000000000000001e-07, |
|
"loss": 8.5676, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 8.4724, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 8.8081, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 8.5251, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.4000000000000001e-06, |
|
"loss": 8.355, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.6000000000000001e-06, |
|
"loss": 8.4144, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.8e-06, |
|
"loss": 8.3692, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 8.1513, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.2e-06, |
|
"loss": 8.1517, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 7.9598, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.6e-06, |
|
"loss": 8.0229, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.8000000000000003e-06, |
|
"loss": 8.1036, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3e-06, |
|
"loss": 7.8943, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.2000000000000003e-06, |
|
"loss": 7.8666, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.4000000000000005e-06, |
|
"loss": 7.6748, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.6e-06, |
|
"loss": 7.6982, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.8e-06, |
|
"loss": 7.7092, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 7.7163, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.2000000000000004e-06, |
|
"loss": 7.5864, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4e-06, |
|
"loss": 7.6307, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.6e-06, |
|
"loss": 7.6469, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 7.4145, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 5e-06, |
|
"loss": 7.5651, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 5.2e-06, |
|
"loss": 7.6513, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 5.4e-06, |
|
"loss": 7.5343, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 5.600000000000001e-06, |
|
"loss": 7.4476, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 5.8e-06, |
|
"loss": 7.6121, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 6e-06, |
|
"loss": 7.2192, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 6.2e-06, |
|
"loss": 7.3556, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 6.4000000000000006e-06, |
|
"loss": 7.5773, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 6.6e-06, |
|
"loss": 7.396, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 6.800000000000001e-06, |
|
"loss": 7.4057, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 7.000000000000001e-06, |
|
"loss": 7.5256, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 7.2e-06, |
|
"loss": 7.3596, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 7.4e-06, |
|
"loss": 7.2581, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 7.6e-06, |
|
"loss": 7.4072, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 7.8e-06, |
|
"loss": 7.3126, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 7.3703, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.200000000000001e-06, |
|
"loss": 7.4614, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.400000000000001e-06, |
|
"loss": 7.5204, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.599999999999999e-06, |
|
"loss": 7.4169, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.8e-06, |
|
"loss": 7.5653, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9e-06, |
|
"loss": 7.4251, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.2e-06, |
|
"loss": 7.4275, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.4e-06, |
|
"loss": 7.4487, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.600000000000001e-06, |
|
"loss": 7.2411, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.800000000000001e-06, |
|
"loss": 7.1513, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1e-05, |
|
"loss": 7.2095, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.02e-05, |
|
"loss": 7.3188, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.04e-05, |
|
"loss": 7.012, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.06e-05, |
|
"loss": 7.4734, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.08e-05, |
|
"loss": 7.1145, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.1000000000000001e-05, |
|
"loss": 7.0793, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.1200000000000001e-05, |
|
"loss": 7.2494, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.1400000000000001e-05, |
|
"loss": 7.0728, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.16e-05, |
|
"loss": 7.1014, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.18e-05, |
|
"loss": 7.243, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.2e-05, |
|
"loss": 7.1645, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.22e-05, |
|
"loss": 7.1121, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.24e-05, |
|
"loss": 7.193, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.2600000000000001e-05, |
|
"loss": 7.1381, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.2800000000000001e-05, |
|
"loss": 7.0773, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.3000000000000001e-05, |
|
"loss": 7.0531, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.32e-05, |
|
"loss": 7.0903, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.3400000000000002e-05, |
|
"loss": 7.232, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.3600000000000002e-05, |
|
"loss": 7.2922, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.3800000000000002e-05, |
|
"loss": 7.1531, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.4000000000000001e-05, |
|
"loss": 7.1851, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.42e-05, |
|
"loss": 6.9753, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.44e-05, |
|
"loss": 7.3358, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.4599999999999999e-05, |
|
"loss": 7.114, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.48e-05, |
|
"loss": 6.9431, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.5e-05, |
|
"loss": 6.974, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.52e-05, |
|
"loss": 7.0624, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.54e-05, |
|
"loss": 7.1886, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.56e-05, |
|
"loss": 7.2449, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.58e-05, |
|
"loss": 6.9961, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 7.115, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.62e-05, |
|
"loss": 7.0465, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.6400000000000002e-05, |
|
"loss": 7.1174, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.66e-05, |
|
"loss": 7.02, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.6800000000000002e-05, |
|
"loss": 7.1203, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.7000000000000003e-05, |
|
"loss": 7.2337, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.7199999999999998e-05, |
|
"loss": 7.0477, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.74e-05, |
|
"loss": 7.2718, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.76e-05, |
|
"loss": 7.1269, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.78e-05, |
|
"loss": 6.8366, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.8e-05, |
|
"loss": 7.1281, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.8200000000000002e-05, |
|
"loss": 7.2863, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.84e-05, |
|
"loss": 6.9239, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.86e-05, |
|
"loss": 7.0845, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.88e-05, |
|
"loss": 7.0067, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9e-05, |
|
"loss": 6.6292, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9200000000000003e-05, |
|
"loss": 6.4719, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.94e-05, |
|
"loss": 7.1517, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9600000000000002e-05, |
|
"loss": 7.1006, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9800000000000004e-05, |
|
"loss": 7.1244, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2e-05, |
|
"loss": 7.1029, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2.0200000000000003e-05, |
|
"loss": 7.0077, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2.04e-05, |
|
"loss": 7.1126, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2.06e-05, |
|
"loss": 7.0549, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2.08e-05, |
|
"loss": 7.1308, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2.1e-05, |
|
"loss": 7.2395, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2.12e-05, |
|
"loss": 7.0873, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2.1400000000000002e-05, |
|
"loss": 7.1595, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2.16e-05, |
|
"loss": 7.1262, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2.18e-05, |
|
"loss": 7.0425, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 2.2000000000000003e-05, |
|
"loss": 7.0284, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 2.22e-05, |
|
"loss": 7.1293, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 2.2400000000000002e-05, |
|
"loss": 7.2394, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 2.26e-05, |
|
"loss": 7.0671, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 2.2800000000000002e-05, |
|
"loss": 6.9098, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 2.3000000000000003e-05, |
|
"loss": 6.8846, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 2.32e-05, |
|
"loss": 7.1227, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 2.3400000000000003e-05, |
|
"loss": 7.1857, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 2.36e-05, |
|
"loss": 7.0162, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 2.38e-05, |
|
"loss": 7.1089, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 2.4e-05, |
|
"loss": 7.0784, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 2.4200000000000002e-05, |
|
"loss": 7.0332, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 2.44e-05, |
|
"loss": 6.9283, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 2.46e-05, |
|
"loss": 7.2615, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 2.48e-05, |
|
"loss": 6.9811, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 2.5e-05, |
|
"loss": 7.0035, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 2.5200000000000003e-05, |
|
"loss": 6.9837, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 2.54e-05, |
|
"loss": 6.8607, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 2.5600000000000002e-05, |
|
"loss": 7.0237, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 2.58e-05, |
|
"loss": 7.2092, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 2.6000000000000002e-05, |
|
"loss": 6.9669, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 2.6200000000000003e-05, |
|
"loss": 7.049, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 2.64e-05, |
|
"loss": 6.967, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.6600000000000003e-05, |
|
"loss": 6.982, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.6800000000000004e-05, |
|
"loss": 7.0445, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.7000000000000002e-05, |
|
"loss": 6.9934, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.7200000000000004e-05, |
|
"loss": 7.1241, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.7400000000000002e-05, |
|
"loss": 7.0486, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.7600000000000003e-05, |
|
"loss": 6.9578, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.7800000000000005e-05, |
|
"loss": 6.9053, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.8000000000000003e-05, |
|
"loss": 7.0169, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.8199999999999998e-05, |
|
"loss": 6.8802, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.84e-05, |
|
"loss": 7.0577, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.86e-05, |
|
"loss": 6.7687, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.88e-05, |
|
"loss": 6.5566, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 2.9e-05, |
|
"loss": 6.7013, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 2.9199999999999998e-05, |
|
"loss": 6.5686, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 2.94e-05, |
|
"loss": 7.2573, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 2.96e-05, |
|
"loss": 7.312, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 2.98e-05, |
|
"loss": 7.1299, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 3e-05, |
|
"loss": 7.1751, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 3.02e-05, |
|
"loss": 7.0241, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 3.04e-05, |
|
"loss": 7.004, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 3.06e-05, |
|
"loss": 7.0294, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 3.08e-05, |
|
"loss": 7.1861, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 3.1e-05, |
|
"loss": 6.9487, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 3.12e-05, |
|
"loss": 6.895, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 3.1400000000000004e-05, |
|
"loss": 6.9653, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 3.16e-05, |
|
"loss": 7.0934, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 3.18e-05, |
|
"loss": 7.1509, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 3.2000000000000005e-05, |
|
"loss": 6.9768, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 3.2200000000000003e-05, |
|
"loss": 6.9718, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 3.24e-05, |
|
"loss": 6.8331, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 3.26e-05, |
|
"loss": 6.9732, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 3.2800000000000004e-05, |
|
"loss": 7.0108, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 3.3e-05, |
|
"loss": 7.029, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 3.32e-05, |
|
"loss": 6.9196, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 3.3400000000000005e-05, |
|
"loss": 6.9597, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 3.3600000000000004e-05, |
|
"loss": 6.9481, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 3.38e-05, |
|
"loss": 6.7448, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 3.4000000000000007e-05, |
|
"loss": 6.9987, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 3.4200000000000005e-05, |
|
"loss": 7.0149, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 3.4399999999999996e-05, |
|
"loss": 7.1468, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 3.46e-05, |
|
"loss": 7.1226, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 3.48e-05, |
|
"loss": 7.0432, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 3.5e-05, |
|
"loss": 6.8863, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 3.52e-05, |
|
"loss": 6.9892, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 3.54e-05, |
|
"loss": 7.0484, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 3.56e-05, |
|
"loss": 7.1684, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 3.58e-05, |
|
"loss": 6.9025, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 3.6e-05, |
|
"loss": 6.9378, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 3.62e-05, |
|
"loss": 6.7895, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 3.6400000000000004e-05, |
|
"loss": 6.8623, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 3.66e-05, |
|
"loss": 7.0826, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 3.68e-05, |
|
"loss": 6.8756, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 3.7e-05, |
|
"loss": 6.9103, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 3.72e-05, |
|
"loss": 6.8717, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 3.74e-05, |
|
"loss": 7.0968, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 3.76e-05, |
|
"loss": 7.0395, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 3.7800000000000004e-05, |
|
"loss": 6.8671, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 3.8e-05, |
|
"loss": 6.8825, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 3.82e-05, |
|
"loss": 6.7859, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 3.8400000000000005e-05, |
|
"loss": 6.7025, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 3.86e-05, |
|
"loss": 7.0283, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 3.88e-05, |
|
"loss": 6.7671, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 3.9000000000000006e-05, |
|
"loss": 6.6417, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 3.9200000000000004e-05, |
|
"loss": 6.2291, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 3.94e-05, |
|
"loss": 7.0375, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 3.960000000000001e-05, |
|
"loss": 6.9879, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 3.9800000000000005e-05, |
|
"loss": 7.0054, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4e-05, |
|
"loss": 6.8702, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.02e-05, |
|
"loss": 6.8741, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.0400000000000006e-05, |
|
"loss": 6.964, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.0600000000000004e-05, |
|
"loss": 6.9912, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.08e-05, |
|
"loss": 7.1566, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.1e-05, |
|
"loss": 6.989, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.12e-05, |
|
"loss": 7.1266, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.14e-05, |
|
"loss": 6.8965, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.16e-05, |
|
"loss": 6.8712, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.18e-05, |
|
"loss": 6.9341, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.2e-05, |
|
"loss": 6.9179, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.22e-05, |
|
"loss": 6.9548, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.24e-05, |
|
"loss": 6.9452, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.26e-05, |
|
"loss": 6.9553, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.2800000000000004e-05, |
|
"loss": 6.952, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.3e-05, |
|
"loss": 6.8752, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.32e-05, |
|
"loss": 7.0775, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.3400000000000005e-05, |
|
"loss": 6.8812, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.36e-05, |
|
"loss": 7.2083, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.38e-05, |
|
"loss": 7.0885, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.4000000000000006e-05, |
|
"loss": 7.0697, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.4200000000000004e-05, |
|
"loss": 6.8708, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.44e-05, |
|
"loss": 6.7835, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.46e-05, |
|
"loss": 7.033, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.4800000000000005e-05, |
|
"loss": 7.0343, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.5e-05, |
|
"loss": 6.8657, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.52e-05, |
|
"loss": 6.9962, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.5400000000000006e-05, |
|
"loss": 7.0693, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.5600000000000004e-05, |
|
"loss": 6.9766, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.58e-05, |
|
"loss": 6.9592, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.600000000000001e-05, |
|
"loss": 6.8095, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.6200000000000005e-05, |
|
"loss": 7.0233, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.64e-05, |
|
"loss": 6.9603, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.660000000000001e-05, |
|
"loss": 6.9996, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.6800000000000006e-05, |
|
"loss": 6.9041, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.7e-05, |
|
"loss": 7.0113, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.72e-05, |
|
"loss": 6.7716, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.74e-05, |
|
"loss": 6.9219, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.76e-05, |
|
"loss": 6.8892, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.78e-05, |
|
"loss": 7.0176, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.8e-05, |
|
"loss": 7.037, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.82e-05, |
|
"loss": 6.7713, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.8400000000000004e-05, |
|
"loss": 6.741, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.86e-05, |
|
"loss": 7.2401, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.88e-05, |
|
"loss": 6.6211, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.9e-05, |
|
"loss": 6.5931, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.92e-05, |
|
"loss": 6.2639, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.94e-05, |
|
"loss": 7.0658, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.96e-05, |
|
"loss": 6.8681, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.9800000000000004e-05, |
|
"loss": 7.0808, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 5e-05, |
|
"loss": 6.8697, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 5.02e-05, |
|
"loss": 6.9275, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 5.0400000000000005e-05, |
|
"loss": 7.0266, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 5.0600000000000003e-05, |
|
"loss": 6.8679, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 5.08e-05, |
|
"loss": 7.0036, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 5.1000000000000006e-05, |
|
"loss": 6.9099, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 5.1200000000000004e-05, |
|
"loss": 6.8513, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 5.14e-05, |
|
"loss": 6.9212, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 5.16e-05, |
|
"loss": 6.8269, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 5.1800000000000005e-05, |
|
"loss": 6.8293, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 5.2000000000000004e-05, |
|
"loss": 6.9572, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 5.22e-05, |
|
"loss": 6.8728, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 5.2400000000000007e-05, |
|
"loss": 6.9658, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 5.2600000000000005e-05, |
|
"loss": 6.8423, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 5.28e-05, |
|
"loss": 6.8384, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 5.300000000000001e-05, |
|
"loss": 6.9504, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 5.3200000000000006e-05, |
|
"loss": 6.9562, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 5.3400000000000004e-05, |
|
"loss": 7.029, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 5.360000000000001e-05, |
|
"loss": 6.8449, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 5.380000000000001e-05, |
|
"loss": 7.0153, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 5.4000000000000005e-05, |
|
"loss": 7.0158, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 5.420000000000001e-05, |
|
"loss": 7.0075, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 5.440000000000001e-05, |
|
"loss": 6.7583, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 5.4600000000000006e-05, |
|
"loss": 6.9008, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 5.4800000000000004e-05, |
|
"loss": 6.9393, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 5.500000000000001e-05, |
|
"loss": 7.0252, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 5.520000000000001e-05, |
|
"loss": 6.8239, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 5.5400000000000005e-05, |
|
"loss": 6.7894, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 5.560000000000001e-05, |
|
"loss": 6.9119, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 5.580000000000001e-05, |
|
"loss": 7.0304, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 5.6000000000000006e-05, |
|
"loss": 7.009, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 5.620000000000001e-05, |
|
"loss": 6.9092, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 5.6399999999999995e-05, |
|
"loss": 6.926, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 5.66e-05, |
|
"loss": 6.8346, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 5.68e-05, |
|
"loss": 6.8209, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 5.6999999999999996e-05, |
|
"loss": 7.0022, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 5.72e-05, |
|
"loss": 6.8586, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 5.74e-05, |
|
"loss": 7.0947, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 5.76e-05, |
|
"loss": 6.968, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 5.7799999999999995e-05, |
|
"loss": 6.6243, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 5.8e-05, |
|
"loss": 6.7225, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 5.82e-05, |
|
"loss": 6.864, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 5.8399999999999997e-05, |
|
"loss": 7.034, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 5.86e-05, |
|
"loss": 6.5696, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 5.88e-05, |
|
"loss": 6.6805, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 5.9e-05, |
|
"loss": 6.8128, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 5.92e-05, |
|
"loss": 6.4896, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 5.94e-05, |
|
"loss": 6.8916, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 5.96e-05, |
|
"loss": 7.0109, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 5.9800000000000003e-05, |
|
"loss": 7.2182, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 6e-05, |
|
"loss": 6.8207, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 6.02e-05, |
|
"loss": 6.9541, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 6.04e-05, |
|
"loss": 6.8865, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 6.06e-05, |
|
"loss": 6.9264, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 6.08e-05, |
|
"loss": 6.8721, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 6.1e-05, |
|
"loss": 6.7873, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 6.12e-05, |
|
"loss": 7.0311, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 6.14e-05, |
|
"loss": 6.7227, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 6.16e-05, |
|
"loss": 6.7776, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 6.18e-05, |
|
"loss": 6.813, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 6.2e-05, |
|
"loss": 6.9337, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 6.220000000000001e-05, |
|
"loss": 6.8514, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 6.24e-05, |
|
"loss": 7.0988, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 6.26e-05, |
|
"loss": 6.9746, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 6.280000000000001e-05, |
|
"loss": 6.9976, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 6.3e-05, |
|
"loss": 6.8891, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 6.32e-05, |
|
"loss": 6.9056, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 6.340000000000001e-05, |
|
"loss": 6.9837, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 6.36e-05, |
|
"loss": 6.8951, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 6.38e-05, |
|
"loss": 6.8557, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 6.400000000000001e-05, |
|
"loss": 6.9098, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 6.42e-05, |
|
"loss": 6.7932, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 6.440000000000001e-05, |
|
"loss": 6.9744, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 6.460000000000001e-05, |
|
"loss": 6.9093, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 6.48e-05, |
|
"loss": 6.9594, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 6.500000000000001e-05, |
|
"loss": 6.8792, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 6.52e-05, |
|
"loss": 7.0447, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 6.54e-05, |
|
"loss": 6.9182, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 6.560000000000001e-05, |
|
"loss": 7.0251, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 6.58e-05, |
|
"loss": 7.0966, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 6.6e-05, |
|
"loss": 6.862, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 6.620000000000001e-05, |
|
"loss": 6.9816, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 6.64e-05, |
|
"loss": 7.0401, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 6.66e-05, |
|
"loss": 6.8123, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 6.680000000000001e-05, |
|
"loss": 7.0709, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 6.7e-05, |
|
"loss": 6.8759, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 6.720000000000001e-05, |
|
"loss": 6.9694, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 6.740000000000001e-05, |
|
"loss": 7.6347, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 6.76e-05, |
|
"loss": 7.0917, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 6.780000000000001e-05, |
|
"loss": 6.83, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 6.800000000000001e-05, |
|
"loss": 7.0027, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 6.82e-05, |
|
"loss": 6.8037, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 6.840000000000001e-05, |
|
"loss": 6.7355, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 6.860000000000001e-05, |
|
"loss": 6.9161, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 6.879999999999999e-05, |
|
"loss": 6.9802, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 6.9e-05, |
|
"loss": 6.5977, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 6.92e-05, |
|
"loss": 6.7131, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 6.939999999999999e-05, |
|
"loss": 7.1996, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 6.96e-05, |
|
"loss": 7.0542, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 6.98e-05, |
|
"loss": 6.943, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 7e-05, |
|
"loss": 7.0235, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 7.02e-05, |
|
"loss": 6.8991, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 7.04e-05, |
|
"loss": 6.998, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 7.06e-05, |
|
"loss": 7.0441, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 7.08e-05, |
|
"loss": 6.8244, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 7.1e-05, |
|
"loss": 6.9419, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 7.12e-05, |
|
"loss": 7.045, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 7.14e-05, |
|
"loss": 6.8331, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 7.16e-05, |
|
"loss": 6.8301, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 7.18e-05, |
|
"loss": 7.032, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 7.2e-05, |
|
"loss": 6.9414, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 7.22e-05, |
|
"loss": 6.8982, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 7.24e-05, |
|
"loss": 6.9995, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 7.26e-05, |
|
"loss": 6.9437, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 7.280000000000001e-05, |
|
"loss": 7.0787, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 7.3e-05, |
|
"loss": 6.9583, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 7.32e-05, |
|
"loss": 7.0143, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 7.340000000000001e-05, |
|
"loss": 6.8137, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 7.36e-05, |
|
"loss": 6.8843, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 7.38e-05, |
|
"loss": 6.7664, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 7.4e-05, |
|
"loss": 6.8902, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 7.42e-05, |
|
"loss": 6.8532, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 7.44e-05, |
|
"loss": 7.034, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 7.46e-05, |
|
"loss": 6.8505, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 7.48e-05, |
|
"loss": 6.9052, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 6.9518, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 7.52e-05, |
|
"loss": 6.9093, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 7.54e-05, |
|
"loss": 6.8996, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 7.560000000000001e-05, |
|
"loss": 6.9103, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 7.58e-05, |
|
"loss": 6.727, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 7.6e-05, |
|
"loss": 7.0833, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 7.620000000000001e-05, |
|
"loss": 6.8944, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 7.64e-05, |
|
"loss": 6.7779, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 7.66e-05, |
|
"loss": 6.8474, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 7.680000000000001e-05, |
|
"loss": 6.9242, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 7.7e-05, |
|
"loss": 7.0125, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 7.72e-05, |
|
"loss": 7.1872, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 7.740000000000001e-05, |
|
"loss": 7.0436, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 7.76e-05, |
|
"loss": 6.8846, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 7.780000000000001e-05, |
|
"loss": 6.7697, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 7.800000000000001e-05, |
|
"loss": 6.8717, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 7.82e-05, |
|
"loss": 6.7052, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 7.840000000000001e-05, |
|
"loss": 6.8186, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 7.860000000000001e-05, |
|
"loss": 6.5507, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 7.88e-05, |
|
"loss": 6.5835, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 7.900000000000001e-05, |
|
"loss": 6.4965, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 7.920000000000001e-05, |
|
"loss": 6.3385, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 7.94e-05, |
|
"loss": 6.9722, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 7.960000000000001e-05, |
|
"loss": 6.9542, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 7.98e-05, |
|
"loss": 6.9712, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 8e-05, |
|
"loss": 7.0509, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 8.020000000000001e-05, |
|
"loss": 6.8197, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 8.04e-05, |
|
"loss": 6.9267, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 8.060000000000001e-05, |
|
"loss": 6.9446, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 8.080000000000001e-05, |
|
"loss": 6.9549, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 8.1e-05, |
|
"loss": 6.8005, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 8.120000000000001e-05, |
|
"loss": 7.0207, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 8.14e-05, |
|
"loss": 7.0019, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 8.16e-05, |
|
"loss": 6.9901, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 8.18e-05, |
|
"loss": 6.8819, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 8.2e-05, |
|
"loss": 6.813, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 8.22e-05, |
|
"loss": 6.8615, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 8.24e-05, |
|
"loss": 7.1023, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 8.26e-05, |
|
"loss": 6.8824, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 8.28e-05, |
|
"loss": 6.7186, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 8.3e-05, |
|
"loss": 6.8284, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 8.32e-05, |
|
"loss": 7.0155, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 8.34e-05, |
|
"loss": 6.9274, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 8.36e-05, |
|
"loss": 6.8079, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.38e-05, |
|
"loss": 6.9139, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.4e-05, |
|
"loss": 6.8378, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.42e-05, |
|
"loss": 7.1138, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.44e-05, |
|
"loss": 6.8973, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.46e-05, |
|
"loss": 6.8576, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.48e-05, |
|
"loss": 6.9101, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.5e-05, |
|
"loss": 7.0098, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.52e-05, |
|
"loss": 6.9232, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.54e-05, |
|
"loss": 6.9982, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.560000000000001e-05, |
|
"loss": 6.6777, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.58e-05, |
|
"loss": 6.9689, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.6e-05, |
|
"loss": 6.9129, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.620000000000001e-05, |
|
"loss": 6.9933, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.64e-05, |
|
"loss": 6.7077, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.66e-05, |
|
"loss": 6.8051, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.680000000000001e-05, |
|
"loss": 6.775, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.7e-05, |
|
"loss": 7.0047, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.72e-05, |
|
"loss": 6.8368, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.740000000000001e-05, |
|
"loss": 6.9213, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.76e-05, |
|
"loss": 6.7953, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.78e-05, |
|
"loss": 6.8183, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.800000000000001e-05, |
|
"loss": 6.7266, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.82e-05, |
|
"loss": 6.6057, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.840000000000001e-05, |
|
"loss": 6.7775, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.86e-05, |
|
"loss": 6.7385, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.88e-05, |
|
"loss": 6.8687, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.900000000000001e-05, |
|
"loss": 6.5854, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.92e-05, |
|
"loss": 6.4474, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.94e-05, |
|
"loss": 6.8885, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.960000000000001e-05, |
|
"loss": 6.8552, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.98e-05, |
|
"loss": 6.9002, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 9e-05, |
|
"loss": 6.8761, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 9.020000000000001e-05, |
|
"loss": 6.91, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 9.04e-05, |
|
"loss": 6.8655, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 9.06e-05, |
|
"loss": 6.895, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 9.080000000000001e-05, |
|
"loss": 7.0138, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 9.1e-05, |
|
"loss": 6.9679, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 9.120000000000001e-05, |
|
"loss": 6.9424, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 9.140000000000001e-05, |
|
"loss": 6.8648, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 9.16e-05, |
|
"loss": 6.8458, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 9.180000000000001e-05, |
|
"loss": 6.9237, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 9.200000000000001e-05, |
|
"loss": 6.9511, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 9.22e-05, |
|
"loss": 6.9354, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 9.240000000000001e-05, |
|
"loss": 6.8418, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 9.260000000000001e-05, |
|
"loss": 6.7693, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 9.28e-05, |
|
"loss": 6.8263, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 9.300000000000001e-05, |
|
"loss": 6.6364, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 9.320000000000002e-05, |
|
"loss": 7.0722, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 9.340000000000001e-05, |
|
"loss": 6.8436, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 9.360000000000001e-05, |
|
"loss": 6.9876, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 9.38e-05, |
|
"loss": 6.9677, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 9.4e-05, |
|
"loss": 7.0472, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 9.42e-05, |
|
"loss": 6.8728, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 9.44e-05, |
|
"loss": 6.7339, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 9.46e-05, |
|
"loss": 6.668, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 9.48e-05, |
|
"loss": 6.8264, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 9.5e-05, |
|
"loss": 6.8349, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 9.52e-05, |
|
"loss": 7.0129, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 9.54e-05, |
|
"loss": 6.7754, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 9.56e-05, |
|
"loss": 6.7871, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 9.58e-05, |
|
"loss": 6.8136, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 9.6e-05, |
|
"loss": 6.918, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 9.620000000000001e-05, |
|
"loss": 6.9401, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 9.64e-05, |
|
"loss": 6.768, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 9.66e-05, |
|
"loss": 6.808, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 9.680000000000001e-05, |
|
"loss": 6.9011, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 9.7e-05, |
|
"loss": 6.952, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 9.72e-05, |
|
"loss": 6.8199, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 9.74e-05, |
|
"loss": 6.945, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 9.76e-05, |
|
"loss": 6.9324, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 9.78e-05, |
|
"loss": 6.9868, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 9.8e-05, |
|
"loss": 6.87, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 9.82e-05, |
|
"loss": 6.8098, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 9.84e-05, |
|
"loss": 6.7418, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 9.86e-05, |
|
"loss": 6.7101, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 9.88e-05, |
|
"loss": 6.9152, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 9.900000000000001e-05, |
|
"loss": 6.6035, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 9.92e-05, |
|
"loss": 6.5791, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"eval_loss": 6.816896915435791, |
|
"eval_runtime": 765.6629, |
|
"eval_samples_per_second": 3.451, |
|
"eval_steps_per_second": 0.289, |
|
"eval_wer": 1.5492588844574082, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 9.94e-05, |
|
"loss": 6.9541, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 9.960000000000001e-05, |
|
"loss": 6.9048, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 9.98e-05, |
|
"loss": 6.939, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 0.0001, |
|
"loss": 6.8767, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 9.985486211901307e-05, |
|
"loss": 6.8614, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 9.970972423802612e-05, |
|
"loss": 6.9876, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 9.956458635703919e-05, |
|
"loss": 6.8565, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 9.941944847605225e-05, |
|
"loss": 6.8818, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 9.927431059506532e-05, |
|
"loss": 6.8064, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 9.912917271407838e-05, |
|
"loss": 6.898, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 9.898403483309145e-05, |
|
"loss": 6.7305, |
|
"step": 511 |
|
}, |
|
    { "epoch": 0.43, "learning_rate": 9.883889695210451e-05, "loss": 6.9452, "step": 512 },
    { "epoch": 0.43, "learning_rate": 9.869375907111757e-05, "loss": 6.8496, "step": 513 },
    { "epoch": 0.43, "learning_rate": 9.854862119013063e-05, "loss": 6.7898, "step": 514 },
    { "epoch": 0.43, "learning_rate": 9.84034833091437e-05, "loss": 6.9233, "step": 515 },
    { "epoch": 0.43, "learning_rate": 9.825834542815675e-05, "loss": 6.7248, "step": 516 },
    { "epoch": 0.43, "learning_rate": 9.811320754716981e-05, "loss": 6.9197, "step": 517 },
    { "epoch": 0.44, "learning_rate": 9.796806966618288e-05, "loss": 6.7908, "step": 518 },
    { "epoch": 0.44, "learning_rate": 9.782293178519594e-05, "loss": 7.065, "step": 519 },
    { "epoch": 0.44, "learning_rate": 9.767779390420901e-05, "loss": 6.7811, "step": 520 },
    { "epoch": 0.44, "learning_rate": 9.753265602322207e-05, "loss": 6.9213, "step": 521 },
    { "epoch": 0.44, "learning_rate": 9.738751814223513e-05, "loss": 6.9074, "step": 522 },
    { "epoch": 0.44, "learning_rate": 9.724238026124819e-05, "loss": 6.9336, "step": 523 },
    { "epoch": 0.44, "learning_rate": 9.709724238026126e-05, "loss": 6.8676, "step": 524 },
    { "epoch": 0.44, "learning_rate": 9.695210449927431e-05, "loss": 6.8523, "step": 525 },
    { "epoch": 0.44, "learning_rate": 9.680696661828737e-05, "loss": 6.7354, "step": 526 },
    { "epoch": 0.44, "learning_rate": 9.666182873730044e-05, "loss": 6.8742, "step": 527 },
    { "epoch": 0.44, "learning_rate": 9.65166908563135e-05, "loss": 6.8885, "step": 528 },
    { "epoch": 0.44, "learning_rate": 9.637155297532656e-05, "loss": 6.8817, "step": 529 },
    { "epoch": 0.45, "learning_rate": 9.622641509433963e-05, "loss": 6.874, "step": 530 },
    { "epoch": 0.45, "learning_rate": 9.60812772133527e-05, "loss": 6.8362, "step": 531 },
    { "epoch": 0.45, "learning_rate": 9.593613933236575e-05, "loss": 6.8776, "step": 532 },
    { "epoch": 0.45, "learning_rate": 9.579100145137882e-05, "loss": 6.7152, "step": 533 },
    { "epoch": 0.45, "learning_rate": 9.564586357039188e-05, "loss": 6.7962, "step": 534 },
    { "epoch": 0.45, "learning_rate": 9.550072568940493e-05, "loss": 6.9336, "step": 535 },
    { "epoch": 0.45, "learning_rate": 9.5355587808418e-05, "loss": 6.7417, "step": 536 },
    { "epoch": 0.45, "learning_rate": 9.521044992743106e-05, "loss": 6.8053, "step": 537 },
    { "epoch": 0.45, "learning_rate": 9.506531204644412e-05, "loss": 6.7024, "step": 538 },
    { "epoch": 0.45, "learning_rate": 9.492017416545718e-05, "loss": 6.874, "step": 539 },
    { "epoch": 0.45, "learning_rate": 9.477503628447025e-05, "loss": 6.9014, "step": 540 },
    { "epoch": 0.45, "learning_rate": 9.462989840348333e-05, "loss": 6.9623, "step": 541 },
    { "epoch": 0.46, "learning_rate": 9.448476052249638e-05, "loss": 6.7767, "step": 542 },
    { "epoch": 0.46, "learning_rate": 9.433962264150944e-05, "loss": 6.6075, "step": 543 },
    { "epoch": 0.46, "learning_rate": 9.419448476052251e-05, "loss": 6.9661, "step": 544 },
    { "epoch": 0.46, "learning_rate": 9.404934687953556e-05, "loss": 6.8717, "step": 545 },
    { "epoch": 0.46, "learning_rate": 9.390420899854863e-05, "loss": 6.7853, "step": 546 },
    { "epoch": 0.46, "learning_rate": 9.375907111756169e-05, "loss": 6.5027, "step": 547 },
    { "epoch": 0.46, "learning_rate": 9.361393323657474e-05, "loss": 6.6507, "step": 548 },
    { "epoch": 0.46, "learning_rate": 9.346879535558781e-05, "loss": 6.5563, "step": 549 },
    { "epoch": 0.46, "learning_rate": 9.332365747460087e-05, "loss": 6.4705, "step": 550 },
    { "epoch": 0.46, "learning_rate": 9.317851959361394e-05, "loss": 7.015, "step": 551 },
    { "epoch": 0.46, "learning_rate": 9.3033381712627e-05, "loss": 6.8955, "step": 552 },
    { "epoch": 0.46, "learning_rate": 9.288824383164007e-05, "loss": 6.9354, "step": 553 },
    { "epoch": 0.47, "learning_rate": 9.274310595065312e-05, "loss": 6.798, "step": 554 },
    { "epoch": 0.47, "learning_rate": 9.259796806966619e-05, "loss": 6.9976, "step": 555 },
    { "epoch": 0.47, "learning_rate": 9.245283018867925e-05, "loss": 6.8724, "step": 556 },
    { "epoch": 0.47, "learning_rate": 9.230769230769232e-05, "loss": 6.8855, "step": 557 },
    { "epoch": 0.47, "learning_rate": 9.216255442670537e-05, "loss": 6.8249, "step": 558 },
    { "epoch": 0.47, "learning_rate": 9.201741654571843e-05, "loss": 6.9647, "step": 559 },
    { "epoch": 0.47, "learning_rate": 9.18722786647315e-05, "loss": 6.786, "step": 560 },
    { "epoch": 0.47, "learning_rate": 9.172714078374456e-05, "loss": 7.0736, "step": 561 },
    { "epoch": 0.47, "learning_rate": 9.158200290275763e-05, "loss": 6.6794, "step": 562 },
    { "epoch": 0.47, "learning_rate": 9.14368650217707e-05, "loss": 6.9444, "step": 563 },
    { "epoch": 0.47, "learning_rate": 9.129172714078375e-05, "loss": 6.5859, "step": 564 },
    { "epoch": 0.47, "learning_rate": 9.114658925979681e-05, "loss": 6.7858, "step": 565 },
    { "epoch": 0.48, "learning_rate": 9.100145137880988e-05, "loss": 6.8445, "step": 566 },
    { "epoch": 0.48, "learning_rate": 9.085631349782293e-05, "loss": 6.7727, "step": 567 },
    { "epoch": 0.48, "learning_rate": 9.0711175616836e-05, "loss": 6.8257, "step": 568 },
    { "epoch": 0.48, "learning_rate": 9.056603773584906e-05, "loss": 6.846, "step": 569 },
    { "epoch": 0.48, "learning_rate": 9.042089985486212e-05, "loss": 6.8269, "step": 570 },
    { "epoch": 0.48, "learning_rate": 9.027576197387519e-05, "loss": 6.886, "step": 571 },
    { "epoch": 0.48, "learning_rate": 9.013062409288826e-05, "loss": 6.88, "step": 572 },
    { "epoch": 0.48, "learning_rate": 8.998548621190132e-05, "loss": 6.8439, "step": 573 },
    { "epoch": 0.48, "learning_rate": 8.984034833091437e-05, "loss": 6.9082, "step": 574 },
    { "epoch": 0.48, "learning_rate": 8.969521044992744e-05, "loss": 7.0682, "step": 575 },
    { "epoch": 0.48, "learning_rate": 8.95500725689405e-05, "loss": 6.9712, "step": 576 },
    { "epoch": 0.49, "learning_rate": 8.940493468795355e-05, "loss": 6.9183, "step": 577 },
    { "epoch": 0.49, "learning_rate": 8.925979680696662e-05, "loss": 6.8653, "step": 578 },
    { "epoch": 0.49, "learning_rate": 8.911465892597968e-05, "loss": 6.8013, "step": 579 },
    { "epoch": 0.49, "learning_rate": 8.896952104499274e-05, "loss": 6.9922, "step": 580 },
    { "epoch": 0.49, "learning_rate": 8.882438316400582e-05, "loss": 6.9692, "step": 581 },
    { "epoch": 0.49, "learning_rate": 8.867924528301888e-05, "loss": 6.7556, "step": 582 },
    { "epoch": 0.49, "learning_rate": 8.853410740203193e-05, "loss": 6.7936, "step": 583 },
    { "epoch": 0.49, "learning_rate": 8.8388969521045e-05, "loss": 6.7749, "step": 584 },
    { "epoch": 0.49, "learning_rate": 8.824383164005806e-05, "loss": 7.0034, "step": 585 },
    { "epoch": 0.49, "learning_rate": 8.809869375907113e-05, "loss": 6.8547, "step": 586 },
    { "epoch": 0.49, "learning_rate": 8.795355587808418e-05, "loss": 6.9211, "step": 587 },
    { "epoch": 0.49, "learning_rate": 8.780841799709725e-05, "loss": 6.8094, "step": 588 },
    { "epoch": 0.5, "learning_rate": 8.766328011611031e-05, "loss": 6.7241, "step": 589 },
    { "epoch": 0.5, "learning_rate": 8.751814223512336e-05, "loss": 6.8183, "step": 590 },
    { "epoch": 0.5, "learning_rate": 8.737300435413643e-05, "loss": 6.907, "step": 591 },
    { "epoch": 0.5, "learning_rate": 8.722786647314949e-05, "loss": 6.7743, "step": 592 },
    { "epoch": 0.5, "learning_rate": 8.708272859216256e-05, "loss": 6.892, "step": 593 },
    { "epoch": 0.5, "learning_rate": 8.693759071117562e-05, "loss": 6.7386, "step": 594 },
    { "epoch": 0.5, "learning_rate": 8.679245283018869e-05, "loss": 6.641, "step": 595 },
    { "epoch": 0.5, "learning_rate": 8.664731494920174e-05, "loss": 6.6438, "step": 596 },
    { "epoch": 0.5, "learning_rate": 8.65021770682148e-05, "loss": 6.5923, "step": 597 },
    { "epoch": 0.5, "learning_rate": 8.635703918722787e-05, "loss": 6.598, "step": 598 },
    { "epoch": 0.5, "learning_rate": 8.621190130624092e-05, "loss": 6.4153, "step": 599 },
    { "epoch": 0.5, "learning_rate": 8.606676342525399e-05, "loss": 6.3571, "step": 600 },
    { "epoch": 0.51, "learning_rate": 8.592162554426705e-05, "loss": 6.776, "step": 601 },
    { "epoch": 0.51, "learning_rate": 8.577648766328012e-05, "loss": 6.986, "step": 602 },
    { "epoch": 0.51, "learning_rate": 8.563134978229318e-05, "loss": 6.9519, "step": 603 },
    { "epoch": 0.51, "learning_rate": 8.548621190130625e-05, "loss": 6.8186, "step": 604 },
    { "epoch": 0.51, "learning_rate": 8.534107402031931e-05, "loss": 6.9578, "step": 605 },
    { "epoch": 0.51, "learning_rate": 8.519593613933237e-05, "loss": 7.0568, "step": 606 },
    { "epoch": 0.51, "learning_rate": 8.505079825834543e-05, "loss": 6.7339, "step": 607 },
    { "epoch": 0.51, "learning_rate": 8.49056603773585e-05, "loss": 6.9278, "step": 608 },
    { "epoch": 0.51, "learning_rate": 8.476052249637155e-05, "loss": 6.8668, "step": 609 },
    { "epoch": 0.51, "learning_rate": 8.461538461538461e-05, "loss": 6.9006, "step": 610 },
    { "epoch": 0.51, "learning_rate": 8.447024673439768e-05, "loss": 6.9543, "step": 611 },
    { "epoch": 0.51, "learning_rate": 8.432510885341074e-05, "loss": 6.7867, "step": 612 },
    { "epoch": 0.52, "learning_rate": 8.417997097242381e-05, "loss": 6.8398, "step": 613 },
    { "epoch": 0.52, "learning_rate": 8.403483309143688e-05, "loss": 6.769, "step": 614 },
    { "epoch": 0.52, "learning_rate": 8.388969521044994e-05, "loss": 6.9037, "step": 615 },
    { "epoch": 0.52, "learning_rate": 8.374455732946299e-05, "loss": 6.7226, "step": 616 },
    { "epoch": 0.52, "learning_rate": 8.359941944847606e-05, "loss": 6.8153, "step": 617 },
    { "epoch": 0.52, "learning_rate": 8.345428156748912e-05, "loss": 6.8123, "step": 618 },
    { "epoch": 0.52, "learning_rate": 8.330914368650217e-05, "loss": 6.8919, "step": 619 },
    { "epoch": 0.52, "learning_rate": 8.316400580551524e-05, "loss": 6.938, "step": 620 },
    { "epoch": 0.52, "learning_rate": 8.30188679245283e-05, "loss": 6.6307, "step": 621 },
    { "epoch": 0.52, "learning_rate": 8.287373004354137e-05, "loss": 6.7166, "step": 622 },
    { "epoch": 0.52, "learning_rate": 8.272859216255444e-05, "loss": 6.7758, "step": 623 },
    { "epoch": 0.52, "learning_rate": 8.25834542815675e-05, "loss": 6.8605, "step": 624 },
    { "epoch": 0.53, "learning_rate": 8.243831640058055e-05, "loss": 6.8635, "step": 625 },
    { "epoch": 0.53, "learning_rate": 8.229317851959362e-05, "loss": 6.9247, "step": 626 },
    { "epoch": 0.53, "learning_rate": 8.214804063860668e-05, "loss": 6.9132, "step": 627 },
    { "epoch": 0.53, "learning_rate": 8.200290275761974e-05, "loss": 6.6796, "step": 628 },
    { "epoch": 0.53, "learning_rate": 8.18577648766328e-05, "loss": 6.7311, "step": 629 },
    { "epoch": 0.53, "learning_rate": 8.171262699564587e-05, "loss": 6.7434, "step": 630 },
    { "epoch": 0.53, "learning_rate": 8.156748911465893e-05, "loss": 6.8427, "step": 631 },
    { "epoch": 0.53, "learning_rate": 8.142235123367198e-05, "loss": 6.9377, "step": 632 },
    { "epoch": 0.53, "learning_rate": 8.127721335268506e-05, "loss": 6.8147, "step": 633 },
    { "epoch": 0.53, "learning_rate": 8.113207547169813e-05, "loss": 6.9054, "step": 634 },
    { "epoch": 0.53, "learning_rate": 8.098693759071118e-05, "loss": 6.8393, "step": 635 },
    { "epoch": 0.53, "learning_rate": 8.084179970972424e-05, "loss": 6.8763, "step": 636 },
    { "epoch": 0.54, "learning_rate": 8.069666182873731e-05, "loss": 6.7708, "step": 637 },
    { "epoch": 0.54, "learning_rate": 8.055152394775036e-05, "loss": 6.7692, "step": 638 },
    { "epoch": 0.54, "learning_rate": 8.040638606676343e-05, "loss": 6.8221, "step": 639 },
    { "epoch": 0.54, "learning_rate": 8.026124818577649e-05, "loss": 7.0328, "step": 640 },
    { "epoch": 0.54, "learning_rate": 8.011611030478954e-05, "loss": 6.7504, "step": 641 },
    { "epoch": 0.54, "learning_rate": 7.997097242380261e-05, "loss": 6.7132, "step": 642 },
    { "epoch": 0.54, "learning_rate": 7.982583454281567e-05, "loss": 6.9498, "step": 643 },
    { "epoch": 0.54, "learning_rate": 7.968069666182875e-05, "loss": 6.7619, "step": 644 },
    { "epoch": 0.54, "learning_rate": 7.95355587808418e-05, "loss": 6.8033, "step": 645 },
    { "epoch": 0.54, "learning_rate": 7.939042089985487e-05, "loss": 6.6719, "step": 646 },
    { "epoch": 0.54, "learning_rate": 7.924528301886794e-05, "loss": 6.2472, "step": 647 },
    { "epoch": 0.54, "learning_rate": 7.910014513788099e-05, "loss": 6.7799, "step": 648 },
    { "epoch": 0.55, "learning_rate": 7.895500725689405e-05, "loss": 6.755, "step": 649 },
    { "epoch": 0.55, "learning_rate": 7.880986937590712e-05, "loss": 6.2811, "step": 650 },
    { "epoch": 0.55, "learning_rate": 7.866473149492017e-05, "loss": 7.0813, "step": 651 },
    { "epoch": 0.55, "learning_rate": 7.851959361393323e-05, "loss": 6.8376, "step": 652 },
    { "epoch": 0.55, "learning_rate": 7.83744557329463e-05, "loss": 6.9793, "step": 653 },
    { "epoch": 0.55, "learning_rate": 7.822931785195937e-05, "loss": 6.9689, "step": 654 },
    { "epoch": 0.55, "learning_rate": 7.808417997097243e-05, "loss": 6.9487, "step": 655 },
    { "epoch": 0.55, "learning_rate": 7.79390420899855e-05, "loss": 6.897, "step": 656 },
    { "epoch": 0.55, "learning_rate": 7.779390420899855e-05, "loss": 6.7693, "step": 657 },
    { "epoch": 0.55, "learning_rate": 7.764876632801161e-05, "loss": 6.8785, "step": 658 },
    { "epoch": 0.55, "learning_rate": 7.750362844702468e-05, "loss": 6.8355, "step": 659 },
    { "epoch": 0.55, "learning_rate": 7.735849056603774e-05, "loss": 6.8414, "step": 660 },
    { "epoch": 0.56, "learning_rate": 7.72133526850508e-05, "loss": 6.7411, "step": 661 },
    { "epoch": 0.56, "learning_rate": 7.706821480406386e-05, "loss": 6.8667, "step": 662 },
    { "epoch": 0.56, "learning_rate": 7.692307692307693e-05, "loss": 6.6934, "step": 663 },
    { "epoch": 0.56, "learning_rate": 7.677793904208999e-05, "loss": 6.9048, "step": 664 },
    { "epoch": 0.56, "learning_rate": 7.663280116110306e-05, "loss": 6.8159, "step": 665 },
    { "epoch": 0.56, "learning_rate": 7.648766328011612e-05, "loss": 6.6955, "step": 666 },
    { "epoch": 0.56, "learning_rate": 7.634252539912917e-05, "loss": 6.9094, "step": 667 },
    { "epoch": 0.56, "learning_rate": 7.619738751814224e-05, "loss": 6.7883, "step": 668 },
    { "epoch": 0.56, "learning_rate": 7.60522496371553e-05, "loss": 7.106, "step": 669 },
    { "epoch": 0.56, "learning_rate": 7.590711175616836e-05, "loss": 6.9303, "step": 670 },
    { "epoch": 0.56, "learning_rate": 7.576197387518142e-05, "loss": 6.8192, "step": 671 },
    { "epoch": 0.56, "learning_rate": 7.561683599419449e-05, "loss": 6.7872, "step": 672 },
    { "epoch": 0.57, "learning_rate": 7.547169811320755e-05, "loss": 6.7987, "step": 673 },
    { "epoch": 0.57, "learning_rate": 7.532656023222062e-05, "loss": 6.7436, "step": 674 },
    { "epoch": 0.57, "learning_rate": 7.518142235123368e-05, "loss": 6.9109, "step": 675 },
    { "epoch": 0.57, "learning_rate": 7.503628447024675e-05, "loss": 6.7913, "step": 676 },
    { "epoch": 0.57, "learning_rate": 7.48911465892598e-05, "loss": 6.9789, "step": 677 },
    { "epoch": 0.57, "learning_rate": 7.474600870827286e-05, "loss": 6.9467, "step": 678 },
    { "epoch": 0.57, "learning_rate": 7.460087082728593e-05, "loss": 6.8445, "step": 679 },
    { "epoch": 0.57, "learning_rate": 7.445573294629898e-05, "loss": 6.96, "step": 680 },
    { "epoch": 0.57, "learning_rate": 7.431059506531205e-05, "loss": 6.6298, "step": 681 },
    { "epoch": 0.57, "learning_rate": 7.416545718432511e-05, "loss": 6.7145, "step": 682 },
    { "epoch": 0.57, "learning_rate": 7.402031930333816e-05, "loss": 6.7885, "step": 683 },
    { "epoch": 0.58, "learning_rate": 7.387518142235124e-05, "loss": 6.7045, "step": 684 },
    { "epoch": 0.58, "learning_rate": 7.373004354136431e-05, "loss": 6.786, "step": 685 },
    { "epoch": 0.58, "learning_rate": 7.358490566037736e-05, "loss": 6.7507, "step": 686 },
    { "epoch": 0.58, "learning_rate": 7.343976777939043e-05, "loss": 6.8735, "step": 687 },
    { "epoch": 0.58, "learning_rate": 7.329462989840349e-05, "loss": 6.8046, "step": 688 },
    { "epoch": 0.58, "learning_rate": 7.314949201741656e-05, "loss": 6.9841, "step": 689 },
    { "epoch": 0.58, "learning_rate": 7.300435413642961e-05, "loss": 6.7626, "step": 690 },
    { "epoch": 0.58, "learning_rate": 7.285921625544267e-05, "loss": 6.9631, "step": 691 },
    { "epoch": 0.58, "learning_rate": 7.271407837445574e-05, "loss": 6.7161, "step": 692 },
    { "epoch": 0.58, "learning_rate": 7.256894049346879e-05, "loss": 6.6926, "step": 693 },
    { "epoch": 0.58, "learning_rate": 7.242380261248185e-05, "loss": 6.8529, "step": 694 },
    { "epoch": 0.58, "learning_rate": 7.227866473149493e-05, "loss": 6.8109, "step": 695 },
    { "epoch": 0.59, "learning_rate": 7.213352685050799e-05, "loss": 6.4349, "step": 696 },
    { "epoch": 0.59, "learning_rate": 7.198838896952105e-05, "loss": 6.8403, "step": 697 },
    { "epoch": 0.59, "learning_rate": 7.184325108853412e-05, "loss": 6.7861, "step": 698 },
    { "epoch": 0.59, "learning_rate": 7.169811320754717e-05, "loss": 6.3615, "step": 699 },
    { "epoch": 0.59, "learning_rate": 7.155297532656023e-05, "loss": 6.6005, "step": 700 },
    { "epoch": 0.59, "learning_rate": 7.14078374455733e-05, "loss": 7.0148, "step": 701 },
    { "epoch": 0.59, "learning_rate": 7.126269956458636e-05, "loss": 6.7751, "step": 702 },
    { "epoch": 0.59, "learning_rate": 7.111756168359942e-05, "loss": 6.9264, "step": 703 },
    { "epoch": 0.59, "learning_rate": 7.097242380261248e-05, "loss": 6.7989, "step": 704 },
    { "epoch": 0.59, "learning_rate": 7.082728592162555e-05, "loss": 6.828, "step": 705 },
    { "epoch": 0.59, "learning_rate": 7.068214804063861e-05, "loss": 6.912, "step": 706 },
    { "epoch": 0.59, "learning_rate": 7.053701015965168e-05, "loss": 6.8274, "step": 707 },
    { "epoch": 0.6, "learning_rate": 7.039187227866474e-05, "loss": 6.7373, "step": 708 },
    { "epoch": 0.6, "learning_rate": 7.02467343976778e-05, "loss": 6.8207, "step": 709 },
    { "epoch": 0.6, "learning_rate": 7.010159651669086e-05, "loss": 6.9445, "step": 710 },
    { "epoch": 0.6, "learning_rate": 6.995645863570392e-05, "loss": 6.7258, "step": 711 },
    { "epoch": 0.6, "learning_rate": 6.981132075471698e-05, "loss": 7.0621, "step": 712 },
    { "epoch": 0.6, "learning_rate": 6.966618287373004e-05, "loss": 6.859, "step": 713 },
    { "epoch": 0.6, "learning_rate": 6.95210449927431e-05, "loss": 6.8342, "step": 714 },
    { "epoch": 0.6, "learning_rate": 6.937590711175617e-05, "loss": 6.7359, "step": 715 },
    { "epoch": 0.6, "learning_rate": 6.923076923076924e-05, "loss": 6.7588, "step": 716 },
    { "epoch": 0.6, "learning_rate": 6.90856313497823e-05, "loss": 6.786, "step": 717 },
    { "epoch": 0.6, "learning_rate": 6.894049346879537e-05, "loss": 6.8782, "step": 718 },
    { "epoch": 0.6, "learning_rate": 6.879535558780842e-05, "loss": 6.8366, "step": 719 },
    { "epoch": 0.61, "learning_rate": 6.865021770682148e-05, "loss": 6.7578, "step": 720 },
    { "epoch": 0.61, "learning_rate": 6.850507982583455e-05, "loss": 6.6656, "step": 721 },
    { "epoch": 0.61, "learning_rate": 6.83599419448476e-05, "loss": 6.7171, "step": 722 },
    { "epoch": 0.61, "learning_rate": 6.821480406386067e-05, "loss": 6.7658, "step": 723 },
    { "epoch": 0.61, "learning_rate": 6.806966618287373e-05, "loss": 6.6905, "step": 724 },
    { "epoch": 0.61, "learning_rate": 6.79245283018868e-05, "loss": 6.8089, "step": 725 },
    { "epoch": 0.61, "learning_rate": 6.777939042089986e-05, "loss": 6.7434, "step": 726 },
    { "epoch": 0.61, "learning_rate": 6.763425253991293e-05, "loss": 6.7556, "step": 727 },
    { "epoch": 0.61, "learning_rate": 6.748911465892598e-05, "loss": 6.8644, "step": 728 },
    { "epoch": 0.61, "learning_rate": 6.734397677793905e-05, "loss": 6.7758, "step": 729 },
    { "epoch": 0.61, "learning_rate": 6.719883889695211e-05, "loss": 6.8507, "step": 730 },
    { "epoch": 0.61, "learning_rate": 6.705370101596516e-05, "loss": 6.7743, "step": 731 },
    { "epoch": 0.62, "learning_rate": 6.690856313497823e-05, "loss": 6.8178, "step": 732 },
    { "epoch": 0.62, "learning_rate": 6.676342525399129e-05, "loss": 6.8479, "step": 733 },
    { "epoch": 0.62, "learning_rate": 6.661828737300436e-05, "loss": 6.7398, "step": 734 },
    { "epoch": 0.62, "learning_rate": 6.647314949201742e-05, "loss": 6.8079, "step": 735 },
    { "epoch": 0.62, "learning_rate": 6.632801161103049e-05, "loss": 6.7721, "step": 736 },
    { "epoch": 0.62, "learning_rate": 6.618287373004355e-05, "loss": 6.9678, "step": 737 },
    { "epoch": 0.62, "learning_rate": 6.60377358490566e-05, "loss": 6.9002, "step": 738 },
    { "epoch": 0.62, "learning_rate": 6.589259796806967e-05, "loss": 6.8699, "step": 739 },
    { "epoch": 0.62, "learning_rate": 6.574746008708274e-05, "loss": 6.7618, "step": 740 },
    { "epoch": 0.62, "learning_rate": 6.560232220609579e-05, "loss": 6.9287, "step": 741 },
    { "epoch": 0.62, "learning_rate": 6.545718432510885e-05, "loss": 6.864, "step": 742 },
    { "epoch": 0.62, "learning_rate": 6.531204644412192e-05, "loss": 6.8797, "step": 743 },
    { "epoch": 0.63, "learning_rate": 6.516690856313497e-05, "loss": 6.6929, "step": 744 },
    { "epoch": 0.63, "learning_rate": 6.502177068214804e-05, "loss": 6.667, "step": 745 },
    { "epoch": 0.63, "learning_rate": 6.487663280116111e-05, "loss": 6.5773, "step": 746 },
    { "epoch": 0.63, "learning_rate": 6.473149492017418e-05, "loss": 6.6875, "step": 747 },
    { "epoch": 0.63, "learning_rate": 6.458635703918723e-05, "loss": 6.9012, "step": 748 },
    { "epoch": 0.63, "learning_rate": 6.44412191582003e-05, "loss": 6.4418, "step": 749 },
    { "epoch": 0.63, "learning_rate": 6.429608127721336e-05, "loss": 6.1, "step": 750 },
    { "epoch": 0.63, "learning_rate": 6.415094339622641e-05, "loss": 6.9027, "step": 751 },
    { "epoch": 0.63, "learning_rate": 6.400580551523948e-05, "loss": 7.0302, "step": 752 },
    { "epoch": 0.63, "learning_rate": 6.386066763425254e-05, "loss": 6.7954, "step": 753 },
    { "epoch": 0.63, "learning_rate": 6.37155297532656e-05, "loss": 6.7707, "step": 754 },
    { "epoch": 0.63, "learning_rate": 6.357039187227866e-05, "loss": 6.9749, "step": 755 },
    { "epoch": 0.64, "learning_rate": 6.342525399129173e-05, "loss": 6.904, "step": 756 },
    { "epoch": 0.64, "learning_rate": 6.328011611030479e-05, "loss": 6.8076, "step": 757 },
    { "epoch": 0.64, "learning_rate": 6.313497822931786e-05, "loss": 6.9567, "step": 758 },
    { "epoch": 0.64, "learning_rate": 6.298984034833092e-05, "loss": 6.6893, "step": 759 },
    { "epoch": 0.64, "learning_rate": 6.284470246734397e-05, "loss": 6.8536, "step": 760 },
    { "epoch": 0.64, "learning_rate": 6.269956458635704e-05, "loss": 6.797, "step": 761 },
    { "epoch": 0.64, "learning_rate": 6.25544267053701e-05, "loss": 6.6714, "step": 762 },
    { "epoch": 0.64, "learning_rate": 6.240928882438317e-05, "loss": 6.7449, "step": 763 },
    { "epoch": 0.64, "learning_rate": 6.226415094339622e-05, "loss": 6.6406, "step": 764 },
    { "epoch": 0.64, "learning_rate": 6.211901306240929e-05, "loss": 7.0426, "step": 765 },
    { "epoch": 0.64, "learning_rate": 6.197387518142235e-05, "loss": 6.8331, "step": 766 },
    { "epoch": 0.64, "learning_rate": 6.182873730043542e-05, "loss": 6.8195, "step": 767 },
    { "epoch": 0.65, "learning_rate": 6.168359941944848e-05, "loss": 6.651, "step": 768 },
    { "epoch": 0.65, "learning_rate": 6.153846153846155e-05, "loss": 6.7969, "step": 769 },
    { "epoch": 0.65, "learning_rate": 6.13933236574746e-05, "loss": 6.8453, "step": 770 },
    { "epoch": 0.65, "learning_rate": 6.124818577648767e-05, "loss": 6.7899, "step": 771 },
    { "epoch": 0.65, "learning_rate": 6.110304789550073e-05, "loss": 6.9263, "step": 772 },
    { "epoch": 0.65, "learning_rate": 6.095791001451378e-05, "loss": 6.7198, "step": 773 },
    { "epoch": 0.65, "learning_rate": 6.0812772133526855e-05, "loss": 6.6432, "step": 774 },
    { "epoch": 0.65, "learning_rate": 6.066763425253992e-05, "loss": 6.8854, "step": 775 },
    { "epoch": 0.65, "learning_rate": 6.0522496371552985e-05, "loss": 6.7342, "step": 776 },
    { "epoch": 0.65, "learning_rate": 6.037735849056604e-05, "loss": 6.7204, "step": 777 },
    { "epoch": 0.65, "learning_rate": 6.02322206095791e-05, "loss": 6.7801, "step": 778 },
    { "epoch": 0.65, "learning_rate": 6.008708272859217e-05, "loss": 6.8489, "step": 779 },
    { "epoch": 0.66, "learning_rate": 5.9941944847605226e-05, "loss": 6.9924, "step": 780 },
    { "epoch": 0.66, "learning_rate": 5.979680696661829e-05, "loss": 6.8397, "step": 781 },
    { "epoch": 0.66, "learning_rate": 5.965166908563136e-05, "loss": 6.7032, "step": 782 },
    { "epoch": 0.66, "learning_rate": 5.950653120464441e-05, "loss": 6.6046, "step": 783 },
    { "epoch": 0.66, "learning_rate": 5.9361393323657474e-05, "loss": 6.8323, "step": 784 },
    { "epoch": 0.66, "learning_rate": 5.9216255442670546e-05, "loss": 6.8935, "step": 785 },
    { "epoch": 0.66, "learning_rate": 5.90711175616836e-05, "loss": 6.6631, "step": 786 },
    { "epoch": 0.66, "learning_rate": 5.892597968069666e-05, "loss": 6.9037, "step": 787 },
    { "epoch": 0.66, "learning_rate": 5.878084179970973e-05, "loss": 6.7529, "step": 788 },
    { "epoch": 0.66, "learning_rate": 5.863570391872279e-05, "loss": 6.7957, "step": 789 },
    { "epoch": 0.66, "learning_rate": 5.849056603773585e-05, "loss": 6.723, "step": 790 },
    { "epoch": 0.66, "learning_rate": 5.834542815674892e-05, "loss": 6.847, "step": 791 },
    { "epoch": 0.67, "learning_rate": 5.820029027576198e-05, "loss": 6.7401, "step": 792 },
    { "epoch": 0.67, "learning_rate": 5.8055152394775034e-05, "loss": 6.7185, "step": 793 },
    { "epoch": 0.67, "learning_rate": 5.79100145137881e-05, "loss": 6.6107, "step": 794 },
    { "epoch": 0.67, "learning_rate": 5.7764876632801165e-05, "loss": 6.6154, "step": 795 },
    { "epoch": 0.67, "learning_rate": 5.7619738751814224e-05, "loss": 6.5039, "step": 796 },
    { "epoch": 0.67, "learning_rate": 5.747460087082729e-05, "loss": 6.5098, "step": 797 },
    { "epoch": 0.67, "learning_rate": 5.7329462989840354e-05, "loss": 6.4776, "step": 798 },
    { "epoch": 0.67, "learning_rate": 5.718432510885341e-05, "loss": 6.451, "step": 799 },
    { "epoch": 0.67, "learning_rate": 5.703918722786648e-05, "loss": 6.1167, "step": 800 },
    { "epoch": 0.67, "learning_rate": 5.689404934687954e-05, "loss": 6.842, "step": 801 },
    { "epoch": 0.67, "learning_rate": 5.6748911465892595e-05, "loss": 6.6685, "step": 802 },
    { "epoch": 0.68, "learning_rate": 5.660377358490566e-05, "loss": 6.7821, "step": 803 },
    { "epoch": 0.68, "learning_rate": 5.6458635703918726e-05, "loss": 6.8192, "step": 804 },
    { "epoch": 0.68, "learning_rate": 5.631349782293179e-05, "loss": 6.8466, "step": 805 },
    { "epoch": 0.68, "learning_rate": 5.616835994194485e-05, "loss": 6.7241, "step": 806 },
    { "epoch": 0.68, "learning_rate": 5.6023222060957915e-05, "loss": 6.7587, "step": 807 },
    { "epoch": 0.68, "learning_rate": 5.587808417997098e-05, "loss": 6.8157, "step": 808 },
    { "epoch": 0.68, "learning_rate": 5.573294629898403e-05, "loss": 6.7791, "step": 809 },
    { "epoch": 0.68, "learning_rate": 5.55878084179971e-05, "loss": 6.8145, "step": 810 },
    { "epoch": 0.68, "learning_rate": 5.544267053701017e-05, "loss": 6.7379, "step": 811 },
    { "epoch": 0.68, "learning_rate": 5.529753265602322e-05, "loss": 6.76, "step": 812 },
    { "epoch": 0.68, "learning_rate": 5.5152394775036286e-05, "loss": 6.709, "step": 813 },
    { "epoch": 0.68, "learning_rate": 5.500725689404935e-05, "loss": 7.0016, "step": 814 },
    { "epoch": 0.69, "learning_rate": 5.486211901306241e-05, "loss": 6.9221, "step": 815 },
    { "epoch": 0.69, "learning_rate": 5.4716981132075475e-05, "loss": 6.646, "step": 816 },
    { "epoch": 0.69, "learning_rate": 5.457184325108854e-05, "loss": 6.8079, "step": 817 },
    { "epoch": 0.69, "learning_rate": 5.442670537010159e-05, "loss": 6.5571, "step": 818 },
    { "epoch": 0.69, "learning_rate": 5.428156748911466e-05, "loss": 6.7677, "step": 819 },
    { "epoch": 0.69, "learning_rate": 5.413642960812772e-05, "loss": 6.8188, "step": 820 },
    { "epoch": 0.69, "learning_rate": 5.399129172714079e-05, "loss": 6.8109, "step": 821 },
    { "epoch": 0.69, "learning_rate": 5.384615384615385e-05, "loss": 6.7071, "step": 822 },
    { "epoch": 0.69, "learning_rate": 5.370101596516691e-05, "loss": 6.7861, "step": 823 },
    { "epoch": 0.69, "learning_rate": 5.355587808417998e-05, "loss": 6.8952, "step": 824 },
    { "epoch": 0.69, "learning_rate": 5.3410740203193036e-05, "loss": 6.7592, "step": 825 },
    { "epoch": 0.69, "learning_rate": 5.32656023222061e-05, "loss": 6.7845, "step": 826 },
    { "epoch": 0.7, "learning_rate": 5.3120464441219166e-05, "loss": 6.8417, "step": 827 },
    { "epoch": 0.7, "learning_rate": 5.297532656023222e-05, "loss": 6.8408, "step": 828 },
    { "epoch": 0.7, "learning_rate": 5.283018867924528e-05, "loss": 6.7785, "step": 829 },
    { "epoch": 0.7, "learning_rate": 5.268505079825835e-05, "loss": 6.968, "step": 830 },
    { "epoch": 0.7, "learning_rate": 5.253991291727141e-05, "loss": 6.8102, "step": 831 },
    { "epoch": 0.7, "learning_rate": 5.239477503628447e-05, "loss": 6.5849, "step": 832 },
    { "epoch": 0.7, "learning_rate": 5.224963715529754e-05, "loss": 6.7127, "step": 833 },
    { "epoch": 0.7, "learning_rate": 5.21044992743106e-05, "loss": 6.8402, "step": 834 },
    { "epoch": 0.7, "learning_rate": 5.1959361393323655e-05, "loss": 6.8481, "step": 835 },
    { "epoch": 0.7, "learning_rate": 5.181422351233673e-05, "loss": 6.8904, "step": 836 },
    { "epoch": 0.7, "learning_rate": 5.166908563134979e-05, "loss": 6.832, "step": 837 },
    { "epoch": 0.7, "learning_rate": 5.1523947750362844e-05, "loss": 6.4837, "step": 838 },
    { "epoch": 0.71, "learning_rate": 5.137880986937591e-05, "loss": 6.9687, "step": 839 },
    { "epoch": 0.71, "learning_rate": 5.1233671988388975e-05, "loss": 6.6962, "step": 840 },
    { "epoch": 0.71, "learning_rate": 5.108853410740203e-05, "loss": 6.8096, "step": 841 },
    { "epoch": 0.71, "learning_rate": 5.09433962264151e-05, "loss": 6.703, "step": 842 },
    { "epoch": 0.71, "learning_rate": 5.0798258345428164e-05, "loss": 6.9807, "step": 843 },
    { "epoch": 0.71, "learning_rate": 5.0653120464441215e-05, "loss": 6.5803, "step": 844 },
    { "epoch": 0.71, "learning_rate": 5.050798258345428e-05, "loss": 6.7102, "step": 845 },
    { "epoch": 0.71, "learning_rate": 5.0362844702467346e-05, "loss": 6.6611, "step": 846 },
    { "epoch": 0.71, "learning_rate": 5.0217706821480404e-05, "loss": 6.6247, "step": 847 },
    { "epoch": 0.71, "learning_rate": 5.007256894049347e-05, "loss": 6.3862, "step": 848 },
    { "epoch": 0.71, "learning_rate": 4.9927431059506535e-05, "loss": 6.7488, "step": 849 },
    { "epoch": 0.71, "learning_rate": 4.9782293178519594e-05, "loss": 6.5929, "step": 850 },
    { "epoch": 0.72, "learning_rate": 4.963715529753266e-05, "loss": 7.0097, "step": 851 },
    { "epoch": 0.72, "learning_rate": 4.9492017416545724e-05, "loss": 6.7953, "step": 852 },
    { "epoch": 0.72, "learning_rate": 4.934687953555878e-05, "loss": 6.816, "step": 853 },
    { "epoch": 0.72, "learning_rate": 4.920174165457185e-05, "loss": 6.9158, "step": 854 },
    { "epoch": 0.72, "learning_rate": 4.9056603773584906e-05, "loss": 6.8304, "step": 855 },
    { "epoch": 0.72, "learning_rate": 4.891146589259797e-05, "loss": 6.8529, "step": 856 },
    { "epoch": 0.72, "learning_rate": 4.876632801161104e-05, "loss": 6.6973, "step": 857 },
    { "epoch": 0.72, "learning_rate": 4.8621190130624096e-05, "loss": 6.8356, "step": 858 },
    { "epoch": 0.72, "learning_rate": 4.8476052249637154e-05, "loss": 6.7024, "step": 859 },
    { "epoch": 0.72, "learning_rate": 4.833091436865022e-05, "loss": 6.745, "step": 860 },
    { "epoch": 0.72, "learning_rate": 4.818577648766328e-05, "loss": 6.7976, "step": 861 },
    { "epoch": 0.72, "learning_rate": 4.804063860667635e-05, "loss": 6.8322, "step": 862 },
    { "epoch": 0.73, "learning_rate": 4.789550072568941e-05, "loss": 6.6484, "step": 863 },
    { "epoch": 0.73, "learning_rate": 4.775036284470247e-05, "loss": 6.823, "step": 864 },
    { "epoch": 0.73, "learning_rate": 4.760522496371553e-05, "loss": 6.8371, "step": 865 },
    { "epoch": 0.73, "learning_rate": 4.746008708272859e-05, "loss": 6.7053, "step": 866 },
    { "epoch": 0.73, "learning_rate": 4.731494920174166e-05, "loss": 6.6614, "step": 867 },
    { "epoch": 0.73, "learning_rate": 4.716981132075472e-05, "loss": 6.694, "step": 868 },
    { "epoch": 0.73, "learning_rate": 4.702467343976778e-05, "loss": 6.657, "step": 869 },
    { "epoch": 0.73, "learning_rate": 4.6879535558780845e-05, "loss": 6.6412, "step": 870 },
    { "epoch": 0.73, "learning_rate": 4.6734397677793904e-05, "loss": 6.6954, "step": 871 },
    { "epoch": 0.73, "learning_rate": 4.658925979680697e-05, "loss": 6.6111, "step": 872 },
    { "epoch": 0.73, "learning_rate": 4.6444121915820034e-05, "loss": 6.8912, "step": 873 },
    { "epoch": 0.73, "learning_rate": 4.629898403483309e-05, "loss": 6.8317, "step": 874 },
    { "epoch": 0.74, "learning_rate": 4.615384615384616e-05, "loss": 6.8738, "step": 875 },
    { "epoch": 0.74, "learning_rate": 4.600870827285922e-05, "loss": 6.6447, "step": 876 },
    { "epoch": 0.74, "learning_rate": 4.586357039187228e-05, "loss": 6.6893, "step": 877 },
    { "epoch": 0.74, "learning_rate": 4.571843251088535e-05, "loss": 6.8431, "step": 878 },
    { "epoch": 0.74, "learning_rate": 4.5573294629898406e-05, "loss": 6.7028, "step": 879 },
    { "epoch": 0.74, "learning_rate": 4.5428156748911464e-05, "loss": 6.8807, "step": 880 },
    { "epoch": 0.74, "learning_rate": 4.528301886792453e-05, "loss": 6.7155, "step": 881 },
    { "epoch": 0.74, "learning_rate": 4.5137880986937595e-05, "loss": 6.8045, "step": 882 },
    { "epoch": 0.74, "learning_rate": 4.499274310595066e-05, "loss": 6.8412, "step": 883 },
    { "epoch": 0.74, "learning_rate": 4.484760522496372e-05, "loss": 6.8275, "step": 884 },
    { "epoch": 0.74, "learning_rate": 4.470246734397678e-05, "loss": 6.6627, "step": 885 },
    { "epoch": 0.74, "learning_rate": 4.455732946298984e-05, "loss": 6.7555, "step": 886 },
    { "epoch": 0.75, "learning_rate": 4.441219158200291e-05, "loss": 6.747, "step": 887 },
    { "epoch": 0.75, "learning_rate": 4.4267053701015966e-05, "loss": 6.7697, "step": 888 },
    { "epoch": 0.75, "learning_rate": 4.412191582002903e-05, "loss": 6.8475, "step": 889 },
    { "epoch": 0.75, "learning_rate": 4.397677793904209e-05, "loss": 6.6429, "step": 890 },
    { "epoch": 0.75, "learning_rate": 4.3831640058055155e-05, "loss": 6.8477, "step": 891 },
    { "epoch": 0.75, "learning_rate": 4.3686502177068214e-05, "loss": 6.7814, "step": 892 },
    { "epoch": 0.75, "learning_rate": 4.354136429608128e-05, "loss": 6.6208, "step": 893 },
    { "epoch": 0.75, "learning_rate": 4.3396226415094345e-05, "loss": 6.6035, "step": 894 },
    { "epoch": 0.75, "learning_rate": 4.32510885341074e-05, "loss": 6.7138, "step": 895 },
    { "epoch": 0.75, "learning_rate": 4.310595065312046e-05, "loss": 6.6683, "step": 896 },
    { "epoch": 0.75, "learning_rate": 4.296081277213353e-05, "loss": 6.4505, "step": 897 },
    { "epoch": 0.75, "learning_rate": 4.281567489114659e-05, "loss": 6.2522, "step": 898 },
    { "epoch": 0.76, "learning_rate": 4.267053701015966e-05, "loss": 6.528, "step": 899 },
    { "epoch": 0.76, "learning_rate": 4.2525399129172716e-05, "loss": 5.9871, "step": 900 },
    { "epoch": 0.76, "learning_rate": 4.2380261248185774e-05, "loss": 6.8863, "step": 901 },
    { "epoch": 0.76, "learning_rate": 4.223512336719884e-05, "loss": 6.8323, "step": 902 },
    { "epoch": 0.76, "learning_rate": 4.2089985486211905e-05, "loss": 6.8203, "step": 903 },
    { "epoch": 0.76, "learning_rate": 4.194484760522497e-05, "loss": 6.6356, "step": 904 },
    { "epoch": 0.76, "learning_rate": 4.179970972423803e-05, "loss": 6.7684, "step": 905 },
    { "epoch": 0.76, "learning_rate": 4.165457184325109e-05, "loss": 6.8672, "step": 906 },
    { "epoch": 0.76, "learning_rate": 4.150943396226415e-05, "loss": 6.745, "step": 907 },
    { "epoch": 0.76, "learning_rate": 4.136429608127722e-05, "loss": 7.0935, "step": 908 },
    { "epoch": 0.76, "learning_rate": 4.1219158200290276e-05, "loss": 6.7924, "step": 909 },
    { "epoch": 0.77, "learning_rate": 4.107402031930334e-05, "loss": 6.8196, "step": 910 },
    { "epoch": 0.77, "learning_rate": 4.09288824383164e-05, "loss": 6.734, "step": 911 },
    { "epoch": 0.77, "learning_rate": 4.0783744557329466e-05, "loss": 6.8255, "step": 912 },
    { "epoch": 0.77, "learning_rate": 4.063860667634253e-05, "loss": 6.7578, "step": 913 },
    { "epoch": 0.77, "learning_rate": 4.049346879535559e-05, "loss": 6.6894, "step": 914 },
    { "epoch": 0.77, "learning_rate": 4.0348330914368655e-05, "loss": 6.7667, "step": 915 },
    { "epoch": 0.77, "learning_rate": 4.020319303338171e-05, "loss": 6.8318, "step": 916 },
    { "epoch": 0.77, "learning_rate": 4.005805515239477e-05, "loss": 6.7737, "step": 917 },
    { "epoch": 0.77, "learning_rate": 3.991291727140784e-05, "loss": 6.7788, "step": 918 },
    { "epoch": 0.77, "learning_rate": 3.97677793904209e-05, "loss": 6.8496, "step": 919 },
    { "epoch": 0.77, "learning_rate": 3.962264150943397e-05, "loss": 6.7655, "step": 920 },
    { "epoch": 0.77, "learning_rate": 3.9477503628447026e-05, "loss": 6.682, "step": 921 },
    { "epoch": 0.78, "learning_rate": 3.9332365747460085e-05, "loss": 6.9001, "step": 922 },
    { "epoch": 0.78, "learning_rate": 3.918722786647315e-05, "loss": 6.7989, "step": 923 },
    { "epoch": 0.78, "learning_rate": 3.9042089985486215e-05, "loss": 6.8357, "step": 924 },
    { "epoch": 0.78, "learning_rate": 3.8896952104499274e-05, "loss": 6.679, "step": 925 },
    { "epoch": 0.78, "learning_rate": 3.875181422351234e-05, "loss": 6.7386, "step": 926 },
    { "epoch": 0.78, "learning_rate": 3.86066763425254e-05, "loss": 6.8577, "step": 927 },
    { "epoch": 0.78, "learning_rate": 3.846153846153846e-05, "loss": 6.8165, "step": 928 },
    { "epoch": 0.78, "learning_rate": 3.831640058055153e-05, "loss": 6.6807, "step": 929 },
    { "epoch": 0.78, "learning_rate": 3.817126269956459e-05, "loss": 6.7314, "step": 930 },
    { "epoch": 0.78, "learning_rate": 3.802612481857765e-05, "loss": 6.7713, "step": 931 },
    { "epoch": 0.78, "learning_rate": 3.788098693759071e-05, "loss": 6.8034, "step": 932 },
    { "epoch": 0.78, "learning_rate": 3.7735849056603776e-05, "loss": 6.8894, "step": 933 },
    { "epoch": 0.79, "learning_rate": 3.759071117561684e-05, "loss": 6.7599, "step": 934 },
    { "epoch": 0.79, "learning_rate": 3.74455732946299e-05, "loss": 6.7594, "step": 935 },
    { "epoch": 0.79, "learning_rate": 3.7300435413642965e-05, "loss": 6.7416, "step": 936 },
    { "epoch": 0.79, "learning_rate": 3.7155297532656023e-05, "loss": 6.7552, "step": 937 },
    { "epoch": 0.79, "learning_rate": 3.701015965166908e-05, "loss": 6.9256, "step": 938 },
    { "epoch": 0.79, "learning_rate": 3.6865021770682154e-05, "loss": 6.7792, "step": 939 },
    { "epoch": 0.79, "learning_rate": 3.671988388969521e-05, "loss": 6.6895, "step": 940 },
    { "epoch": 0.79, "learning_rate": 3.657474600870828e-05, "loss": 6.6861, "step": 941 },
    { "epoch": 0.79, "learning_rate": 3.6429608127721336e-05, "loss": 6.7978, "step": 942 },
    { "epoch": 0.79, "learning_rate": 3.6284470246734395e-05, "loss": 6.4635, "step": 943 },
    { "epoch": 0.79, "learning_rate": 3.613933236574747e-05, "loss": 6.4301, "step": 944 },
    { "epoch": 0.79, "learning_rate": 3.5994194484760525e-05, "loss": 6.811, "step": 945 },
    { "epoch": 0.8, "learning_rate": 3.5849056603773584e-05, "loss": 6.5899, "step": 946 },
    { "epoch": 0.8, "learning_rate": 3.570391872278665e-05, "loss": 6.4177, "step": 947 },
    { "epoch": 0.8, "learning_rate": 3.555878084179971e-05, "loss": 6.3287, "step": 948 },
    { "epoch": 0.8, "learning_rate": 3.541364296081277e-05, "loss": 6.3375, "step": 949 },
    { "epoch": 0.8, "learning_rate": 3.526850507982584e-05, "loss": 6.1771, "step": 950 },
    { "epoch": 0.8, "learning_rate": 3.51233671988389e-05, "loss": 6.8212, "step": 951 },
    { "epoch": 0.8, "learning_rate": 3.497822931785196e-05, "loss": 6.8401, "step": 952 },
    { "epoch": 0.8, "learning_rate": 3.483309143686502e-05, "loss": 6.7216, "step": 953 },
    { "epoch": 0.8, "learning_rate": 3.4687953555878086e-05, "loss": 6.5769, "step": 954 },
    { "epoch": 0.8, "learning_rate": 3.454281567489115e-05, "loss": 6.724, "step": 955 },
    { "epoch": 0.8, "learning_rate": 3.439767779390421e-05, "loss": 6.8832, "step": 956 },
    { "epoch": 0.8, "learning_rate": 3.4252539912917275e-05, "loss": 6.7873, "step": 957 },
    { "epoch": 0.81, "learning_rate": 3.4107402031930334e-05, "loss": 6.8771, "step": 958 },
    { "epoch": 0.81, "learning_rate": 3.39622641509434e-05, "loss": 6.7765, "step": 959 },
    { "epoch": 0.81, "learning_rate": 3.3817126269956464e-05, "loss": 6.6819, "step": 960 },
    { "epoch": 0.81, "learning_rate": 3.367198838896952e-05, "loss": 6.7155, "step": 961 },
    { "epoch": 0.81, "learning_rate": 3.352685050798258e-05, "loss": 6.7164, "step": 962 },
    { "epoch": 0.81, "learning_rate": 3.3381712626995646e-05, "loss": 6.6213, "step": 963 },
    { "epoch": 0.81, "learning_rate": 3.323657474600871e-05, "loss": 6.7345, "step": 964 },
    { "epoch": 0.81, "learning_rate": 3.309143686502178e-05, "loss": 6.7811, "step": 965 },
    { "epoch": 0.81, "learning_rate": 3.2946298984034836e-05, "loss": 6.7716, "step": 966 },
    { "epoch": 0.81, "learning_rate": 3.2801161103047894e-05, "loss": 6.9639, "step": 967 },
    { "epoch": 0.81, "learning_rate": 3.265602322206096e-05, "loss": 6.7817, "step": 968 },
    { "epoch": 0.81, "learning_rate": 3.251088534107402e-05, "loss": 6.5145, "step": 969 },
    { "epoch": 0.82, "learning_rate": 3.236574746008709e-05, "loss": 6.7859, "step": 970 },
    { "epoch": 0.82, "learning_rate": 3.222060957910015e-05, "loss": 6.6373, "step": 971 },
    { "epoch": 0.82, "learning_rate": 3.207547169811321e-05, "loss": 6.6221, "step": 972 },
    { "epoch": 0.82, "learning_rate": 3.193033381712627e-05, "loss": 6.7009, "step": 973 },
    { "epoch": 0.82, "learning_rate": 3.178519593613933e-05, "loss": 6.7816, "step": 974 },
    { "epoch": 0.82, "learning_rate": 3.1640058055152396e-05, "loss": 6.7988, "step": 975 },
    { "epoch": 0.82, "learning_rate": 3.149492017416546e-05, "loss": 6.8296, "step": 976 },
    { "epoch": 0.82, "learning_rate": 3.134978229317852e-05, "loss": 6.6786, "step": 977 },
    { "epoch": 0.82, "learning_rate": 3.1204644412191585e-05, "loss": 6.7087, "step": 978 },
    { "epoch": 0.82, "learning_rate": 3.1059506531204644e-05, "loss": 6.7003, "step": 979 },
    { "epoch": 0.82, "learning_rate": 3.091436865021771e-05, "loss": 6.801, "step": 980 },
    { "epoch": 0.82, "learning_rate": 3.0769230769230774e-05, "loss": 7.0072, "step": 981 },
    { "epoch": 0.83, "learning_rate": 3.062409288824383e-05, "loss": 6.7875, "step": 982 },
    { "epoch": 0.83, "learning_rate": 3.047895500725689e-05, "loss": 6.7898, "step": 983 },
    { "epoch": 0.83, "learning_rate": 3.033381712626996e-05, "loss": 6.9678, "step": 984 },
    { "epoch": 0.83, "learning_rate": 3.018867924528302e-05, "loss": 6.6714, "step": 985 },
    { "epoch": 0.83, "learning_rate": 3.0043541364296084e-05, "loss": 6.6592, "step": 986 },
    { "epoch": 0.83, "learning_rate": 2.9898403483309146e-05, "loss": 6.8233, "step": 987 },
    { "epoch": 0.83, "learning_rate": 2.9753265602322204e-05, "loss": 6.7382, "step": 988 },
    { "epoch": 0.83, "learning_rate": 2.9608127721335273e-05, "loss": 6.7139, "step": 989 },
    { "epoch": 0.83, "learning_rate": 2.946298984034833e-05, "loss": 6.6707, "step": 990 },
    { "epoch": 0.83, "learning_rate": 2.9317851959361393e-05, "loss": 6.7045, "step": 991 },
    { "epoch": 0.83, "learning_rate": 2.917271407837446e-05, "loss": 6.7427, "step": 992 },
    { "epoch": 0.83, "learning_rate": 2.9027576197387517e-05, "loss": 6.619, "step": 993 },
    { "epoch": 0.84, "learning_rate": 2.8882438316400583e-05, "loss": 6.852, "step": 994 },
    { "epoch": 0.84, "learning_rate": 2.8737300435413644e-05, "loss": 6.7438, "step": 995 },
    { "epoch": 0.84, "learning_rate": 2.8592162554426706e-05, "loss": 6.9753, "step": 996 },
    { "epoch": 0.84, "learning_rate": 2.844702467343977e-05, "loss": 6.6846, "step": 997 },
    { "epoch": 0.84, "learning_rate": 2.830188679245283e-05, "loss": 6.4898, "step": 998 },
    { "epoch": 0.84, "learning_rate": 2.8156748911465895e-05, "loss": 6.4531, "step": 999 },
    { "epoch": 0.84, "learning_rate": 2.8011611030478957e-05, "loss": 6.3192, "step": 1000 },
    { "epoch": 0.84, "eval_loss": 6.661755084991455, "eval_runtime": 804.0636, "eval_samples_per_second": 3.286, "eval_steps_per_second": 0.275, "eval_wer": 1.8028255650137905, "step": 1000 },
    { "epoch": 0.84, "learning_rate": 2.7866473149492016e-05, "loss": 6.7604, "step": 1001 },
    { "epoch": 0.84, "learning_rate": 2.7721335268505085e-05, "loss": 6.8039, "step": 1002 },
    { "epoch": 0.84, "learning_rate": 2.7576197387518143e-05, "loss": 6.7985, "step": 1003 },
    { "epoch": 0.84, "learning_rate": 2.7431059506531205e-05, "loss": 6.8165, "step": 1004 },
    { "epoch": 0.84, "learning_rate": 2.728592162554427e-05, "loss": 6.7572, "step": 1005 },
    { "epoch": 0.85, "learning_rate": 2.714078374455733e-05, "loss": 6.8065, "step": 1006 },
    { "epoch": 0.85, "learning_rate": 2.6995645863570394e-05, "loss": 6.7116, "step": 1007 },
    { "epoch": 0.85, "learning_rate": 2.6850507982583456e-05, "loss": 6.6239, "step": 1008 },
    { "epoch": 0.85, "learning_rate": 2.6705370101596518e-05, "loss": 6.8422, "step": 1009 },
    { "epoch": 0.85, "learning_rate": 2.6560232220609583e-05, "loss": 6.6058, "step": 1010 },
    { "epoch": 0.85, "learning_rate": 2.641509433962264e-05, "loss": 6.6577, "step": 1011 },
    { "epoch": 0.85, "learning_rate": 2.6269956458635704e-05, "loss": 6.7455, "step": 1012 },
    { "epoch": 0.85, "learning_rate": 2.612481857764877e-05, "loss": 6.6332, "step": 1013 },
    { "epoch": 0.85, "learning_rate": 2.5979680696661827e-05, "loss": 6.6506, "step": 1014 },
    { "epoch": 0.85, "learning_rate": 2.5834542815674896e-05, "loss": 6.6368, "step": 1015 },
    { "epoch": 0.85, "learning_rate": 2.5689404934687955e-05, "loss": 6.8033, "step": 1016 },
    { "epoch": 0.85, "learning_rate": 2.5544267053701017e-05, "loss": 6.6214, "step": 1017 },
    { "epoch": 0.86, "learning_rate": 2.5399129172714082e-05, "loss": 6.7163, "step": 1018 },
    { "epoch": 0.86, "learning_rate": 2.525399129172714e-05, "loss": 6.8469, "step": 1019 },
    { "epoch": 0.86, "learning_rate": 2.5108853410740202e-05, "loss": 6.6031, "step": 1020 },
    { "epoch": 0.86, "learning_rate": 2.4963715529753268e-05, "loss": 6.7796, "step": 1021 },
    { "epoch": 0.86, "learning_rate": 2.481857764876633e-05, "loss": 6.8408, "step": 1022 },
    { "epoch": 0.86, "learning_rate": 2.467343976777939e-05, "loss": 6.8541, "step": 1023 },
    { "epoch": 0.86, "learning_rate": 2.4528301886792453e-05, "loss": 6.6765, "step": 1024 },
    { "epoch": 0.86, "learning_rate": 2.438316400580552e-05, "loss": 6.6333, "step": 1025 },
    { "epoch": 0.86, "learning_rate": 2.4238026124818577e-05, "loss": 6.6848, "step": 1026 },
    { "epoch": 0.86, "learning_rate": 2.409288824383164e-05, "loss": 6.6127, "step": 1027 },
    { "epoch": 0.86, "learning_rate": 2.3947750362844704e-05, "loss": 6.7227, "step": 1028 },
    { "epoch": 0.87, "learning_rate": 2.3802612481857766e-05, "loss": 6.741, "step": 1029 },
    { "epoch": 0.87, "learning_rate": 2.365747460087083e-05, "loss": 6.6524, "step": 1030 },
    { "epoch": 0.87, "learning_rate": 2.351233671988389e-05, "loss": 6.774, "step": 1031 },
    { "epoch": 0.87, "learning_rate": 2.3367198838896952e-05, "loss": 6.5939, "step": 1032 },
    { "epoch": 0.87, "learning_rate": 2.3222060957910017e-05, "loss": 6.7241, "step": 1033 },
    { "epoch": 0.87, "learning_rate": 2.307692307692308e-05, "loss": 6.6701, "step": 1034 },
    { "epoch": 0.87, "learning_rate": 2.293178519593614e-05, "loss": 6.7152, "step": 1035 },
    { "epoch": 0.87, "learning_rate": 2.2786647314949203e-05, "loss": 6.828, "step": 1036 },
    { "epoch": 0.87, "learning_rate": 2.2641509433962265e-05, "loss": 6.5995, "step": 1037 },
    { "epoch": 0.87, "learning_rate": 2.249637155297533e-05, "loss": 6.7937, "step": 1038 },
    { "epoch": 0.87, "learning_rate": 2.235123367198839e-05, "loss": 6.7432, "step": 1039 },
    { "epoch": 0.87, "learning_rate": 2.2206095791001454e-05, "loss": 6.6041, "step": 1040 },
    { "epoch": 0.88, "learning_rate": 2.2060957910014516e-05, "loss": 6.5828, "step": 1041 },
    { "epoch": 0.88, "learning_rate": 2.1915820029027578e-05, "loss": 6.6497, "step": 1042 },
    { "epoch": 0.88, "learning_rate": 2.177068214804064e-05, "loss": 6.613, "step": 1043 },
    { "epoch": 0.88, "learning_rate": 2.16255442670537e-05, "loss": 6.6383, "step": 1044 },
    { "epoch": 0.88, "learning_rate": 2.1480406386066763e-05, "loss": 6.6637, "step": 1045 },
    { "epoch": 0.88, "learning_rate": 2.133526850507983e-05, "loss": 6.6572, "step": 1046 },
    { "epoch": 0.88, "learning_rate": 2.1190130624092887e-05, "loss": 6.434, "step": 1047 },
    { "epoch": 0.88, "learning_rate": 2.1044992743105953e-05, "loss": 6.6083, "step": 1048 },
    { "epoch": 0.88, "learning_rate": 2.0899854862119014e-05, "loss": 6.7062, "step": 1049 },
    { "epoch": 0.88, "learning_rate": 2.0754716981132076e-05, "loss": 6.2409, "step": 1050 },
    { "epoch": 0.88, "learning_rate": 2.0609579100145138e-05, "loss": 6.8725, "step": 1051 },
    { "epoch": 0.88, "learning_rate": 2.04644412191582e-05, "loss": 6.6812, "step": 1052 },
    { "epoch": 0.89, "learning_rate": 2.0319303338171265e-05, "loss": 6.7332, "step": 1053 },
    { "epoch": 0.89, "learning_rate": 2.0174165457184327e-05, "loss": 6.7983, "step": 1054 },
    { "epoch": 0.89, "learning_rate": 2.0029027576197386e-05, "loss": 6.8113, "step": 1055 },
    { "epoch": 0.89, "learning_rate": 1.988388969521045e-05, "loss": 6.8284, "step": 1056 },
    { "epoch": 0.89, "learning_rate": 1.9738751814223513e-05, "loss": 6.7666, "step": 1057 },
    { "epoch": 0.89, "learning_rate": 1.9593613933236575e-05, "loss": 6.6421, "step": 1058 },
    { "epoch": 0.89, "learning_rate": 1.9448476052249637e-05, "loss": 6.826, "step": 1059 },
    { "epoch": 0.89, "learning_rate": 1.93033381712627e-05, "loss": 6.7973, "step": 1060 },
    { "epoch": 0.89, "learning_rate": 1.9158200290275764e-05, "loss": 6.8295, "step": 1061 },
    { "epoch": 0.89, "learning_rate": 1.9013062409288826e-05, "loss": 6.7402, "step": 1062 },
    { "epoch": 0.89, "learning_rate": 1.8867924528301888e-05, "loss": 6.8082, "step": 1063 },
    { "epoch": 0.89, "learning_rate": 1.872278664731495e-05, "loss": 6.6675, "step": 1064 },
    { "epoch": 0.9, "learning_rate": 1.8577648766328012e-05, "loss": 6.6782, "step": 1065 },
    { "epoch": 0.9, "learning_rate": 1.8432510885341077e-05, "loss": 6.7005, "step": 1066 },
    { "epoch": 0.9, "learning_rate": 1.828737300435414e-05, "loss": 6.7035, "step": 1067 },
    { "epoch": 0.9, "learning_rate": 1.8142235123367197e-05, "loss": 6.7503, "step": 1068 },
    { "epoch": 0.9, "learning_rate": 1.7997097242380263e-05, "loss": 6.8567, "step": 1069 },
    { "epoch": 0.9, "learning_rate": 1.7851959361393325e-05, "loss": 6.7818, "step": 1070 },
    { "epoch": 0.9, "learning_rate": 1.7706821480406387e-05, "loss": 6.6334, "step": 1071 },
    { "epoch": 0.9, "learning_rate": 1.756168359941945e-05, "loss": 6.7261, "step": 1072 },
    { "epoch": 0.9, "learning_rate": 1.741654571843251e-05, "loss": 6.5404, "step": 1073 },
    { "epoch": 0.9, "learning_rate": 1.7271407837445576e-05, "loss": 6.6958, "step": 1074 },
    { "epoch": 0.9, "learning_rate": 1.7126269956458638e-05, "loss": 6.4291, "step": 1075 },
    { "epoch": 0.9, "learning_rate": 1.69811320754717e-05, "loss": 6.7895, "step": 1076 },
    { "epoch": 0.91, "learning_rate": 1.683599419448476e-05, "loss": 6.9405, "step": 1077 },
    { "epoch": 0.91, "learning_rate": 1.6690856313497823e-05, "loss": 6.8261, "step": 1078 },
    { "epoch": 0.91, "learning_rate": 1.654571843251089e-05, "loss": 6.6099, "step": 1079 },
    { "epoch": 0.91, "learning_rate": 1.6400580551523947e-05, "loss": 6.6709, "step": 1080 },
    {
|
"epoch": 0.91, |
|
"learning_rate": 1.625544267053701e-05, |
|
"loss": 6.7486, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6110304789550074e-05, |
|
"loss": 7.022, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.5965166908563136e-05, |
|
"loss": 6.5623, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.5820029027576198e-05, |
|
"loss": 6.6926, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.567489114658926e-05, |
|
"loss": 6.7017, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.5529753265602322e-05, |
|
"loss": 6.7289, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.5384615384615387e-05, |
|
"loss": 6.6565, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.5239477503628446e-05, |
|
"loss": 6.7436, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.509433962264151e-05, |
|
"loss": 6.7289, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.4949201741654573e-05, |
|
"loss": 6.6083, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.4804063860667636e-05, |
|
"loss": 6.6317, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.4658925979680697e-05, |
|
"loss": 6.5615, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.4513788098693759e-05, |
|
"loss": 6.5356, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.4368650217706822e-05, |
|
"loss": 6.7477, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.4223512336719886e-05, |
|
"loss": 6.5428, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.4078374455732948e-05, |
|
"loss": 6.6898, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.3933236574746008e-05, |
|
"loss": 6.4827, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.3788098693759072e-05, |
|
"loss": 6.743, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.3642960812772135e-05, |
|
"loss": 6.7047, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.3497822931785197e-05, |
|
"loss": 5.9048, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.3352685050798259e-05, |
|
"loss": 6.7719, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.320754716981132e-05, |
|
"loss": 6.7523, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.3062409288824384e-05, |
|
"loss": 6.8444, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.2917271407837448e-05, |
|
"loss": 6.6876, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.2772133526850508e-05, |
|
"loss": 6.6657, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.262699564586357e-05, |
|
"loss": 6.596, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.2481857764876634e-05, |
|
"loss": 6.9563, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.2336719883889696e-05, |
|
"loss": 6.8935, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.219158200290276e-05, |
|
"loss": 6.7621, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.204644412191582e-05, |
|
"loss": 6.6331, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1901306240928883e-05, |
|
"loss": 6.7108, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1756168359941945e-05, |
|
"loss": 6.8204, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.1611030478955009e-05, |
|
"loss": 6.5971, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.146589259796807e-05, |
|
"loss": 6.6035, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.1320754716981132e-05, |
|
"loss": 6.8656, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.1175616835994194e-05, |
|
"loss": 6.7761, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.1030478955007258e-05, |
|
"loss": 6.8372, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.088534107402032e-05, |
|
"loss": 6.7408, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0740203193033382e-05, |
|
"loss": 6.5574, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0595065312046444e-05, |
|
"loss": 6.8797, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0449927431059507e-05, |
|
"loss": 6.7414, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0304789550072569e-05, |
|
"loss": 6.9331, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0159651669085633e-05, |
|
"loss": 6.6822, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0014513788098693e-05, |
|
"loss": 6.864, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 9.869375907111757e-06, |
|
"loss": 6.9052, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 9.724238026124818e-06, |
|
"loss": 6.6208, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 9.579100145137882e-06, |
|
"loss": 6.719, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 9.433962264150944e-06, |
|
"loss": 6.6789, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 9.288824383164006e-06, |
|
"loss": 6.7152, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 9.14368650217707e-06, |
|
"loss": 6.6793, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 8.998548621190131e-06, |
|
"loss": 6.5818, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 8.853410740203193e-06, |
|
"loss": 6.5321, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 8.708272859216255e-06, |
|
"loss": 6.8218, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 8.563134978229319e-06, |
|
"loss": 6.7331, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 8.41799709724238e-06, |
|
"loss": 6.8597, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 8.272859216255444e-06, |
|
"loss": 6.6451, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 8.127721335268504e-06, |
|
"loss": 6.755, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.982583454281568e-06, |
|
"loss": 6.7175, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.83744557329463e-06, |
|
"loss": 6.6986, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.692307692307694e-06, |
|
"loss": 6.5629, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.547169811320755e-06, |
|
"loss": 6.8473, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.402031930333818e-06, |
|
"loss": 6.6991, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.256894049346879e-06, |
|
"loss": 6.678, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.111756168359943e-06, |
|
"loss": 6.6967, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.966618287373004e-06, |
|
"loss": 6.501, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.8214804063860676e-06, |
|
"loss": 6.7694, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.6763425253991295e-06, |
|
"loss": 6.4776, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 6.531204644412192e-06, |
|
"loss": 6.2739, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 6.386066763425254e-06, |
|
"loss": 6.5628, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 6.240928882438317e-06, |
|
"loss": 6.3233, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 6.09579100145138e-06, |
|
"loss": 6.7873, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.9506531204644415e-06, |
|
"loss": 6.5819, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.805515239477504e-06, |
|
"loss": 6.8527, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.660377358490566e-06, |
|
"loss": 6.7184, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.515239477503629e-06, |
|
"loss": 6.7437, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.370101596516691e-06, |
|
"loss": 6.9352, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.224963715529754e-06, |
|
"loss": 6.6809, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.079825834542816e-06, |
|
"loss": 6.6389, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.934687953555878e-06, |
|
"loss": 6.6878, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.789550072568941e-06, |
|
"loss": 6.7242, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.644412191582003e-06, |
|
"loss": 6.8803, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.499274310595066e-06, |
|
"loss": 6.8112, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.354136429608128e-06, |
|
"loss": 6.8707, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.20899854862119e-06, |
|
"loss": 6.7205, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.063860667634252e-06, |
|
"loss": 6.6868, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.918722786647315e-06, |
|
"loss": 6.9362, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.7735849056603773e-06, |
|
"loss": 6.5593, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.6284470246734397e-06, |
|
"loss": 6.6995, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.483309143686502e-06, |
|
"loss": 6.6244, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.3381712626995647e-06, |
|
"loss": 6.605, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.193033381712627e-06, |
|
"loss": 6.7078, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.04789550072569e-06, |
|
"loss": 6.6084, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.902757619738752e-06, |
|
"loss": 6.7911, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.7576197387518145e-06, |
|
"loss": 6.8127, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.612481857764877e-06, |
|
"loss": 6.5731, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.467343976777939e-06, |
|
"loss": 6.5183, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.3222060957910015e-06, |
|
"loss": 6.6835, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.177068214804064e-06, |
|
"loss": 6.8375, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.031930333817126e-06, |
|
"loss": 6.4989, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.8867924528301887e-06, |
|
"loss": 6.9027, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.741654571843251e-06, |
|
"loss": 6.629, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5965166908563135e-06, |
|
"loss": 6.7142, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.451378809869376e-06, |
|
"loss": 6.6414, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.3062409288824384e-06, |
|
"loss": 6.6327, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.1611030478955007e-06, |
|
"loss": 6.6065, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.015965166908563e-06, |
|
"loss": 6.4935, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 8.708272859216255e-07, |
|
"loss": 6.3923, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 7.25689404934688e-07, |
|
"loss": 6.3026, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 5.805515239477504e-07, |
|
"loss": 6.3535, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 1189, |
|
"total_flos": 0.0, |
|
"train_loss": 6.874297792115464, |
|
"train_runtime": 6595.0456, |
|
"train_samples_per_second": 4.327, |
|
"train_steps_per_second": 0.18 |
|
} |
|
], |
|
"max_steps": 1189, |
|
"num_train_epochs": 1, |
|
"total_flos": 0.0, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |