|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.7450980392156863, |
|
"global_step": 1400, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.347826086956522e-07, |
|
"loss": 1.6791, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 8.695652173913044e-07, |
|
"loss": 1.7297, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.3043478260869566e-06, |
|
"loss": 1.5988, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.7391304347826088e-06, |
|
"loss": 1.6378, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.173913043478261e-06, |
|
"loss": 1.6101, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.6086956521739132e-06, |
|
"loss": 1.5025, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.043478260869566e-06, |
|
"loss": 1.4691, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.4782608695652175e-06, |
|
"loss": 1.5057, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.91304347826087e-06, |
|
"loss": 1.4645, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.347826086956522e-06, |
|
"loss": 1.4841, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.782608695652174e-06, |
|
"loss": 1.3484, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 5.2173913043478265e-06, |
|
"loss": 1.3993, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 5.652173913043479e-06, |
|
"loss": 1.2537, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 6.086956521739132e-06, |
|
"loss": 1.3931, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 6.521739130434783e-06, |
|
"loss": 1.4166, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 6.956521739130435e-06, |
|
"loss": 1.328, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 7.391304347826087e-06, |
|
"loss": 1.3269, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 7.82608695652174e-06, |
|
"loss": 1.3398, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.260869565217392e-06, |
|
"loss": 1.2918, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.695652173913044e-06, |
|
"loss": 1.3524, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.130434782608697e-06, |
|
"loss": 1.2914, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.565217391304349e-06, |
|
"loss": 1.3488, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1e-05, |
|
"loss": 1.3098, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.0434782608695653e-05, |
|
"loss": 1.2008, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.0869565217391305e-05, |
|
"loss": 1.2517, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.1304347826086957e-05, |
|
"loss": 1.2726, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.1739130434782611e-05, |
|
"loss": 1.3406, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.2173913043478263e-05, |
|
"loss": 1.2348, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.2608695652173915e-05, |
|
"loss": 1.2797, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.3043478260869566e-05, |
|
"loss": 1.2801, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.3478260869565218e-05, |
|
"loss": 1.2752, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.391304347826087e-05, |
|
"loss": 1.239, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.4347826086956522e-05, |
|
"loss": 1.1827, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.4782608695652174e-05, |
|
"loss": 1.2833, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.5217391304347828e-05, |
|
"loss": 1.2571, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.565217391304348e-05, |
|
"loss": 1.1767, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.6086956521739132e-05, |
|
"loss": 1.228, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.6521739130434785e-05, |
|
"loss": 1.2042, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.6956521739130437e-05, |
|
"loss": 1.209, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.739130434782609e-05, |
|
"loss": 1.203, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.782608695652174e-05, |
|
"loss": 1.234, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.8260869565217393e-05, |
|
"loss": 1.1969, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.8695652173913045e-05, |
|
"loss": 1.1641, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9130434782608697e-05, |
|
"loss": 1.2476, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.956521739130435e-05, |
|
"loss": 1.1758, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2e-05, |
|
"loss": 1.1608, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9999977592067603e-05, |
|
"loss": 1.1936, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9999910368370826e-05, |
|
"loss": 1.1984, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9999798329210938e-05, |
|
"loss": 1.1492, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.999964147509006e-05, |
|
"loss": 1.1872, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9999439806711137e-05, |
|
"loss": 1.0389, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9999193324977974e-05, |
|
"loss": 1.2634, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.999890203099519e-05, |
|
"loss": 1.1964, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9998565926068253e-05, |
|
"loss": 1.1295, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.999818501170344e-05, |
|
"loss": 1.1741, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9997759289607854e-05, |
|
"loss": 1.1604, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9997288761689404e-05, |
|
"loss": 1.142, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9996773430056806e-05, |
|
"loss": 1.1625, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9996213297019556e-05, |
|
"loss": 1.1561, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9995608365087945e-05, |
|
"loss": 1.12, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9994958636973024e-05, |
|
"loss": 1.2068, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.999426411558661e-05, |
|
"loss": 1.1427, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9993524804041255e-05, |
|
"loss": 1.1102, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9992740705650252e-05, |
|
"loss": 1.1538, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9991911823927607e-05, |
|
"loss": 1.0986, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9991038162588018e-05, |
|
"loss": 1.1527, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.999011972554688e-05, |
|
"loss": 1.1093, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9989156516920248e-05, |
|
"loss": 1.1408, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.998814854102482e-05, |
|
"loss": 1.1066, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9987095802377933e-05, |
|
"loss": 1.1342, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.998599830569752e-05, |
|
"loss": 1.1122, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.998485605590211e-05, |
|
"loss": 1.1034, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.99836690581108e-05, |
|
"loss": 1.0664, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9982437317643218e-05, |
|
"loss": 1.0988, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9981160840019513e-05, |
|
"loss": 1.1115, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9979839630960333e-05, |
|
"loss": 1.0505, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9978473696386787e-05, |
|
"loss": 1.0887, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9977063042420438e-05, |
|
"loss": 1.0579, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9975607675383243e-05, |
|
"loss": 1.139, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.997410760179756e-05, |
|
"loss": 1.1476, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.99725628283861e-05, |
|
"loss": 1.1369, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.99709733620719e-05, |
|
"loss": 1.1403, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9969339209978287e-05, |
|
"loss": 1.0507, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9967660379428855e-05, |
|
"loss": 1.1241, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.996593687794743e-05, |
|
"loss": 1.0748, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.996416871325803e-05, |
|
"loss": 1.0404, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.996235589328484e-05, |
|
"loss": 1.1234, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.996049842615217e-05, |
|
"loss": 1.0707, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.995859632018442e-05, |
|
"loss": 1.0819, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.995664958390604e-05, |
|
"loss": 1.1184, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.99546582260415e-05, |
|
"loss": 1.0907, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.995262225551524e-05, |
|
"loss": 1.0984, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9950541681451644e-05, |
|
"loss": 1.0967, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.9948416513174976e-05, |
|
"loss": 1.0085, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.994624676020936e-05, |
|
"loss": 1.1173, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9944032432278743e-05, |
|
"loss": 1.0637, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9941773539306818e-05, |
|
"loss": 1.1113, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9939470091417012e-05, |
|
"loss": 1.0784, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.9937122098932428e-05, |
|
"loss": 1.0924, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9934729572375792e-05, |
|
"loss": 1.1565, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.9932292522469424e-05, |
|
"loss": 1.0751, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.992981096013517e-05, |
|
"loss": 1.0475, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.992728489649437e-05, |
|
"loss": 1.195, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.992471434286779e-05, |
|
"loss": 1.0231, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.99220993107756e-05, |
|
"loss": 1.1734, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9919439811937283e-05, |
|
"loss": 1.114, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9916735858271615e-05, |
|
"loss": 1.0786, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.9913987461896597e-05, |
|
"loss": 1.0595, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.991119463512941e-05, |
|
"loss": 1.0714, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.9908357390486342e-05, |
|
"loss": 1.0782, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.9905475740682756e-05, |
|
"loss": 1.0971, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.990254969863301e-05, |
|
"loss": 1.0634, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.9899579277450417e-05, |
|
"loss": 1.189, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.989656449044718e-05, |
|
"loss": 1.1077, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9893505351134322e-05, |
|
"loss": 1.0724, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9890401873221642e-05, |
|
"loss": 1.0403, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9887254070617644e-05, |
|
"loss": 1.0682, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.988406195742948e-05, |
|
"loss": 1.1245, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.9880825547962882e-05, |
|
"loss": 1.0633, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.9877544856722094e-05, |
|
"loss": 1.0761, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.987421989840982e-05, |
|
"loss": 1.0576, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.987085068792715e-05, |
|
"loss": 1.0742, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.986743724037349e-05, |
|
"loss": 1.0786, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.98639795710465e-05, |
|
"loss": 1.1654, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.986047769544203e-05, |
|
"loss": 1.1486, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.9856931629254032e-05, |
|
"loss": 1.0743, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.9853341388374504e-05, |
|
"loss": 1.1069, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.9849706988893433e-05, |
|
"loss": 1.1149, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.9846028447098687e-05, |
|
"loss": 1.1201, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.984230577947597e-05, |
|
"loss": 1.0538, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.983853900270874e-05, |
|
"loss": 1.1556, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.983472813367813e-05, |
|
"loss": 1.0697, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.9830873189462886e-05, |
|
"loss": 1.1127, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.9826974187339267e-05, |
|
"loss": 1.0996, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.982303114478099e-05, |
|
"loss": 1.0341, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9819044079459142e-05, |
|
"loss": 0.9972, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.9815013009242103e-05, |
|
"loss": 1.0652, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.981093795219546e-05, |
|
"loss": 1.0404, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.980681892658193e-05, |
|
"loss": 1.0773, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.980265595086129e-05, |
|
"loss": 0.9761, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9798449043690272e-05, |
|
"loss": 1.0444, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9794198223922496e-05, |
|
"loss": 1.0361, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9789903510608374e-05, |
|
"loss": 1.0651, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.9785564922995042e-05, |
|
"loss": 1.0096, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.978118248052625e-05, |
|
"loss": 1.012, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9776756202842297e-05, |
|
"loss": 1.022, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.977228610977992e-05, |
|
"loss": 1.0339, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.976777222137224e-05, |
|
"loss": 1.1113, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9763214557848634e-05, |
|
"loss": 1.0431, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9758613139634662e-05, |
|
"loss": 1.0113, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.975396798735198e-05, |
|
"loss": 1.0248, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9749279121818235e-05, |
|
"loss": 1.1174, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9744546564046995e-05, |
|
"loss": 1.0334, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9739770335247616e-05, |
|
"loss": 1.0291, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9734950456825187e-05, |
|
"loss": 1.0916, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.9730086950380404e-05, |
|
"loss": 1.045, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.9725179837709494e-05, |
|
"loss": 1.0836, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.972022914080411e-05, |
|
"loss": 1.0648, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.9715234881851226e-05, |
|
"loss": 1.0938, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.9710197083233044e-05, |
|
"loss": 1.0282, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.9705115767526894e-05, |
|
"loss": 1.0196, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.9699990957505136e-05, |
|
"loss": 1.0088, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.9694822676135045e-05, |
|
"loss": 1.0781, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.9689610946578725e-05, |
|
"loss": 1.0253, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.968435579219299e-05, |
|
"loss": 1.0348, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.9679057236529266e-05, |
|
"loss": 1.079, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.9673715303333495e-05, |
|
"loss": 0.9829, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.9668330016546004e-05, |
|
"loss": 1.0186, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.9662901400301432e-05, |
|
"loss": 1.0874, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.965742947892858e-05, |
|
"loss": 1.0479, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.965191427695035e-05, |
|
"loss": 1.0716, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.964635581908359e-05, |
|
"loss": 1.0624, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.9640754130239004e-05, |
|
"loss": 1.0209, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.9635109235521057e-05, |
|
"loss": 1.044, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.9629421160227825e-05, |
|
"loss": 1.0349, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.962368992985091e-05, |
|
"loss": 1.052, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.9617915570075317e-05, |
|
"loss": 1.1025, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.961209810677934e-05, |
|
"loss": 1.0033, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.9606237566034443e-05, |
|
"loss": 1.0843, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.9600333974105145e-05, |
|
"loss": 0.9795, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.9594387357448908e-05, |
|
"loss": 1.0159, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.9588397742716004e-05, |
|
"loss": 1.04, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.958236515674941e-05, |
|
"loss": 1.048, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.9576289626584685e-05, |
|
"loss": 1.0624, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.957017117944984e-05, |
|
"loss": 1.0334, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.9564009842765225e-05, |
|
"loss": 1.0801, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.9557805644143408e-05, |
|
"loss": 1.038, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.955155861138903e-05, |
|
"loss": 1.0932, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.9545268772498723e-05, |
|
"loss": 1.0315, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.9538936155660934e-05, |
|
"loss": 1.0132, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.9532560789255838e-05, |
|
"loss": 1.0522, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.9526142701855187e-05, |
|
"loss": 1.026, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.9519681922222195e-05, |
|
"loss": 1.0, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.951317847931141e-05, |
|
"loss": 1.0203, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.950663240226857e-05, |
|
"loss": 1.0467, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.9500043720430484e-05, |
|
"loss": 1.021, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9493412463324903e-05, |
|
"loss": 1.05, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9486738660670373e-05, |
|
"loss": 0.9964, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9480022342376125e-05, |
|
"loss": 1.0743, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9473263538541916e-05, |
|
"loss": 1.1076, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.946646227945791e-05, |
|
"loss": 1.0902, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.945961859560454e-05, |
|
"loss": 1.0278, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.9452732517652366e-05, |
|
"loss": 1.0401, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.9445804076461946e-05, |
|
"loss": 1.0421, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.9438833303083677e-05, |
|
"loss": 1.0122, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.943182022875769e-05, |
|
"loss": 1.0435, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.942476488491369e-05, |
|
"loss": 1.0039, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.9417667303170803e-05, |
|
"loss": 1.0056, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.941052751533745e-05, |
|
"loss": 1.0392, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.940334555341122e-05, |
|
"loss": 1.0488, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.9396121449578694e-05, |
|
"loss": 1.0113, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.938885523621531e-05, |
|
"loss": 1.0754, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.9381546945885236e-05, |
|
"loss": 0.9854, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.9374196611341212e-05, |
|
"loss": 1.0239, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.9366804265524388e-05, |
|
"loss": 1.0045, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.935936994156421e-05, |
|
"loss": 1.0204, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.935189367277824e-05, |
|
"loss": 1.041, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9344375492672024e-05, |
|
"loss": 1.033, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9336815434938935e-05, |
|
"loss": 1.0796, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.932921353346003e-05, |
|
"loss": 1.0618, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.932156982230388e-05, |
|
"loss": 1.0393, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9313884335726443e-05, |
|
"loss": 0.9881, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9306157108170892e-05, |
|
"loss": 0.9971, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9298388174267463e-05, |
|
"loss": 1.0703, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9290577568833305e-05, |
|
"loss": 1.0303, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9282725326872324e-05, |
|
"loss": 0.9709, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.927483148357502e-05, |
|
"loss": 0.9944, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9266896074318335e-05, |
|
"loss": 0.999, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9258919134665494e-05, |
|
"loss": 1.0798, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9250900700365837e-05, |
|
"loss": 0.9999, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9242840807354676e-05, |
|
"loss": 1.1024, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.923473949175311e-05, |
|
"loss": 0.9646, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9226596789867895e-05, |
|
"loss": 0.9891, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.921841273819125e-05, |
|
"loss": 1.0312, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.921018737340071e-05, |
|
"loss": 1.0352, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.920192073235896e-05, |
|
"loss": 0.9777, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9193612852113666e-05, |
|
"loss": 0.9957, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.918526376989731e-05, |
|
"loss": 1.0297, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9176873523127025e-05, |
|
"loss": 1.0033, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9168442149404438e-05, |
|
"loss": 0.9653, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9159969686515462e-05, |
|
"loss": 0.9856, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9151456172430186e-05, |
|
"loss": 1.0076, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9142901645302654e-05, |
|
"loss": 1.0059, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9134306143470722e-05, |
|
"loss": 1.0037, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.912566970545587e-05, |
|
"loss": 1.0349, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.911699236996305e-05, |
|
"loss": 1.0429, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9108274175880483e-05, |
|
"loss": 0.9261, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9099515162279515e-05, |
|
"loss": 1.0949, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.909071536841442e-05, |
|
"loss": 1.0227, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9081874833722234e-05, |
|
"loss": 0.9658, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9072993597822586e-05, |
|
"loss": 1.0351, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.906407170051749e-05, |
|
"loss": 1.0135, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.905510918179121e-05, |
|
"loss": 0.9789, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9046106081810047e-05, |
|
"loss": 1.0324, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.903706244092217e-05, |
|
"loss": 0.9867, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9027978299657436e-05, |
|
"loss": 1.0064, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9018853698727216e-05, |
|
"loss": 1.0491, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.900968867902419e-05, |
|
"loss": 1.0126, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.9000483281622198e-05, |
|
"loss": 1.0314, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.8991237547776014e-05, |
|
"loss": 0.9933, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.8981951518921194e-05, |
|
"loss": 1.0303, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.8972625236673887e-05, |
|
"loss": 0.9933, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.896325874283063e-05, |
|
"loss": 1.0585, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.895385207936817e-05, |
|
"loss": 1.0379, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.894440528844329e-05, |
|
"loss": 1.0336, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.8934918412392596e-05, |
|
"loss": 1.0545, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.8925391493732346e-05, |
|
"loss": 1.0106, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.891582457515825e-05, |
|
"loss": 0.9264, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.890621769954528e-05, |
|
"loss": 1.0619, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.8896570909947477e-05, |
|
"loss": 0.9478, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.8886884249597763e-05, |
|
"loss": 1.0347, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.887715776190775e-05, |
|
"loss": 1.0231, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8867391490467534e-05, |
|
"loss": 0.9832, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8857585479045493e-05, |
|
"loss": 0.9978, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.884773977158813e-05, |
|
"loss": 0.9926, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8837854412219828e-05, |
|
"loss": 0.9447, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8827929445242678e-05, |
|
"loss": 1.0038, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8817964915136277e-05, |
|
"loss": 0.9859, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8807960866557535e-05, |
|
"loss": 1.024, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8797917344340453e-05, |
|
"loss": 0.9742, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8787834393495952e-05, |
|
"loss": 0.9774, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8777712059211643e-05, |
|
"loss": 1.0539, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8767550386851646e-05, |
|
"loss": 1.0163, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.875734942195637e-05, |
|
"loss": 1.0675, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8747109210242326e-05, |
|
"loss": 1.0296, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8736829797601903e-05, |
|
"loss": 1.003, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8726511230103183e-05, |
|
"loss": 0.9922, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8716153553989716e-05, |
|
"loss": 0.9657, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8705756815680324e-05, |
|
"loss": 0.9735, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8695321061768886e-05, |
|
"loss": 0.9142, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8684846339024145e-05, |
|
"loss": 0.9359, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8674332694389464e-05, |
|
"loss": 0.9541, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8663780174982656e-05, |
|
"loss": 1.0237, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8653188828095754e-05, |
|
"loss": 0.9368, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8642558701194787e-05, |
|
"loss": 1.0263, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8631889841919596e-05, |
|
"loss": 0.9919, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.862118229808359e-05, |
|
"loss": 1.001, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8610436117673557e-05, |
|
"loss": 1.0349, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.859965134884943e-05, |
|
"loss": 0.9706, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8588828039944086e-05, |
|
"loss": 1.0074, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.857796623946312e-05, |
|
"loss": 0.9529, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8567065996084628e-05, |
|
"loss": 0.9359, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8556127358658995e-05, |
|
"loss": 0.9875, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.854515037620867e-05, |
|
"loss": 1.0257, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.853413509792795e-05, |
|
"loss": 1.0232, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8523081573182754e-05, |
|
"loss": 0.9977, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8511989851510416e-05, |
|
"loss": 1.0244, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8500859982619438e-05, |
|
"loss": 0.9651, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.848969201638929e-05, |
|
"loss": 1.0111, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.847848600287019e-05, |
|
"loss": 1.0351, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8467241992282842e-05, |
|
"loss": 0.9433, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.845596003501826e-05, |
|
"loss": 1.0271, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8444640181637514e-05, |
|
"loss": 0.9916, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8433282482871497e-05, |
|
"loss": 0.9801, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8421886989620724e-05, |
|
"loss": 0.9983, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.841045375295508e-05, |
|
"loss": 0.9564, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.839898282411361e-05, |
|
"loss": 1.0238, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8387474254504265e-05, |
|
"loss": 0.9463, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8375928095703703e-05, |
|
"loss": 0.9783, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8364344399457028e-05, |
|
"loss": 0.9805, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.835272321767758e-05, |
|
"loss": 0.9801, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8341064602446686e-05, |
|
"loss": 0.9734, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.832936860601344e-05, |
|
"loss": 0.9487, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8317635280794466e-05, |
|
"loss": 0.9983, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8305864679373667e-05, |
|
"loss": 0.9424, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.829405685450202e-05, |
|
"loss": 1.0011, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8282211859097308e-05, |
|
"loss": 0.9649, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8270329746243903e-05, |
|
"loss": 0.9664, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.8258410569192526e-05, |
|
"loss": 1.0741, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.824645438135999e-05, |
|
"loss": 0.9481, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.8234461236328992e-05, |
|
"loss": 0.9758, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.822243118784785e-05, |
|
"loss": 0.9374, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.821036428983026e-05, |
|
"loss": 1.0554, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.8198260596355077e-05, |
|
"loss": 0.9472, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.818612016166604e-05, |
|
"loss": 0.9414, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8173943040171567e-05, |
|
"loss": 1.0546, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8161729286444474e-05, |
|
"loss": 1.058, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.814947895522176e-05, |
|
"loss": 0.9709, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8137192101404343e-05, |
|
"loss": 0.9947, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8124868780056814e-05, |
|
"loss": 0.9748, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.811250904640721e-05, |
|
"loss": 0.9811, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8100112955846746e-05, |
|
"loss": 1.018, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8087680563929574e-05, |
|
"loss": 0.9788, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.807521192637253e-05, |
|
"loss": 0.9772, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.806270709905489e-05, |
|
"loss": 0.9929, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.805016613801813e-05, |
|
"loss": 1.007, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8037589099465637e-05, |
|
"loss": 0.9805, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8024976039762507e-05, |
|
"loss": 0.9382, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8012327015435255e-05, |
|
"loss": 0.9942, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.7999642083171576e-05, |
|
"loss": 1.0483, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.798692129982009e-05, |
|
"loss": 0.977, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.7974164722390094e-05, |
|
"loss": 0.9409, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.796137240805129e-05, |
|
"loss": 0.9485, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.7948544414133534e-05, |
|
"loss": 0.9804, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.7935680798126597e-05, |
|
"loss": 1.0293, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.792278161767989e-05, |
|
"loss": 1.0039, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.79098469306022e-05, |
|
"loss": 0.9322, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.7896876794861443e-05, |
|
"loss": 0.9763, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.788387126858441e-05, |
|
"loss": 0.9941, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7870830410056487e-05, |
|
"loss": 1.0175, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7857754277721417e-05, |
|
"loss": 0.9435, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7844642930181008e-05, |
|
"loss": 0.9538, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7831496426194906e-05, |
|
"loss": 1.0082, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.78183148246803e-05, |
|
"loss": 1.0124, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.780509818471168e-05, |
|
"loss": 0.9355, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.779184656552056e-05, |
|
"loss": 1.076, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7778560026495218e-05, |
|
"loss": 0.9652, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7765238627180424e-05, |
|
"loss": 0.9779, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.775188242727719e-05, |
|
"loss": 0.9888, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.773849148664247e-05, |
|
"loss": 0.956, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.772506586528893e-05, |
|
"loss": 0.9886, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7711605623384653e-05, |
|
"loss": 0.9966, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7698110821252873e-05, |
|
"loss": 0.9489, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7684581519371714e-05, |
|
"loss": 0.9426, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7671017778373914e-05, |
|
"loss": 0.9119, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7657419659046555e-05, |
|
"loss": 0.9933, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7643787222330773e-05, |
|
"loss": 0.9659, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7630120529321518e-05, |
|
"loss": 0.9642, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7616419641267267e-05, |
|
"loss": 1.0612, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.760268461956972e-05, |
|
"loss": 0.9142, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.758891552578357e-05, |
|
"loss": 0.987, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7575112421616203e-05, |
|
"loss": 0.9447, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7561275368927422e-05, |
|
"loss": 1.0042, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7547404429729174e-05, |
|
"loss": 0.9916, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.753349966618528e-05, |
|
"loss": 0.9748, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.751956114061113e-05, |
|
"loss": 0.9446, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7505588915473437e-05, |
|
"loss": 0.9261, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7491583053389937e-05, |
|
"loss": 0.9571, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.747754361712911e-05, |
|
"loss": 0.9491, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7463470669609907e-05, |
|
"loss": 0.9405, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7449364273901457e-05, |
|
"loss": 0.931, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.743522449322279e-05, |
|
"loss": 0.9834, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.742105139094256e-05, |
|
"loss": 0.9414, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7406845030578747e-05, |
|
"loss": 0.9359, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7392605475798387e-05, |
|
"loss": 0.9825, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7378332790417275e-05, |
|
"loss": 0.9679, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7364027038399682e-05, |
|
"loss": 0.9437, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.734968828385808e-05, |
|
"loss": 0.9085, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7335316591052828e-05, |
|
"loss": 0.9587, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7320912024391915e-05, |
|
"loss": 1.014, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.7306474648430653e-05, |
|
"loss": 0.9894, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.729200452787139e-05, |
|
"loss": 0.9684, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.7277501727563227e-05, |
|
"loss": 1.0049, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.726296631250171e-05, |
|
"loss": 0.9846, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.724839834782856e-05, |
|
"loss": 0.9728, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7233797898831376e-05, |
|
"loss": 0.9802, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7219165030943327e-05, |
|
"loss": 0.9484, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.720449980974288e-05, |
|
"loss": 0.9905, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7189802300953488e-05, |
|
"loss": 0.9704, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.717507257044331e-05, |
|
"loss": 0.9493, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.716031068422491e-05, |
|
"loss": 0.9792, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7145516708454953e-05, |
|
"loss": 0.9278, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7130690709433927e-05, |
|
"loss": 0.9525, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.711583275360582e-05, |
|
"loss": 0.9911, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.710094290755785e-05, |
|
"loss": 0.8888, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7086021238020154e-05, |
|
"loss": 0.9815, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7071067811865477e-05, |
|
"loss": 0.9102, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7056082696108896e-05, |
|
"loss": 0.9677, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.70410659579075e-05, |
|
"loss": 0.8997, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7026017664560108e-05, |
|
"loss": 0.9211, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.7010937883506936e-05, |
|
"loss": 0.9918, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.699582668232934e-05, |
|
"loss": 0.9244, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.698068412874946e-05, |
|
"loss": 0.9245, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.6965510290629973e-05, |
|
"loss": 0.9079, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.695030523597374e-05, |
|
"loss": 0.9392, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.6935069032923525e-05, |
|
"loss": 0.9178, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.6919801749761696e-05, |
|
"loss": 1.0048, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.6904503454909905e-05, |
|
"loss": 0.9467, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.6889174216928776e-05, |
|
"loss": 0.9948, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.6873814104517617e-05, |
|
"loss": 0.9644, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6858423186514108e-05, |
|
"loss": 0.9621, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6843001531893967e-05, |
|
"loss": 0.932, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6827549209770676e-05, |
|
"loss": 0.9902, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6812066289395157e-05, |
|
"loss": 0.9288, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6796552840155453e-05, |
|
"loss": 0.9686, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6781008931576433e-05, |
|
"loss": 0.9789, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.676543463331946e-05, |
|
"loss": 0.9389, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6749830015182106e-05, |
|
"loss": 0.9272, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6734195147097815e-05, |
|
"loss": 0.9309, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6718530099135596e-05, |
|
"loss": 0.9441, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6702834941499718e-05, |
|
"loss": 0.9532, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6687109744529394e-05, |
|
"loss": 0.9301, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6671354578698444e-05, |
|
"loss": 0.9464, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.665556951461501e-05, |
|
"loss": 0.9501, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6639754623021227e-05, |
|
"loss": 0.8981, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6623909974792888e-05, |
|
"loss": 1.0658, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6608035640939165e-05, |
|
"loss": 0.9744, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6592131692602257e-05, |
|
"loss": 0.9901, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.657619820105708e-05, |
|
"loss": 0.9162, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.656023523771095e-05, |
|
"loss": 0.9462, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6544242874103278e-05, |
|
"loss": 0.909, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6528221181905217e-05, |
|
"loss": 0.9495, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6512170232919372e-05, |
|
"loss": 0.9533, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6496090099079452e-05, |
|
"loss": 0.9985, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6479980852449974e-05, |
|
"loss": 0.9243, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6463842565225914e-05, |
|
"loss": 0.9321, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6447675309732403e-05, |
|
"loss": 0.9812, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.64314791584244e-05, |
|
"loss": 0.9181, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6415254183886356e-05, |
|
"loss": 0.9205, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6399000458831893e-05, |
|
"loss": 0.9581, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6382718056103486e-05, |
|
"loss": 0.9824, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6366407048672135e-05, |
|
"loss": 0.9144, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6350067509637028e-05, |
|
"loss": 0.9512, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6333699512225224e-05, |
|
"loss": 0.9713, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6317303129791315e-05, |
|
"loss": 0.9305, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6300878435817115e-05, |
|
"loss": 0.9706, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.62844255039113e-05, |
|
"loss": 1.0021, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.626794440780911e-05, |
|
"loss": 0.8972, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.6251435221372007e-05, |
|
"loss": 0.9839, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.6234898018587336e-05, |
|
"loss": 0.9504, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.6218332873568e-05, |
|
"loss": 0.9305, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.620173986055213e-05, |
|
"loss": 0.93, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6185119053902748e-05, |
|
"loss": 0.9323, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.616847052810744e-05, |
|
"loss": 0.8579, |
|
"step": 474 |
|
}, |
|
{
"epoch": 0.93,
"learning_rate": 1.6151794357778006e-05,
"loss": 0.9636,
"step": 475
},
{
"epoch": 0.93,
"learning_rate": 1.613509061765015e-05,
"loss": 0.9515,
"step": 476
},
{
"epoch": 0.94,
"learning_rate": 1.611835938258313e-05,
"loss": 0.9269,
"step": 477
},
{
"epoch": 0.94,
"learning_rate": 1.6101600727559423e-05,
"loss": 0.901,
"step": 478
},
{
"epoch": 0.94,
"learning_rate": 1.6084814727684386e-05,
"loss": 0.9334,
"step": 479
},
{
"epoch": 0.94,
"learning_rate": 1.6068001458185934e-05,
"loss": 0.9775,
"step": 480
},
{
"epoch": 0.94,
"learning_rate": 1.6051160994414187e-05,
"loss": 0.961,
"step": 481
},
{
"epoch": 0.95,
"learning_rate": 1.603429341184114e-05,
"loss": 0.9844,
"step": 482
},
{
"epoch": 0.95,
"learning_rate": 1.601739878606032e-05,
"loss": 1.0144,
"step": 483
},
{
"epoch": 0.95,
"learning_rate": 1.600047719278645e-05,
"loss": 0.9194,
"step": 484
},
{
"epoch": 0.95,
"learning_rate": 1.5983528707855128e-05,
"loss": 0.9532,
"step": 485
},
{
"epoch": 0.95,
"learning_rate": 1.596655340722244e-05,
"loss": 0.939,
"step": 486
},
{
"epoch": 0.95,
"learning_rate": 1.5949551366964675e-05,
"loss": 0.9464,
"step": 487
},
{
"epoch": 0.96,
"learning_rate": 1.593252266327794e-05,
"loss": 0.99,
"step": 488
},
{
"epoch": 0.96,
"learning_rate": 1.5915467372477843e-05,
"loss": 0.9315,
"step": 489
},
{
"epoch": 0.96,
"learning_rate": 1.5898385570999146e-05,
"loss": 0.9315,
"step": 490
},
{
"epoch": 0.96,
"learning_rate": 1.5881277335395425e-05,
"loss": 0.9095,
"step": 491
},
{
"epoch": 0.96,
"learning_rate": 1.5864142742338707e-05,
"loss": 0.9562,
"step": 492
},
{
"epoch": 0.97,
"learning_rate": 1.584698186861916e-05,
"loss": 0.9453,
"step": 493
},
{
"epoch": 0.97,
"learning_rate": 1.5829794791144723e-05,
"loss": 0.8973,
"step": 494
},
{
"epoch": 0.97,
"learning_rate": 1.5812581586940767e-05,
"loss": 0.8567,
"step": 495
},
{
"epoch": 0.97,
"learning_rate": 1.5795342333149757e-05,
"loss": 0.9142,
"step": 496
},
{
"epoch": 0.97,
"learning_rate": 1.57780771070309e-05,
"loss": 0.8777,
"step": 497
},
{
"epoch": 0.98,
"learning_rate": 1.57607859859598e-05,
"loss": 0.9424,
"step": 498
},
{
"epoch": 0.98,
"learning_rate": 1.5743469047428113e-05,
"loss": 0.908,
"step": 499
},
{
"epoch": 0.98,
"learning_rate": 1.572612636904319e-05,
"loss": 0.9652,
"step": 500
},
{
"epoch": 0.98,
"learning_rate": 1.5708758028527754e-05,
"loss": 0.9331,
"step": 501
},
{
"epoch": 0.98,
"learning_rate": 1.5691364103719515e-05,
"loss": 0.9343,
"step": 502
},
{
"epoch": 0.99,
"learning_rate": 1.5673944672570858e-05,
"loss": 0.9205,
"step": 503
},
{
"epoch": 0.99,
"learning_rate": 1.565649981314847e-05,
"loss": 0.9272,
"step": 504
},
{
"epoch": 0.99,
"learning_rate": 1.5639029603632995e-05,
"loss": 0.9948,
"step": 505
},
{
"epoch": 0.99,
"learning_rate": 1.5621534122318682e-05,
"loss": 0.9337,
"step": 506
},
{
"epoch": 0.99,
"learning_rate": 1.5604013447613057e-05,
"loss": 0.9129,
"step": 507
},
{
"epoch": 1.0,
"learning_rate": 1.5586467658036526e-05,
"loss": 0.9152,
"step": 508
},
{
"epoch": 1.0,
"learning_rate": 1.556889683222207e-05,
"loss": 0.9607,
"step": 509
},
{
"epoch": 1.0,
"learning_rate": 1.5551301048914863e-05,
"loss": 1.0045,
"step": 510
},
{
"epoch": 1.0,
"learning_rate": 1.5533680386971927e-05,
"loss": 0.8021,
"step": 511
},
{
"epoch": 1.0,
"learning_rate": 1.5516034925361785e-05,
"loss": 0.7985,
"step": 512
},
{
"epoch": 1.01,
"learning_rate": 1.5498364743164098e-05,
"loss": 0.7471,
"step": 513
},
{
"epoch": 1.01,
"learning_rate": 1.5480669919569313e-05,
"loss": 0.6998,
"step": 514
},
{
"epoch": 1.01,
"learning_rate": 1.5462950533878318e-05,
"loss": 0.7766,
"step": 515
},
{
"epoch": 1.01,
"learning_rate": 1.544520666550207e-05,
"loss": 0.718,
"step": 516
},
{
"epoch": 1.01,
"learning_rate": 1.5427438393961246e-05,
"loss": 0.7452,
"step": 517
},
{
"epoch": 1.02,
"learning_rate": 1.54096457988859e-05,
"loss": 0.7485,
"step": 518
},
{
"epoch": 1.02,
"learning_rate": 1.5391828960015074e-05,
"loss": 0.7644,
"step": 519
},
{
"epoch": 1.02,
"learning_rate": 1.537398795719648e-05,
"loss": 0.8274,
"step": 520
},
{
"epoch": 1.02,
"learning_rate": 1.5356122870386117e-05,
"loss": 0.7165,
"step": 521
},
{
"epoch": 1.02,
"learning_rate": 1.533823377964791e-05,
"loss": 0.8017,
"step": 522
},
{
"epoch": 1.03,
"learning_rate": 1.5320320765153367e-05,
"loss": 0.6981,
"step": 523
},
{
"epoch": 1.03,
"learning_rate": 1.5302383907181212e-05,
"loss": 0.7214,
"step": 524
},
{
"epoch": 1.03,
"learning_rate": 1.5284423286117035e-05,
"loss": 0.7203,
"step": 525
},
{
"epoch": 1.03,
"learning_rate": 1.52664389824529e-05,
"loss": 0.6644,
"step": 526
},
{
"epoch": 1.03,
"learning_rate": 1.524843107678702e-05,
"loss": 0.8209,
"step": 527
},
{
"epoch": 1.04,
"learning_rate": 1.5230399649823389e-05,
"loss": 0.7709,
"step": 528
},
{
"epoch": 1.04,
"learning_rate": 1.52123447823714e-05,
"loss": 0.7315,
"step": 529
},
{
"epoch": 1.04,
"learning_rate": 1.5194266555345505e-05,
"loss": 0.7588,
"step": 530
},
{
"epoch": 1.04,
"learning_rate": 1.5176165049764846e-05,
"loss": 0.7513,
"step": 531
},
{
"epoch": 1.04,
"learning_rate": 1.5158040346752878e-05,
"loss": 0.7339,
"step": 532
},
{
"epoch": 1.05,
"learning_rate": 1.513989252753703e-05,
"loss": 0.7708,
"step": 533
},
{
"epoch": 1.05,
"learning_rate": 1.5121721673448319e-05,
"loss": 0.7232,
"step": 534
},
{
"epoch": 1.05,
"learning_rate": 1.5103527865921005e-05,
"loss": 0.7556,
"step": 535
},
{
"epoch": 1.05,
"learning_rate": 1.5085311186492206e-05,
"loss": 0.7018,
"step": 536
},
{
"epoch": 1.05,
"learning_rate": 1.5067071716801546e-05,
"loss": 0.6902,
"step": 537
},
{
"epoch": 1.05,
"learning_rate": 1.5048809538590789e-05,
"loss": 0.7054,
"step": 538
},
{
"epoch": 1.06,
"learning_rate": 1.5030524733703464e-05,
"loss": 0.796,
"step": 539
},
{
"epoch": 1.06,
"learning_rate": 1.5012217384084501e-05,
"loss": 0.7685,
"step": 540
},
{
"epoch": 1.06,
"learning_rate": 1.4993887571779877e-05,
"loss": 0.7459,
"step": 541
},
{
"epoch": 1.06,
"learning_rate": 1.4975535378936228e-05,
"loss": 0.6768,
"step": 542
},
{
"epoch": 1.06,
"learning_rate": 1.4957160887800494e-05,
"loss": 0.7152,
"step": 543
},
{
"epoch": 1.07,
"learning_rate": 1.4938764180719548e-05,
"loss": 0.7735,
"step": 544
},
{
"epoch": 1.07,
"learning_rate": 1.4920345340139819e-05,
"loss": 0.7751,
"step": 545
},
{
"epoch": 1.07,
"learning_rate": 1.490190444860694e-05,
"loss": 0.7263,
"step": 546
},
{
"epoch": 1.07,
"learning_rate": 1.4883441588765355e-05,
"loss": 0.6805,
"step": 547
},
{
"epoch": 1.07,
"learning_rate": 1.4864956843357967e-05,
"loss": 0.7375,
"step": 548
},
{
"epoch": 1.08,
"learning_rate": 1.4846450295225769e-05,
"loss": 0.7809,
"step": 549
},
{
"epoch": 1.08,
"learning_rate": 1.482792202730745e-05,
"loss": 0.6919,
"step": 550
},
{
"epoch": 1.08,
"learning_rate": 1.4809372122639047e-05,
"loss": 0.7146,
"step": 551
},
{
"epoch": 1.08,
"learning_rate": 1.4790800664353562e-05,
"loss": 0.7679,
"step": 552
},
{
"epoch": 1.08,
"learning_rate": 1.477220773568059e-05,
"loss": 0.7337,
"step": 553
},
{
"epoch": 1.09,
"learning_rate": 1.475359341994595e-05,
"loss": 0.7381,
"step": 554
},
{
"epoch": 1.09,
"learning_rate": 1.473495780057131e-05,
"loss": 0.7658,
"step": 555
},
{
"epoch": 1.09,
"learning_rate": 1.4716300961073808e-05,
"loss": 0.7123,
"step": 556
},
{
"epoch": 1.09,
"learning_rate": 1.469762298506568e-05,
"loss": 0.6939,
"step": 557
},
{
"epoch": 1.09,
"learning_rate": 1.4678923956253894e-05,
"loss": 0.7063,
"step": 558
},
{
"epoch": 1.1,
"learning_rate": 1.4660203958439768e-05,
"loss": 0.7473,
"step": 559
},
{
"epoch": 1.1,
"learning_rate": 1.4641463075518587e-05,
"loss": 0.7373,
"step": 560
},
{
"epoch": 1.1,
"learning_rate": 1.4622701391479237e-05,
"loss": 0.7771,
"step": 561
},
{
"epoch": 1.1,
"learning_rate": 1.460391899040383e-05,
"loss": 0.718,
"step": 562
},
{
"epoch": 1.1,
"learning_rate": 1.4585115956467322e-05,
"loss": 0.7029,
"step": 563
},
{
"epoch": 1.11,
"learning_rate": 1.4566292373937133e-05,
"loss": 0.7365,
"step": 564
},
{
"epoch": 1.11,
"learning_rate": 1.4547448327172778e-05,
"loss": 0.7436,
"step": 565
},
{
"epoch": 1.11,
"learning_rate": 1.4528583900625481e-05,
"loss": 0.7444,
"step": 566
},
{
"epoch": 1.11,
"learning_rate": 1.45096991788378e-05,
"loss": 0.7441,
"step": 567
},
{
"epoch": 1.11,
"learning_rate": 1.4490794246443249e-05,
"loss": 0.7738,
"step": 568
},
{
"epoch": 1.12,
"learning_rate": 1.4471869188165923e-05,
"loss": 0.7806,
"step": 569
},
{
"epoch": 1.12,
"learning_rate": 1.4452924088820101e-05,
"loss": 0.6921,
"step": 570
},
{
"epoch": 1.12,
"learning_rate": 1.4433959033309888e-05,
"loss": 0.7087,
"step": 571
},
{
"epoch": 1.12,
"learning_rate": 1.4414974106628816e-05,
"loss": 0.7206,
"step": 572
},
{
"epoch": 1.12,
"learning_rate": 1.4395969393859478e-05,
"loss": 0.7585,
"step": 573
},
{
"epoch": 1.13,
"learning_rate": 1.4376944980173138e-05,
"loss": 0.7575,
"step": 574
},
{
"epoch": 1.13,
"learning_rate": 1.435790095082935e-05,
"loss": 0.7606,
"step": 575
},
{
"epoch": 1.13,
"learning_rate": 1.4338837391175582e-05,
"loss": 0.6768,
"step": 576
},
{
"epoch": 1.13,
"learning_rate": 1.4319754386646824e-05,
"loss": 0.7467,
"step": 577
},
{
"epoch": 1.13,
"learning_rate": 1.4300652022765207e-05,
"loss": 0.7047,
"step": 578
},
{
"epoch": 1.14,
"learning_rate": 1.4281530385139627e-05,
"loss": 0.7383,
"step": 579
},
{
"epoch": 1.14,
"learning_rate": 1.4262389559465364e-05,
"loss": 0.7478,
"step": 580
},
{
"epoch": 1.14,
"learning_rate": 1.4243229631523678e-05,
"loss": 0.722,
"step": 581
},
{
"epoch": 1.14,
"learning_rate": 1.4224050687181442e-05,
"loss": 0.6896,
"step": 582
},
{
"epoch": 1.14,
"learning_rate": 1.4204852812390757e-05,
"loss": 0.689,
"step": 583
},
{
"epoch": 1.15,
"learning_rate": 1.4185636093188558e-05,
"loss": 0.7549,
"step": 584
},
{
"epoch": 1.15,
"learning_rate": 1.4166400615696232e-05,
"loss": 0.7865,
"step": 585
},
{
"epoch": 1.15,
"learning_rate": 1.4147146466119235e-05,
"loss": 0.7154,
"step": 586
},
{
"epoch": 1.15,
"learning_rate": 1.4127873730746706e-05,
"loss": 0.7073,
"step": 587
},
{
"epoch": 1.15,
"learning_rate": 1.4108582495951077e-05,
"loss": 0.7755,
"step": 588
},
{
"epoch": 1.15,
"learning_rate": 1.4089272848187678e-05,
"loss": 0.7189,
"step": 589
},
{
"epoch": 1.16,
"learning_rate": 1.406994487399437e-05,
"loss": 0.754,
"step": 590
},
{
"epoch": 1.16,
"learning_rate": 1.4050598659991144e-05,
"loss": 0.7286,
"step": 591
},
{
"epoch": 1.16,
"learning_rate": 1.4031234292879726e-05,
"loss": 0.6891,
"step": 592
},
{
"epoch": 1.16,
"learning_rate": 1.40118518594432e-05,
"loss": 0.7654,
"step": 593
},
{
"epoch": 1.16,
"learning_rate": 1.3992451446545624e-05,
"loss": 0.7363,
"step": 594
},
{
"epoch": 1.17,
"learning_rate": 1.397303314113162e-05,
"loss": 0.7878,
"step": 595
},
{
"epoch": 1.17,
"learning_rate": 1.3953597030226006e-05,
"loss": 0.7313,
"step": 596
},
{
"epoch": 1.17,
"learning_rate": 1.3934143200933398e-05,
"loss": 0.6866,
"step": 597
},
{
"epoch": 1.17,
"learning_rate": 1.3914671740437811e-05,
"loss": 0.7035,
"step": 598
},
{
"epoch": 1.17,
"learning_rate": 1.3895182736002276e-05,
"loss": 0.663,
"step": 599
},
{
"epoch": 1.18,
"learning_rate": 1.3875676274968453e-05,
"loss": 0.7065,
"step": 600
},
{
"epoch": 1.18,
"learning_rate": 1.3856152444756235e-05,
"loss": 0.7959,
"step": 601
},
{
"epoch": 1.18,
"learning_rate": 1.3836611332863356e-05,
"loss": 0.7671,
"step": 602
},
{
"epoch": 1.18,
"learning_rate": 1.3817053026865e-05,
"loss": 0.7599,
"step": 603
},
{
"epoch": 1.18,
"learning_rate": 1.3797477614413404e-05,
"loss": 0.7261,
"step": 604
},
{
"epoch": 1.19,
"learning_rate": 1.3777885183237473e-05,
"loss": 0.7651,
"step": 605
},
{
"epoch": 1.19,
"learning_rate": 1.3758275821142382e-05,
"loss": 0.7575,
"step": 606
},
{
"epoch": 1.19,
"learning_rate": 1.3738649616009183e-05,
"loss": 0.7243,
"step": 607
},
{
"epoch": 1.19,
"learning_rate": 1.3719006655794414e-05,
"loss": 0.7304,
"step": 608
},
{
"epoch": 1.19,
"learning_rate": 1.3699347028529694e-05,
"loss": 0.7264,
"step": 609
},
{
"epoch": 1.2,
"learning_rate": 1.3679670822321347e-05,
"loss": 0.7004,
"step": 610
},
{
"epoch": 1.2,
"learning_rate": 1.3659978125349991e-05,
"loss": 0.7229,
"step": 611
},
{
"epoch": 1.2,
"learning_rate": 1.3640269025870152e-05,
"loss": 0.6133,
"step": 612
},
{
"epoch": 1.2,
"learning_rate": 1.3620543612209861e-05,
"loss": 0.7549,
"step": 613
},
{
"epoch": 1.2,
"learning_rate": 1.3600801972770272e-05,
"loss": 0.7877,
"step": 614
},
{
"epoch": 1.21,
"learning_rate": 1.3581044196025243e-05,
"loss": 0.7258,
"step": 615
},
{
"epoch": 1.21,
"learning_rate": 1.3561270370520957e-05,
"loss": 0.7429,
"step": 616
},
{
"epoch": 1.21,
"learning_rate": 1.3541480584875531e-05,
"loss": 0.7792,
"step": 617
},
{
"epoch": 1.21,
"learning_rate": 1.3521674927778594e-05,
"loss": 0.717,
"step": 618
},
{
"epoch": 1.21,
"learning_rate": 1.3501853487990917e-05,
"loss": 0.7192,
"step": 619
},
{
"epoch": 1.22,
"learning_rate": 1.348201635434399e-05,
"loss": 0.7398,
"step": 620
},
{
"epoch": 1.22,
"learning_rate": 1.3462163615739647e-05,
"loss": 0.7234,
"step": 621
},
{
"epoch": 1.22,
"learning_rate": 1.3442295361149651e-05,
"loss": 0.7678,
"step": 622
},
{
"epoch": 1.22,
"learning_rate": 1.3422411679615305e-05,
"loss": 0.7264,
"step": 623
},
{
"epoch": 1.22,
"learning_rate": 1.3402512660247046e-05,
"loss": 0.7632,
"step": 624
},
{
"epoch": 1.23,
"learning_rate": 1.3382598392224053e-05,
"loss": 0.7015,
"step": 625
},
{
"epoch": 1.23,
"learning_rate": 1.336266896479384e-05,
"loss": 0.7232,
"step": 626
},
{
"epoch": 1.23,
"learning_rate": 1.3342724467271852e-05,
"loss": 0.765,
"step": 627
},
{
"epoch": 1.23,
"learning_rate": 1.3322764989041086e-05,
"loss": 0.7443,
"step": 628
},
{
"epoch": 1.23,
"learning_rate": 1.3302790619551673e-05,
"loss": 0.7564,
"step": 629
},
{
"epoch": 1.24,
"learning_rate": 1.328280144832047e-05,
"loss": 0.7023,
"step": 630
},
{
"epoch": 1.24,
"learning_rate": 1.326279756493068e-05,
"loss": 0.7387,
"step": 631
},
{
"epoch": 1.24,
"learning_rate": 1.3242779059031436e-05,
"loss": 0.6726,
"step": 632
},
{
"epoch": 1.24,
"learning_rate": 1.3222746020337406e-05,
"loss": 0.7099,
"step": 633
},
{
"epoch": 1.24,
"learning_rate": 1.3202698538628376e-05,
"loss": 0.7268,
"step": 634
},
{
"epoch": 1.25,
"learning_rate": 1.3182636703748881e-05,
"loss": 0.7254,
"step": 635
},
{
"epoch": 1.25,
"learning_rate": 1.3162560605607763e-05,
"loss": 0.6561,
"step": 636
},
{
"epoch": 1.25,
"learning_rate": 1.3142470334177794e-05,
"loss": 0.7076,
"step": 637
},
{
"epoch": 1.25,
"learning_rate": 1.3122365979495259e-05,
"loss": 0.6512,
"step": 638
},
{
"epoch": 1.25,
"learning_rate": 1.3102247631659564e-05,
"loss": 0.6363,
"step": 639
},
{
"epoch": 1.25,
"learning_rate": 1.3082115380832829e-05,
"loss": 0.7633,
"step": 640
},
{
"epoch": 1.26,
"learning_rate": 1.306196931723947e-05,
"loss": 0.716,
"step": 641
},
{
"epoch": 1.26,
"learning_rate": 1.3041809531165819e-05,
"loss": 0.7556,
"step": 642
},
{
"epoch": 1.26,
"learning_rate": 1.3021636112959696e-05,
"loss": 0.7228,
"step": 643
},
{
"epoch": 1.26,
"learning_rate": 1.3001449153030024e-05,
"loss": 0.756,
"step": 644
},
{
"epoch": 1.26,
"learning_rate": 1.2981248741846404e-05,
"loss": 0.773,
"step": 645
},
{
"epoch": 1.27,
"learning_rate": 1.2961034969938732e-05,
"loss": 0.7269,
"step": 646
},
{
"epoch": 1.27,
"learning_rate": 1.2940807927896773e-05,
"loss": 0.6693,
"step": 647
},
{
"epoch": 1.27,
"learning_rate": 1.292056770636976e-05,
"loss": 0.6987,
"step": 648
},
{
"epoch": 1.27,
"learning_rate": 1.2900314396066e-05,
"loss": 0.6602,
"step": 649
},
{
"epoch": 1.27,
"learning_rate": 1.288004808775246e-05,
"loss": 0.7263,
"step": 650
},
{
"epoch": 1.28,
"learning_rate": 1.2859768872254344e-05,
"loss": 0.7176,
"step": 651
},
{
"epoch": 1.28,
"learning_rate": 1.2839476840454714e-05,
"loss": 0.7196,
"step": 652
},
{
"epoch": 1.28,
"learning_rate": 1.2819172083294068e-05,
"loss": 0.727,
"step": 653
},
{
"epoch": 1.28,
"learning_rate": 1.2798854691769927e-05,
"loss": 0.7524,
"step": 654
},
{
"epoch": 1.28,
"learning_rate": 1.2778524756936438e-05,
"loss": 0.7204,
"step": 655
},
{
"epoch": 1.29,
"learning_rate": 1.2758182369903969e-05,
"loss": 0.7483,
"step": 656
},
{
"epoch": 1.29,
"learning_rate": 1.2737827621838678e-05,
"loss": 0.7504,
"step": 657
},
{
"epoch": 1.29,
"learning_rate": 1.2717460603962132e-05,
"loss": 0.739,
"step": 658
},
{
"epoch": 1.29,
"learning_rate": 1.2697081407550886e-05,
"loss": 0.7273,
"step": 659
},
{
"epoch": 1.29,
"learning_rate": 1.2676690123936066e-05,
"loss": 0.7328,
"step": 660
},
{
"epoch": 1.3,
"learning_rate": 1.2656286844502978e-05,
"loss": 0.693,
"step": 661
},
{
"epoch": 1.3,
"learning_rate": 1.2635871660690677e-05,
"loss": 0.7774,
"step": 662
},
{
"epoch": 1.3,
"learning_rate": 1.261544466399158e-05,
"loss": 0.7683,
"step": 663
},
{
"epoch": 1.3,
"learning_rate": 1.259500594595104e-05,
"loss": 0.7132,
"step": 664
},
{
"epoch": 1.3,
"learning_rate": 1.2574555598166932e-05,
"loss": 0.7224,
"step": 665
},
{
"epoch": 1.31,
"learning_rate": 1.2554093712289267e-05,
"loss": 0.7322,
"step": 666
},
{
"epoch": 1.31,
"learning_rate": 1.2533620380019752e-05,
"loss": 0.7392,
"step": 667
},
{
"epoch": 1.31,
"learning_rate": 1.2513135693111399e-05,
"loss": 0.7136,
"step": 668
},
{
"epoch": 1.31,
"learning_rate": 1.2492639743368096e-05,
"loss": 0.7039,
"step": 669
},
{
"epoch": 1.31,
"learning_rate": 1.2472132622644222e-05,
"loss": 0.7226,
"step": 670
},
{
"epoch": 1.32,
"learning_rate": 1.2451614422844208e-05,
"loss": 0.7374,
"step": 671
},
{
"epoch": 1.32,
"learning_rate": 1.2431085235922143e-05,
"loss": 0.7166,
"step": 672
},
{
"epoch": 1.32,
"learning_rate": 1.2410545153881355e-05,
"loss": 0.7439,
"step": 673
},
{
"epoch": 1.32,
"learning_rate": 1.2389994268773995e-05,
"loss": 0.7176,
"step": 674
},
{
"epoch": 1.32,
"learning_rate": 1.2369432672700634e-05,
"loss": 0.7174,
"step": 675
},
{
"epoch": 1.33,
"learning_rate": 1.234886045780984e-05,
"loss": 0.7341,
"step": 676
},
{
"epoch": 1.33,
"learning_rate": 1.2328277716297771e-05,
"loss": 0.7129,
"step": 677
},
{
"epoch": 1.33,
"learning_rate": 1.2307684540407775e-05,
"loss": 0.7583,
"step": 678
},
{
"epoch": 1.33,
"learning_rate": 1.2287081022429935e-05,
"loss": 0.7298,
"step": 679
},
{
"epoch": 1.33,
"learning_rate": 1.2266467254700712e-05,
"loss": 0.7048,
"step": 680
},
{
"epoch": 1.34,
"learning_rate": 1.224584332960248e-05,
"loss": 0.7671,
"step": 681
},
{
"epoch": 1.34,
"learning_rate": 1.2225209339563144e-05,
"loss": 0.7035,
"step": 682
},
{
"epoch": 1.34,
"learning_rate": 1.220456537705572e-05,
"loss": 0.7289,
"step": 683
},
{
"epoch": 1.34,
"learning_rate": 1.2183911534597908e-05,
"loss": 0.7258,
"step": 684
},
{
"epoch": 1.34,
"learning_rate": 1.2163247904751688e-05,
"loss": 0.7627,
"step": 685
},
{
"epoch": 1.35,
"learning_rate": 1.2142574580122903e-05,
"loss": 0.6861,
"step": 686
},
{
"epoch": 1.35,
"learning_rate": 1.2121891653360851e-05,
"loss": 0.6678,
"step": 687
},
{
"epoch": 1.35,
"learning_rate": 1.210119921715785e-05,
"loss": 0.7285,
"step": 688
},
{
"epoch": 1.35,
"learning_rate": 1.2080497364248849e-05,
"loss": 0.7086,
"step": 689
},
{
"epoch": 1.35,
"learning_rate": 1.2059786187410984e-05,
"loss": 0.6572,
"step": 690
},
{
"epoch": 1.35,
"learning_rate": 1.2039065779463195e-05,
"loss": 0.7477,
"step": 691
},
{
"epoch": 1.36,
"learning_rate": 1.2018336233265775e-05,
"loss": 0.7336,
"step": 692
},
{
"epoch": 1.36,
"learning_rate": 1.1997597641719976e-05,
"loss": 0.6717,
"step": 693
},
{
"epoch": 1.36,
"learning_rate": 1.1976850097767598e-05,
"loss": 0.7632,
"step": 694
},
{
"epoch": 1.36,
"learning_rate": 1.1956093694390547e-05,
"loss": 0.6646,
"step": 695
},
{
"epoch": 1.36,
"learning_rate": 1.1935328524610443e-05,
"loss": 0.7142,
"step": 696
},
{
"epoch": 1.37,
"learning_rate": 1.1914554681488188e-05,
"loss": 0.7662,
"step": 697
},
{
"epoch": 1.37,
"learning_rate": 1.1893772258123554e-05,
"loss": 0.6749,
"step": 698
},
{
"epoch": 1.37,
"learning_rate": 1.1872981347654778e-05,
"loss": 0.6461,
"step": 699
},
{
"epoch": 1.37,
"learning_rate": 1.1852182043258112e-05,
"loss": 0.7511,
"step": 700
},
{
"epoch": 1.37,
"learning_rate": 1.1831374438147445e-05,
"loss": 0.7491,
"step": 701
},
{
"epoch": 1.38,
"learning_rate": 1.1810558625573856e-05,
"loss": 0.7359,
"step": 702
},
{
"epoch": 1.38,
"learning_rate": 1.1789734698825209e-05,
"loss": 0.7372,
"step": 703
},
{
"epoch": 1.38,
"learning_rate": 1.176890275122573e-05,
"loss": 0.7181,
"step": 704
},
{
"epoch": 1.38,
"learning_rate": 1.17480628761356e-05,
"loss": 0.7166,
"step": 705
},
{
"epoch": 1.38,
"learning_rate": 1.1727215166950519e-05,
"loss": 0.7076,
"step": 706
},
{
"epoch": 1.39,
"learning_rate": 1.1706359717101295e-05,
"loss": 0.6737,
"step": 707
},
{
"epoch": 1.39,
"learning_rate": 1.1685496620053434e-05,
"loss": 0.7461,
"step": 708
},
{
"epoch": 1.39,
"learning_rate": 1.1664625969306707e-05,
"loss": 0.7243,
"step": 709
},
{
"epoch": 1.39,
"learning_rate": 1.1643747858394743e-05,
"loss": 0.6899,
"step": 710
},
{
"epoch": 1.39,
"learning_rate": 1.16228623808846e-05,
"loss": 0.8022,
"step": 711
},
{
"epoch": 1.4,
"learning_rate": 1.1601969630376353e-05,
"loss": 0.7704,
"step": 712
},
{
"epoch": 1.4,
"learning_rate": 1.158106970050267e-05,
"loss": 0.7285,
"step": 713
},
{
"epoch": 1.4,
"learning_rate": 1.156016268492839e-05,
"loss": 0.6435,
"step": 714
},
{
"epoch": 1.4,
"learning_rate": 1.1539248677350114e-05,
"loss": 0.7342,
"step": 715
},
{
"epoch": 1.4,
"learning_rate": 1.151832777149578e-05,
"loss": 0.7427,
"step": 716
},
{
"epoch": 1.41,
"learning_rate": 1.1497400061124231e-05,
"loss": 0.6811,
"step": 717
},
{
"epoch": 1.41,
"learning_rate": 1.1476465640024814e-05,
"loss": 0.7563,
"step": 718
},
{
"epoch": 1.41,
"learning_rate": 1.1455524602016946e-05,
"loss": 0.6817,
"step": 719
},
{
"epoch": 1.41,
"learning_rate": 1.1434577040949699e-05,
"loss": 0.7575,
"step": 720
},
{
"epoch": 1.41,
"learning_rate": 1.1413623050701381e-05,
"loss": 0.7814,
"step": 721
},
{
"epoch": 1.42,
"learning_rate": 1.1392662725179114e-05,
"loss": 0.7329,
"step": 722
},
{
"epoch": 1.42,
"learning_rate": 1.1371696158318404e-05,
"loss": 0.7242,
"step": 723
},
{
"epoch": 1.42,
"learning_rate": 1.1350723444082736e-05,
"loss": 0.722,
"step": 724
},
{
"epoch": 1.42,
"learning_rate": 1.1329744676463144e-05,
"loss": 0.6673,
"step": 725
},
{
"epoch": 1.42,
"learning_rate": 1.1308759949477786e-05,
"loss": 0.7053,
"step": 726
},
{
"epoch": 1.43,
"learning_rate": 1.1287769357171536e-05,
"loss": 0.724,
"step": 727
},
{
"epoch": 1.43,
"learning_rate": 1.1266772993615543e-05,
"loss": 0.6969,
"step": 728
},
{
"epoch": 1.43,
"learning_rate": 1.1245770952906829e-05,
"loss": 0.6824,
"step": 729
},
{
"epoch": 1.43,
"learning_rate": 1.1224763329167859e-05,
"loss": 0.785,
"step": 730
},
{
"epoch": 1.43,
"learning_rate": 1.1203750216546105e-05,
"loss": 0.6677,
"step": 731
},
{
"epoch": 1.44,
"learning_rate": 1.1182731709213658e-05,
"loss": 0.7184,
"step": 732
},
{
"epoch": 1.44,
"learning_rate": 1.1161707901366776e-05,
"loss": 0.736,
"step": 733
},
{
"epoch": 1.44,
"learning_rate": 1.1140678887225468e-05,
"loss": 0.7286,
"step": 734
},
{
"epoch": 1.44,
"learning_rate": 1.1119644761033079e-05,
"loss": 0.7466,
"step": 735
},
{
"epoch": 1.44,
"learning_rate": 1.1098605617055871e-05,
"loss": 0.7381,
"step": 736
},
{
"epoch": 1.45,
"learning_rate": 1.1077561549582578e-05,
"loss": 0.7431,
"step": 737
},
{
"epoch": 1.45,
"learning_rate": 1.1056512652924014e-05,
"loss": 0.7195,
"step": 738
},
{
"epoch": 1.45,
"learning_rate": 1.1035459021412631e-05,
"loss": 0.6959,
"step": 739
},
{
"epoch": 1.45,
"learning_rate": 1.1014400749402096e-05,
"loss": 0.6758,
"step": 740
},
{
"epoch": 1.45,
"learning_rate": 1.0993337931266879e-05,
"loss": 0.6869,
"step": 741
},
{
"epoch": 1.45,
"learning_rate": 1.0972270661401812e-05,
"loss": 0.697,
"step": 742
},
{
"epoch": 1.46,
"learning_rate": 1.09511990342217e-05,
"loss": 0.6798,
"step": 743
},
{
"epoch": 1.46,
"learning_rate": 1.0930123144160857e-05,
"loss": 0.7043,
"step": 744
},
{
"epoch": 1.46,
"learning_rate": 1.0909043085672703e-05,
"loss": 0.705,
"step": 745
},
{
"epoch": 1.46,
"learning_rate": 1.0887958953229349e-05,
"loss": 0.6515,
"step": 746
},
{
"epoch": 1.46,
"learning_rate": 1.0866870841321156e-05,
"loss": 0.6996,
"step": 747
},
{
"epoch": 1.47,
"learning_rate": 1.0845778844456319e-05,
"loss": 0.6817,
"step": 748
},
{
"epoch": 1.47,
"learning_rate": 1.0824683057160445e-05,
"loss": 0.68,
"step": 749
},
{
"epoch": 1.47,
"learning_rate": 1.0803583573976137e-05,
"loss": 0.6627,
"step": 750
},
{
"epoch": 1.47,
"learning_rate": 1.0782480489462546e-05,
"loss": 0.6957,
"step": 751
},
{
"epoch": 1.47,
"learning_rate": 1.0761373898194972e-05,
"loss": 0.6439,
"step": 752
},
{
"epoch": 1.48,
"learning_rate": 1.074026389476443e-05,
"loss": 0.7282,
"step": 753
},
{
"epoch": 1.48,
"learning_rate": 1.0719150573777226e-05,
"loss": 0.717,
"step": 754
},
{
"epoch": 1.48,
"learning_rate": 1.0698034029854533e-05,
"loss": 0.7458,
"step": 755
},
{
"epoch": 1.48,
"learning_rate": 1.0676914357631968e-05,
"loss": 0.6983,
"step": 756
},
{
"epoch": 1.48,
"learning_rate": 1.0655791651759174e-05,
"loss": 0.7865,
"step": 757
},
{
"epoch": 1.49,
"learning_rate": 1.0634666006899375e-05,
"loss": 0.7022,
"step": 758
},
{
"epoch": 1.49,
"learning_rate": 1.061353751772898e-05,
"loss": 0.6665,
"step": 759
},
{
"epoch": 1.49,
"learning_rate": 1.0592406278937143e-05,
"loss": 0.6883,
"step": 760
},
{
"epoch": 1.49,
"learning_rate": 1.0571272385225338e-05,
"loss": 0.7024,
"step": 761
},
{
"epoch": 1.49,
"learning_rate": 1.055013593130693e-05,
"loss": 0.7366,
"step": 762
},
{
"epoch": 1.5,
"learning_rate": 1.0528997011906775e-05,
"loss": 0.6668,
"step": 763
},
{
"epoch": 1.5,
"learning_rate": 1.0507855721760757e-05,
"loss": 0.6919,
"step": 764
},
{
"epoch": 1.5,
"learning_rate": 1.0486712155615408e-05,
"loss": 0.6995,
"step": 765
},
{
"epoch": 1.5,
"learning_rate": 1.046556640822744e-05,
"loss": 0.707,
"step": 766
},
{
"epoch": 1.5,
"learning_rate": 1.0444418574363354e-05,
"loss": 0.6681,
"step": 767
},
{
"epoch": 1.51,
"learning_rate": 1.0423268748798992e-05,
"loss": 0.7561,
"step": 768
},
{
"epoch": 1.51,
"learning_rate": 1.0402117026319128e-05,
"loss": 0.6574,
"step": 769
},
{
"epoch": 1.51,
"learning_rate": 1.0380963501717034e-05,
"loss": 0.7075,
"step": 770
},
{
"epoch": 1.51,
"learning_rate": 1.0359808269794063e-05,
"loss": 0.7032,
"step": 771
},
{
"epoch": 1.51,
"learning_rate": 1.0338651425359217e-05,
"loss": 0.7179,
"step": 772
},
{
"epoch": 1.52,
"learning_rate": 1.0317493063228716e-05,
"loss": 0.6361,
"step": 773
},
{
"epoch": 1.52,
"learning_rate": 1.0296333278225599e-05,
"loss": 0.6996,
"step": 774
},
{
"epoch": 1.52,
"learning_rate": 1.0275172165179268e-05,
"loss": 0.7163,
"step": 775
},
{
"epoch": 1.52,
"learning_rate": 1.0254009818925082e-05,
"loss": 0.7143,
"step": 776
},
{
"epoch": 1.52,
"learning_rate": 1.0232846334303925e-05,
"loss": 0.6926,
"step": 777
},
{
"epoch": 1.53,
"learning_rate": 1.0211681806161787e-05,
"loss": 0.6692,
"step": 778
},
{
"epoch": 1.53,
"learning_rate": 1.0190516329349328e-05,
"loss": 0.7666,
"step": 779
},
{
"epoch": 1.53,
"learning_rate": 1.0169349998721459e-05,
"loss": 0.6846,
"step": 780
},
{
"epoch": 1.53,
"learning_rate": 1.0148182909136928e-05,
"loss": 0.7338,
"step": 781
},
{
"epoch": 1.53,
"learning_rate": 1.0127015155457875e-05,
"loss": 0.7034,
"step": 782
},
{
"epoch": 1.54,
"learning_rate": 1.0105846832549417e-05,
"loss": 0.6797,
"step": 783
},
{
"epoch": 1.54,
"learning_rate": 1.0084678035279227e-05,
"loss": 0.6752,
"step": 784
},
{
"epoch": 1.54,
"learning_rate": 1.00635088585171e-05,
"loss": 0.7522,
"step": 785
},
{
"epoch": 1.54,
"learning_rate": 1.0042339397134528e-05,
"loss": 0.7096,
"step": 786
},
{
"epoch": 1.54,
"learning_rate": 1.0021169746004287e-05,
"loss": 0.7339,
"step": 787
},
{
"epoch": 1.55,
"learning_rate": 1e-05,
"loss": 0.7047,
"step": 788
},
{
"epoch": 1.55,
"learning_rate": 9.978830253995713e-06,
"loss": 0.7288,
"step": 789
},
{
"epoch": 1.55,
"learning_rate": 9.957660602865477e-06,
"loss": 0.6157,
"step": 790
},
{
"epoch": 1.55,
"learning_rate": 9.936491141482902e-06,
"loss": 0.764,
"step": 791
},
{
"epoch": 1.55,
"learning_rate": 9.915321964720773e-06,
"loss": 0.5981,
"step": 792
},
{
"epoch": 1.55,
"learning_rate": 9.894153167450586e-06,
"loss": 0.6801,
"step": 793
},
{
"epoch": 1.56,
"learning_rate": 9.872984844542128e-06,
"loss": 0.6251,
"step": 794
},
{
"epoch": 1.56,
"learning_rate": 9.851817090863073e-06,
"loss": 0.7278,
"step": 795
},
{
"epoch": 1.56,
"learning_rate": 9.830650001278543e-06,
"loss": 0.682,
"step": 796
},
{
"epoch": 1.56,
"learning_rate": 9.809483670650676e-06,
"loss": 0.7366,
"step": 797
},
{
"epoch": 1.56,
"learning_rate": 9.788318193838218e-06,
"loss": 0.7158,
"step": 798
},
{
"epoch": 1.57,
"learning_rate": 9.767153665696077e-06,
"loss": 0.6826,
"step": 799
},
{
"epoch": 1.57,
"learning_rate": 9.74599018107492e-06,
"loss": 0.7243,
"step": 800
},
{
"epoch": 1.57,
"learning_rate": 9.724827834820736e-06,
"loss": 0.6767,
"step": 801
},
{
"epoch": 1.57,
"learning_rate": 9.703666721774403e-06,
"loss": 0.6239,
"step": 802
},
{
"epoch": 1.57,
"learning_rate": 9.682506936771284e-06,
"loss": 0.6875,
"step": 803
},
{
"epoch": 1.58,
"learning_rate": 9.66134857464079e-06,
"loss": 0.7074,
"step": 804
},
{
"epoch": 1.58,
"learning_rate": 9.64019173020594e-06,
"loss": 0.751,
"step": 805
},
{
"epoch": 1.58,
"learning_rate": 9.619036498282968e-06,
"loss": 0.739,
"step": 806
},
{
"epoch": 1.58,
"learning_rate": 9.597882973680877e-06,
"loss": 0.7522,
"step": 807
},
{
"epoch": 1.58,
"learning_rate": 9.57673125120101e-06,
"loss": 0.6697,
"step": 808
},
{
"epoch": 1.59,
"learning_rate": 9.555581425636648e-06,
"loss": 0.7064,
"step": 809
},
{
"epoch": 1.59,
"learning_rate": 9.534433591772562e-06,
"loss": 0.741,
"step": 810
},
{
"epoch": 1.59,
"learning_rate": 9.513287844384594e-06,
"loss": 0.727,
"step": 811
},
{
"epoch": 1.59,
"learning_rate": 9.492144278239244e-06,
"loss": 0.7944,
"step": 812
},
{
"epoch": 1.59,
"learning_rate": 9.47100298809323e-06,
"loss": 0.6636,
"step": 813
},
{
"epoch": 1.6,
"learning_rate": 9.449864068693072e-06,
"loss": 0.652,
"step": 814
},
{
"epoch": 1.6,
"learning_rate": 9.428727614774667e-06,
"loss": 0.6426,
"step": 815
},
{
"epoch": 1.6,
"learning_rate": 9.407593721062858e-06,
"loss": 0.687,
"step": 816
},
{
"epoch": 1.6,
"learning_rate": 9.38646248227102e-06,
"loss": 0.7082,
"step": 817
},
{
"epoch": 1.6,
"learning_rate": 9.365333993100628e-06,
"loss": 0.6977,
"step": 818
},
{
"epoch": 1.61,
"learning_rate": 9.34420834824083e-06,
"loss": 0.688,
"step": 819
},
{
"epoch": 1.61,
"learning_rate": 9.323085642368032e-06,
"loss": 0.6521,
"step": 820
},
{
"epoch": 1.61,
"learning_rate": 9.301965970145472e-06,
"loss": 0.6808,
"step": 821
},
{
"epoch": 1.61,
"learning_rate": 9.280849426222778e-06,
"loss": 0.7322,
"step": 822
},
{
"epoch": 1.61,
"learning_rate": 9.259736105235572e-06,
"loss": 0.6768,
"step": 823
},
{
"epoch": 1.62,
"learning_rate": 9.23862610180503e-06,
"loss": 0.694,
"step": 824
},
{
"epoch": 1.62,
"learning_rate": 9.217519510537455e-06,
"loss": 0.6836,
"step": 825
},
{
"epoch": 1.62,
"learning_rate": 9.196416426023868e-06,
"loss": 0.6455,
"step": 826
},
{
"epoch": 1.62,
"learning_rate": 9.175316942839557e-06,
"loss": 0.7085,
"step": 827
},
{
"epoch": 1.62,
"learning_rate": 9.154221155543684e-06,
"loss": 0.7161,
"step": 828
},
{
"epoch": 1.63,
"learning_rate": 9.133129158678848e-06,
"loss": 0.7367,
"step": 829
},
{
"epoch": 1.63,
"learning_rate": 9.112041046770653e-06,
"loss": 0.7377,
"step": 830
},
{
"epoch": 1.63,
"learning_rate": 9.090956914327297e-06,
"loss": 0.7225,
"step": 831
},
{
"epoch": 1.63,
"learning_rate": 9.069876855839148e-06,
"loss": 0.7198,
"step": 832
},
{
"epoch": 1.63,
"learning_rate": 9.048800965778304e-06,
"loss": 0.7302,
"step": 833
},
{
"epoch": 1.64,
"learning_rate": 9.027729338598188e-06,
"loss": 0.7122,
"step": 834
},
{
"epoch": 1.64,
"learning_rate": 9.006662068733128e-06,
"loss": 0.6355,
"step": 835
},
{
"epoch": 1.64,
"learning_rate": 8.985599250597906e-06,
"loss": 0.6603,
"step": 836
},
{
"epoch": 1.64,
"learning_rate": 8.96454097858737e-06,
"loss": 0.7309,
"step": 837
},
{
"epoch": 1.64,
"learning_rate": 8.943487347075988e-06,
"loss": 0.7053,
"step": 838
},
{
"epoch": 1.65,
"learning_rate": 8.922438450417424e-06,
"loss": 0.6807,
"step": 839
},
{
"epoch": 1.65,
"learning_rate": 8.901394382944135e-06,
"loss": 0.6972,
"step": 840
},
{
"epoch": 1.65,
"learning_rate": 8.880355238966923e-06,
"loss": 0.6499,
"step": 841
},
{
"epoch": 1.65,
"learning_rate": 8.859321112774535e-06,
"loss": 0.6798,
"step": 842
},
{
"epoch": 1.65,
"learning_rate": 8.838292098633229e-06,
"loss": 0.7812,
"step": 843
},
{
"epoch": 1.65,
"learning_rate": 8.817268290786343e-06,
"loss": 0.6775,
"step": 844
},
{
"epoch": 1.66,
"learning_rate": 8.796249783453897e-06,
"loss": 0.6989,
"step": 845
},
{
"epoch": 1.66,
"learning_rate": 8.775236670832146e-06,
"loss": 0.713,
"step": 846
},
{
"epoch": 1.66,
"learning_rate": 8.754229047093173e-06,
"loss": 0.6839,
"step": 847
},
{
"epoch": 1.66,
"learning_rate": 8.733227006384459e-06,
"loss": 0.7053,
"step": 848
},
{
"epoch": 1.66,
"learning_rate": 8.71223064282847e-06,
"loss": 0.6516,
"step": 849
},
{
"epoch": 1.67,
"learning_rate": 8.691240050522215e-06,
"loss": 0.7223,
"step": 850
},
{
"epoch": 1.67,
"learning_rate": 8.670255323536858e-06,
"loss": 0.7051,
"step": 851
},
{
"epoch": 1.67,
"learning_rate": 8.649276555917267e-06,
"loss": 0.7047,
"step": 852
},
{
"epoch": 1.67,
"learning_rate": 8.6283038416816e-06,
"loss": 0.6676,
"step": 853
},
{
"epoch": 1.67,
"learning_rate": 8.607337274820888e-06,
"loss": 0.7052,
"step": 854
},
{
"epoch": 1.68,
"learning_rate": 8.586376949298622e-06,
"loss": 0.732,
"step": 855
},
{
"epoch": 1.68,
"learning_rate": 8.565422959050303e-06,
"loss": 0.6585,
"step": 856
},
{
"epoch": 1.68,
"learning_rate": 8.54447539798306e-06,
"loss": 0.7222,
"step": 857
},
{
"epoch": 1.68,
"learning_rate": 8.52353435997519e-06,
"loss": 0.7249,
"step": 858
},
{
"epoch": 1.68,
"learning_rate": 8.50259993887577e-06,
"loss": 0.7258,
"step": 859
},
{
"epoch": 1.69,
"learning_rate": 8.481672228504224e-06,
"loss": 0.6656,
"step": 860
},
{
"epoch": 1.69,
"learning_rate": 8.460751322649887e-06,
"loss": 0.6192,
"step": 861
},
{
"epoch": 1.69,
"learning_rate": 8.439837315071612e-06,
"loss": 0.7369,
"step": 862
},
{
"epoch": 1.69,
"learning_rate": 8.418930299497335e-06,
"loss": 0.6739,
"step": 863
},
{
"epoch": 1.69,
"learning_rate": 8.39803036962365e-06,
"loss": 0.6251,
"step": 864
},
{
"epoch": 1.7,
"learning_rate": 8.377137619115401e-06,
"loss": 0.7312,
"step": 865
},
{
"epoch": 1.7,
"learning_rate": 8.35625214160526e-06,
"loss": 0.6744,
"step": 866
},
{
"epoch": 1.7,
"learning_rate": 8.335374030693295e-06,
"loss": 0.7199,
"step": 867
},
{
"epoch": 1.7,
"learning_rate": 8.314503379946569e-06,
"loss": 0.6968,
"step": 868
},
{
"epoch": 1.7,
"learning_rate": 8.293640282898709e-06,
"loss": 0.6898,
"step": 869
},
{
"epoch": 1.71,
"learning_rate": 8.272784833049485e-06,
"loss": 0.653,
"step": 870
},
{
"epoch": 1.71,
"learning_rate": 8.251937123864405e-06,
"loss": 0.7179,
"step": 871
},
{
"epoch": 1.71,
"learning_rate": 8.231097248774273e-06,
"loss": 0.6474,
"step": 872
},
{
"epoch": 1.71,
"learning_rate": 8.210265301174796e-06,
"loss": 0.6794,
"step": 873
},
{
"epoch": 1.71,
"learning_rate": 8.18944137442615e-06,
"loss": 0.7214,
"step": 874
},
{
"epoch": 1.72,
"learning_rate": 8.16862556185256e-06,
"loss": 0.6832,
"step": 875
},
{
"epoch": 1.72,
"learning_rate": 8.14781795674189e-06,
"loss": 0.6768,
"step": 876
},
{
"epoch": 1.72,
"learning_rate": 8.12701865234523e-06,
"loss": 0.7436,
"step": 877
},
{
"epoch": 1.72,
"learning_rate": 8.106227741876447e-06,
"loss": 0.7026,
"step": 878
},
{
"epoch": 1.72,
"learning_rate": 8.085445318511814e-06,
"loss": 0.6797,
"step": 879
},
{
"epoch": 1.73,
"learning_rate": 8.064671475389562e-06,
"loss": 0.6304,
"step": 880
},
{
"epoch": 1.73,
"learning_rate": 8.043906305609454e-06,
"loss": 0.6498,
"step": 881
},
{
"epoch": 1.73,
"learning_rate": 8.023149902232404e-06,
"loss": 0.7137,
"step": 882
},
{
"epoch": 1.73,
"learning_rate": 8.002402358280028e-06,
"loss": 0.7033,
"step": 883
},
{
"epoch": 1.73,
"learning_rate": 7.981663766734228e-06,
"loss": 0.6866,
"step": 884
},
{
"epoch": 1.74,
"learning_rate": 7.960934220536812e-06,
"loss": 0.6916,
"step": 885
},
{
"epoch": 1.74,
"learning_rate": 7.940213812589018e-06,
"loss": 0.6677,
"step": 886
},
{
"epoch": 1.74,
"learning_rate": 7.919502635751154e-06,
"loss": 0.6322,
"step": 887
},
{
"epoch": 1.74,
"learning_rate": 7.898800782842153e-06,
"loss": 0.6705,
"step": 888
},
{
"epoch": 1.74,
"learning_rate": 7.878108346639152e-06,
"loss": 0.6155,
"step": 889
},
{
"epoch": 1.75,
"learning_rate": 7.857425419877097e-06,
"loss": 0.6893,
"step": 890
},
{
"epoch": 1.75,
"learning_rate": 7.836752095248318e-06,
"loss": 0.6839,
"step": 891
},
{
"epoch": 1.75,
"learning_rate": 7.816088465402097e-06,
"loss": 0.7049,
"step": 892
},
{
"epoch": 1.75,
"learning_rate": 7.795434622944282e-06,
"loss": 0.7028,
"step": 893
},
{
"epoch": 1.75,
"learning_rate": 7.774790660436857e-06,
"loss": 0.685,
"step": 894
},
{
"epoch": 1.75,
"learning_rate": 7.754156670397522e-06,
"loss": 0.6787,
"step": 895
},
{
"epoch": 1.76,
"learning_rate": 7.73353274529929e-06,
"loss": 0.6646,
"step": 896
},
{
"epoch": 1.76,
"learning_rate": 7.712918977570067e-06,
"loss": 0.6078,
"step": 897
},
{
"epoch": 1.76,
"learning_rate": 7.69231545959223e-06,
"loss": 0.6826,
"step": 898
},
{
"epoch": 1.76,
"learning_rate": 7.671722283702227e-06,
"loss": 0.6507,
"step": 899
},
{
"epoch": 1.76,
"learning_rate": 7.651139542190164e-06,
"loss": 0.6907,
"step": 900
},
{
"epoch": 1.77,
"learning_rate": 7.63056732729937e-06,
"loss": 0.7883,
"step": 901
},
{
"epoch": 1.77,
"learning_rate": 7.610005731226009e-06,
"loss": 0.6495,
"step": 902
},
{
"epoch": 1.77,
"learning_rate": 7.589454846118648e-06,
"loss": 0.6702,
"step": 903
},
{
"epoch": 1.77,
"learning_rate": 7.568914764077857e-06,
"loss": 0.7069,
"step": 904
},
{
"epoch": 1.77,
"learning_rate": 7.548385577155795e-06,
"loss": 0.6726,
"step": 905
},
{
"epoch": 1.78,
"learning_rate": 7.52786737735578e-06,
"loss": 0.7035,
"step": 906
},
{
"epoch": 1.78,
"learning_rate": 7.5073602566319045e-06,
"loss": 0.6734,
"step": 907
},
{
"epoch": 1.78,
"learning_rate": 7.486864306888608e-06,
"loss": 0.693,
"step": 908
},
{
"epoch": 1.78,
"learning_rate": 7.466379619980251e-06,
"loss": 0.6852,
"step": 909
},
{
"epoch": 1.78,
"learning_rate": 7.445906287710733e-06,
"loss": 0.6243,
"step": 910
},
{
"epoch": 1.79,
"learning_rate": 7.4254444018330685e-06,
"loss": 0.7032,
"step": 911
},
{
"epoch": 1.79,
"learning_rate": 7.404994054048962e-06,
"loss": 0.7084,
"step": 912
},
{
"epoch": 1.79,
"learning_rate": 7.384555336008419e-06,
"loss": 0.7557,
"step": 913
},
{
"epoch": 1.79,
"learning_rate": 7.364128339309326e-06,
"loss": 0.7065,
"step": 914
},
{
"epoch": 1.79,
"learning_rate": 7.3437131554970254e-06,
"loss": 0.7313,
"step": 915
},
{
"epoch": 1.8,
"learning_rate": 7.323309876063936e-06,
"loss": 0.6442,
"step": 916
},
{
"epoch": 1.8,
"learning_rate": 7.302918592449116e-06,
"loss": 0.717,
"step": 917
},
{
"epoch": 1.8,
"learning_rate": 7.282539396037868e-06,
"loss": 0.6825,
"step": 918
},
{
"epoch": 1.8,
"learning_rate": 7.262172378161326e-06,
"loss": 0.724,
"step": 919
},
{
"epoch": 1.8,
"learning_rate": 7.241817630096035e-06,
"loss": 0.6666,
"step": 920
},
{
"epoch": 1.81,
"learning_rate": 7.2214752430635625e-06,
"loss": 0.7376,
"step": 921
},
{
"epoch": 1.81,
"learning_rate": 7.201145308230075e-06,
"loss": 0.6905,
"step": 922
},
{
"epoch": 1.81,
"learning_rate": 7.180827916705935e-06,
"loss": 0.6808,
"step": 923
},
{
"epoch": 1.81,
"learning_rate": 7.160523159545285e-06,
"loss": 0.6739,
"step": 924
},
{
"epoch": 1.81,
"learning_rate": 7.140231127745661e-06,
"loss": 0.7001,
"step": 925
},
{
"epoch": 1.82,
"learning_rate": 7.119951912247545e-06,
"loss": 0.6609,
"step": 926
},
{
"epoch": 1.82,
"learning_rate": 7.099685603934e-06,
"loss": 0.6754,
"step": 927
},
{
"epoch": 1.82,
"learning_rate": 7.079432293630244e-06,
"loss": 0.7045,
"step": 928
},
{
"epoch": 1.82,
"learning_rate": 7.0591920721032314e-06,
"loss": 0.711,
"step": 929
},
{
"epoch": 1.82,
"learning_rate": 7.038965030061273e-06,
"loss": 0.6554,
"step": 930
},
{
"epoch": 1.83,
"learning_rate": 7.018751258153599e-06,
"loss": 0.653,
"step": 931
},
{
"epoch": 1.83,
"learning_rate": 6.998550846969981e-06,
"loss": 0.6283,
"step": 932
},
{
"epoch": 1.83,
"learning_rate": 6.978363887040308e-06,
"loss": 0.7227,
"step": 933
},
{
"epoch": 1.83,
"learning_rate": 6.9581904688341854e-06,
"loss": 0.6729,
"step": 934
},
{
"epoch": 1.83,
"learning_rate": 6.938030682760532e-06,
"loss": 0.6725,
"step": 935
},
{
"epoch": 1.84,
"learning_rate": 6.917884619167177e-06,
"loss": 0.6944,
"step": 936
},
{
"epoch": 1.84,
"learning_rate": 6.897752368340438e-06,
"loss": 0.674,
"step": 937
},
{
"epoch": 1.84,
"learning_rate": 6.8776340205047446e-06,
"loss": 0.6894,
"step": 938
},
{
"epoch": 1.84,
"learning_rate": 6.857529665822211e-06,
"loss": 0.7036,
"step": 939
},
{
"epoch": 1.84,
"learning_rate": 6.837439394392238e-06,
"loss": 0.7079,
"step": 940
},
{
"epoch": 1.85,
"learning_rate": 6.817363296251119e-06,
"loss": 0.7317,
"step": 941
},
{
"epoch": 1.85,
"learning_rate": 6.797301461371626e-06,
"loss": 0.6547,
"step": 942
},
{
"epoch": 1.85,
"learning_rate": 6.7772539796625995e-06,
"loss": 0.66,
"step": 943
},
{
"epoch": 1.85,
"learning_rate": 6.757220940968567e-06,
"loss": 0.7137,
"step": 944
},
{
"epoch": 1.85,
"learning_rate": 6.737202435069323e-06,
"loss": 0.7429,
"step": 945
},
{
"epoch": 1.85,
"learning_rate": 6.7171985516795315e-06,
"loss": 0.7124,
"step": 946
},
{
"epoch": 1.86,
"learning_rate": 6.697209380448333e-06,
"loss": 0.6986,
"step": 947
},
{
"epoch": 1.86,
"learning_rate": 6.677235010958916e-06,
"loss": 0.7411,
"step": 948
},
{
"epoch": 1.86,
"learning_rate": 6.6572755327281506e-06,
"loss": 0.7564,
"step": 949
},
{
"epoch": 1.86,
"learning_rate": 6.637331035206166e-06,
"loss": 0.6841,
"step": 950
},
{
"epoch": 1.86,
"learning_rate": 6.617401607775948e-06,
"loss": 0.6151,
"step": 951
},
{
"epoch": 1.87,
"learning_rate": 6.597487339752953e-06,
"loss": 0.6966,
"step": 952
},
{
"epoch": 1.87,
"learning_rate": 6.577588320384698e-06,
"loss": 0.6928,
"step": 953
},
{
"epoch": 1.87,
"learning_rate": 6.557704638850352e-06,
"loss": 0.6826,
"step": 954
},
{
"epoch": 1.87,
"learning_rate": 6.537836384260356e-06,
"loss": 0.6567,
"step": 955
},
{
"epoch": 1.87,
"learning_rate": 6.517983645656014e-06,
"loss": 0.6756,
"step": 956
},
{
"epoch": 1.88,
"learning_rate": 6.498146512009087e-06,
"loss": 0.7215,
"step": 957
},
{
"epoch": 1.88,
"learning_rate": 6.4783250722214066e-06,
"loss": 0.6939,
"step": 958
},
{
"epoch": 1.88,
"learning_rate": 6.458519415124473e-06,
"loss": 0.6799,
"step": 959
},
{
"epoch": 1.88,
"learning_rate": 6.438729629479045e-06,
"loss": 0.6584,
"step": 960
},
{
"epoch": 1.88,
"learning_rate": 6.418955803974763e-06,
"loss": 0.7723,
"step": 961
},
{
"epoch": 1.89,
"learning_rate": 6.399198027229732e-06,
"loss": 0.6232,
"step": 962
},
{
"epoch": 1.89,
"learning_rate": 6.3794563877901385e-06,
"loss": 0.6624,
"step": 963
},
{
"epoch": 1.89,
"learning_rate": 6.359730974129852e-06,
"loss": 0.7008,
"step": 964
},
{
"epoch": 1.89,
"learning_rate": 6.340021874650013e-06,
"loss": 0.6804,
"step": 965
},
{
"epoch": 1.89,
"learning_rate": 6.320329177678656e-06,
"loss": 0.6662,
"step": 966
},
{
"epoch": 1.9,
"learning_rate": 6.300652971470309e-06,
"loss": 0.7117,
"step": 967
},
{
"epoch": 1.9,
"learning_rate": 6.28099334420559e-06,
"loss": 0.6513,
"step": 968
},
{
"epoch": 1.9,
"learning_rate": 6.261350383990816e-06,
"loss": 0.6429,
"step": 969
},
{
"epoch": 1.9,
"learning_rate": 6.241724178857621e-06,
"loss": 0.6465,
"step": 970
},
{
"epoch": 1.9,
"learning_rate": 6.22211481676253e-06,
"loss": 0.6981,
"step": 971
},
{
"epoch": 1.91,
"learning_rate": 6.202522385586595e-06,
"loss": 0.7445,
"step": 972
},
{
"epoch": 1.91,
"learning_rate": 6.182946973135004e-06,
"loss": 0.6609,
"step": 973
},
{
"epoch": 1.91,
"learning_rate": 6.163388667136646e-06,
"loss": 0.6613,
"step": 974
},
{
"epoch": 1.91,
"learning_rate": 6.14384755524377e-06,
"loss": 0.6668,
"step": 975
},
{
"epoch": 1.91,
"learning_rate": 6.12432372503155e-06,
"loss": 0.7201,
"step": 976
},
{
"epoch": 1.92,
"learning_rate": 6.104817263997727e-06,
"loss": 0.6761,
"step": 977
},
{
"epoch": 1.92,
"learning_rate": 6.085328259562195e-06,
"loss": 0.6539,
"step": 978
},
{
"epoch": 1.92,
"learning_rate": 6.065856799066604e-06,
"loss": 0.7445,
"step": 979
},
{
"epoch": 1.92,
"learning_rate": 6.046402969773992e-06,
"loss": 0.6962,
"step": 980
},
{
"epoch": 1.92,
"learning_rate": 6.0269668588683815e-06,
"loss": 0.7402,
"step": 981
},
{
"epoch": 1.93,
"learning_rate": 6.007548553454379e-06,
"loss": 0.6802,
"step": 982
},
{
"epoch": 1.93,
"learning_rate": 5.9881481405568005e-06,
"loss": 0.6804,
"step": 983
},
{
"epoch": 1.93,
"learning_rate": 5.96876570712028e-06,
"loss": 0.6454,
"step": 984
},
{
"epoch": 1.93,
"learning_rate": 5.94940134000886e-06,
"loss": 0.6647,
"step": 985
},
{
"epoch": 1.93,
"learning_rate": 5.93005512600563e-06,
"loss": 0.5945,
"step": 986
},
{
"epoch": 1.94,
"learning_rate": 5.910727151812324e-06,
"loss": 0.6822,
"step": 987
},
{
"epoch": 1.94,
"learning_rate": 5.891417504048926e-06,
"loss": 0.6842,
"step": 988
},
{
"epoch": 1.94,
"learning_rate": 5.872126269253298e-06,
"loss": 0.6594,
"step": 989
},
{
"epoch": 1.94,
"learning_rate": 5.852853533880768e-06,
"loss": 0.6616,
"step": 990
},
{
"epoch": 1.94,
"learning_rate": 5.8335993843037695e-06,
"loss": 0.7044,
"step": 991
},
{
"epoch": 1.95,
"learning_rate": 5.8143639068114465e-06,
"loss": 0.7138,
"step": 992
},
{
"epoch": 1.95,
"learning_rate": 5.7951471876092445e-06,
"loss": 0.7043,
"step": 993
},
{
"epoch": 1.95,
"learning_rate": 5.7759493128185584e-06,
"loss": 0.6993,
"step": 994
},
{
"epoch": 1.95,
"learning_rate": 5.756770368476326e-06,
"loss": 0.6703,
"step": 995
},
{
"epoch": 1.95,
"learning_rate": 5.7376104405346376e-06,
"loss": 0.6855,
"step": 996
},
{
"epoch": 1.95,
"learning_rate": 5.7184696148603725e-06,
"loss": 0.663,
"step": 997
},
{
"epoch": 1.96,
"learning_rate": 5.699347977234799e-06,
"loss": 0.6208,
"step": 998
},
{
"epoch": 1.96,
"learning_rate": 5.680245613353181e-06,
"loss": 0.6145,
"step": 999
},
{
"epoch": 1.96,
"learning_rate": 5.66116260882442e-06,
"loss": 0.6527,
"step": 1000
},
{
"epoch": 1.96,
"learning_rate": 5.642099049170651e-06,
"loss": 0.697,
"step": 1001
},
{
"epoch": 1.96,
"learning_rate": 5.623055019826862e-06,
"loss": 0.7267,
"step": 1002
},
{
"epoch": 1.97,
"learning_rate": 5.604030606140522e-06,
"loss": 0.7182,
"step": 1003
},
{
"epoch": 1.97,
"learning_rate": 5.585025893371187e-06,
"loss": 0.6791,
"step": 1004
},
{
"epoch": 1.97,
"learning_rate": 5.566040966690115e-06,
"loss": 0.6514,
"step": 1005
},
{
"epoch": 1.97,
"learning_rate": 5.547075911179902e-06,
"loss": 0.6766,
"step": 1006
},
{
"epoch": 1.97,
"learning_rate": 5.528130811834081e-06,
"loss": 0.6695,
"step": 1007
},
{
"epoch": 1.98,
"learning_rate": 5.50920575355675e-06,
"loss": 0.6441,
"step": 1008
},
{
"epoch": 1.98,
"learning_rate": 5.490300821162204e-06,
"loss": 0.7135,
"step": 1009
},
{
"epoch": 1.98,
"learning_rate": 5.471416099374525e-06,
"loss": 0.6637,
"step": 1010
},
{
"epoch": 1.98,
"learning_rate": 5.452551672827227e-06,
"loss": 0.7493,
"step": 1011
},
{
"epoch": 1.98,
"learning_rate": 5.43370762606287e-06,
"loss": 0.6633,
"step": 1012
},
{
"epoch": 1.99,
"learning_rate": 5.41488404353268e-06,
"loss": 0.6803,
"step": 1013
},
{
"epoch": 1.99,
"learning_rate": 5.3960810095961705e-06,
"loss": 0.7045,
"step": 1014
},
{
"epoch": 1.99,
"learning_rate": 5.377298608520767e-06,
"loss": 0.6783,
"step": 1015
},
{
"epoch": 1.99,
"learning_rate": 5.358536924481416e-06,
"loss": 0.7126,
"step": 1016
},
{
"epoch": 1.99,
"learning_rate": 5.339796041560233e-06,
"loss": 0.6235,
"step": 1017
},
{
"epoch": 2.0,
"learning_rate": 5.321076043746108e-06,
"loss": 0.6988,
"step": 1018
},
{
"epoch": 2.0,
"learning_rate": 5.302377014934322e-06,
"loss": 0.6892,
"step": 1019
},
{
"epoch": 2.0,
"learning_rate": 5.283699038926199e-06,
"loss": 0.6588,
"step": 1020
},
{
"epoch": 2.0,
"learning_rate": 5.265042199428693e-06,
"loss": 0.5018,
"step": 1021
},
{
"epoch": 2.0,
"learning_rate": 5.246406580054051e-06,
"loss": 0.5078,
"step": 1022
},
{
"epoch": 2.01,
"learning_rate": 5.227792264319411e-06,
"loss": 0.4795,
"step": 1023
},
{
"epoch": 2.01,
"learning_rate": 5.20919933564644e-06,
"loss": 0.468,
"step": 1024
},
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.190627877360953e-06, |
|
"loss": 0.4694, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.172077972692553e-06, |
|
"loss": 0.5019, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.153549704774232e-06, |
|
"loss": 0.4863, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.1350431566420326e-06, |
|
"loss": 0.4599, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.116558411234651e-06, |
|
"loss": 0.4987, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.098095551393066e-06, |
|
"loss": 0.492, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.079654659860183e-06, |
|
"loss": 0.5082, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.061235819280456e-06, |
|
"loss": 0.5039, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.042839112199509e-06, |
|
"loss": 0.4771, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.024464621063773e-06, |
|
"loss": 0.4692, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.006112428220124e-06, |
|
"loss": 0.5044, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 4.9877826159154995e-06, |
|
"loss": 0.5087, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 4.9694752662965415e-06, |
|
"loss": 0.4798, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.951190461409214e-06, |
|
"loss": 0.5208, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.932928283198454e-06, |
|
"loss": 0.4939, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.914688813507798e-06, |
|
"loss": 0.4257, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.896472134078998e-06, |
|
"loss": 0.4926, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.878278326551682e-06, |
|
"loss": 0.4741, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.860107472462976e-06, |
|
"loss": 0.4516, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.8419596532471266e-06, |
|
"loss": 0.4315, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.823834950235158e-06, |
|
"loss": 0.4943, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.805733444654496e-06, |
|
"loss": 0.5163, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.7876552176286005e-06, |
|
"loss": 0.4338, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.7696003501766155e-06, |
|
"loss": 0.4369, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.751568923212983e-06, |
|
"loss": 0.4749, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.733561017547104e-06, |
|
"loss": 0.4832, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.715576713882971e-06, |
|
"loss": 0.4645, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.697616092818789e-06, |
|
"loss": 0.4244, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.679679234846636e-06, |
|
"loss": 0.462, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.661766220352098e-06, |
|
"loss": 0.4808, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.643877129613889e-06, |
|
"loss": 0.4819, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.626012042803521e-06, |
|
"loss": 0.437, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.608171039984928e-06, |
|
"loss": 0.4952, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.590354201114103e-06, |
|
"loss": 0.4745, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.5725616060387525e-06, |
|
"loss": 0.502, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.554793334497933e-06, |
|
"loss": 0.4569, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.5370494661216835e-06, |
|
"loss": 0.4782, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.519330080430687e-06, |
|
"loss": 0.5223, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.501635256835908e-06, |
|
"loss": 0.4394, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.483965074638219e-06, |
|
"loss": 0.412, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.46631961302808e-06, |
|
"loss": 0.4732, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.448698951085143e-06, |
|
"loss": 0.4879, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.43110316777793e-06, |
|
"loss": 0.4409, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.413532341963477e-06, |
|
"loss": 0.4812, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.395986552386947e-06, |
|
"loss": 0.4723, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.378465877681317e-06, |
|
"loss": 0.4799, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.360970396367012e-06, |
|
"loss": 0.4629, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.343500186851534e-06, |
|
"loss": 0.4704, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.326055327429143e-06, |
|
"loss": 0.5018, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.3086358962804885e-06, |
|
"loss": 0.5217, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.29124197147225e-06, |
|
"loss": 0.5263, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.273873630956811e-06, |
|
"loss": 0.4815, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.256530952571889e-06, |
|
"loss": 0.5306, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.2392140140401996e-06, |
|
"loss": 0.476, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.221922892969103e-06, |
|
"loss": 0.4927, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.204657666850245e-06, |
|
"loss": 0.4636, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.187418413059234e-06, |
|
"loss": 0.4632, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.170205208855281e-06, |
|
"loss": 0.4789, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.153018131380843e-06, |
|
"loss": 0.4737, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.135857257661295e-06, |
|
"loss": 0.4863, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.118722664604582e-06, |
|
"loss": 0.4595, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.101614429000857e-06, |
|
"loss": 0.4512, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.08453262752216e-06, |
|
"loss": 0.4231, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.067477336722063e-06, |
|
"loss": 0.4239, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.050448633035326e-06, |
|
"loss": 0.4831, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.033446592777558e-06, |
|
"loss": 0.4813, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.016471292144875e-06, |
|
"loss": 0.4585, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 3.999522807213549e-06, |
|
"loss": 0.5003, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 3.982601213939686e-06, |
|
"loss": 0.5151, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.965706588158865e-06, |
|
"loss": 0.4675, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.948839005585815e-06, |
|
"loss": 0.4688, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.931998541814069e-06, |
|
"loss": 0.446, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.915185272315616e-06, |
|
"loss": 0.4461, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.89839927244058e-06, |
|
"loss": 0.4324, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.8816406174168695e-06, |
|
"loss": 0.511, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.86490938234985e-06, |
|
"loss": 0.4729, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.848205642221995e-06, |
|
"loss": 0.4753, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.8315294718925656e-06, |
|
"loss": 0.4349, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.814880946097253e-06, |
|
"loss": 0.431, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.79826013944787e-06, |
|
"loss": 0.4125, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.7816671264320026e-06, |
|
"loss": 0.449, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.7651019814126656e-06, |
|
"loss": 0.4941, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.748564778627993e-06, |
|
"loss": 0.4352, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.732055592190893e-06, |
|
"loss": 0.5243, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.7155744960887054e-06, |
|
"loss": 0.4816, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.6991215641828903e-06, |
|
"loss": 0.474, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.682696870208685e-06, |
|
"loss": 0.4866, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.6663004877747765e-06, |
|
"loss": 0.4757, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.649932490362974e-06, |
|
"loss": 0.4751, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.6335929513278667e-06, |
|
"loss": 0.4621, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.6172819438965157e-06, |
|
"loss": 0.4522, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.600999541168113e-06, |
|
"loss": 0.4517, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.5847458161136485e-06, |
|
"loss": 0.5072, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.568520841575601e-06, |
|
"loss": 0.4187, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.5523246902676e-06, |
|
"loss": 0.5158, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.5361574347740912e-06, |
|
"loss": 0.5174, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.5200191475500313e-06, |
|
"loss": 0.5027, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.5039099009205503e-06, |
|
"loss": 0.5001, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.4878297670806303e-06, |
|
"loss": 0.4519, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.4717788180947855e-06, |
|
"loss": 0.4456, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.455757125896725e-06, |
|
"loss": 0.4357, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.439764762289051e-06, |
|
"loss": 0.5258, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.423801798942926e-06, |
|
"loss": 0.485, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.407868307397747e-06, |
|
"loss": 0.4596, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.3919643590608355e-06, |
|
"loss": 0.4605, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.376090025207115e-06, |
|
"loss": 0.527, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.360245376978779e-06, |
|
"loss": 0.5222, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.3444304853849918e-06, |
|
"loss": 0.4802, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.3286454213015582e-06, |
|
"loss": 0.4119, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.312890255470609e-06, |
|
"loss": 0.4704, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.29716505850028e-06, |
|
"loss": 0.4777, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.2814699008644068e-06, |
|
"loss": 0.5015, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.265804852902188e-06, |
|
"loss": 0.505, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.250169984817897e-06, |
|
"loss": 0.4987, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.234565366680542e-06, |
|
"loss": 0.5072, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.2189910684235705e-06, |
|
"loss": 0.4286, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.2034471598445505e-06, |
|
"loss": 0.4485, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.187933710604847e-06, |
|
"loss": 0.4342, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.1724507902293267e-06, |
|
"loss": 0.4417, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.1569984681060375e-06, |
|
"loss": 0.4898, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.1415768134858947e-06, |
|
"loss": 0.4993, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.1261858954823798e-06, |
|
"loss": 0.4508, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.1108257830712253e-06, |
|
"loss": 0.4708, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.0954965450900963e-06, |
|
"loss": 0.4894, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.080198250238302e-06, |
|
"loss": 0.4292, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.064930967076477e-06, |
|
"loss": 0.4891, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.0496947640262655e-06, |
|
"loss": 0.4543, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.0344897093700333e-06, |
|
"loss": 0.5044, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.019315871250543e-06, |
|
"loss": 0.4832, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.0041733176706668e-06, |
|
"loss": 0.5226, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 2.9890621164930656e-06, |
|
"loss": 0.4874, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.9739823354398958e-06, |
|
"loss": 0.4382, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.9589340420924983e-06, |
|
"loss": 0.4743, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.943917303891107e-06, |
|
"loss": 0.4667, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.9289321881345257e-06, |
|
"loss": 0.465, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.913978761979849e-06, |
|
"loss": 0.4455, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.8990570924421526e-06, |
|
"loss": 0.4802, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.8841672463941827e-06, |
|
"loss": 0.4512, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.8693092905660757e-06, |
|
"loss": 0.4306, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.854483291545046e-06, |
|
"loss": 0.5132, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.8396893157750893e-06, |
|
"loss": 0.513, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.8249274295566863e-06, |
|
"loss": 0.427, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.810197699046513e-06, |
|
"loss": 0.545, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.795500190257122e-06, |
|
"loss": 0.4858, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.780834969056676e-06, |
|
"loss": 0.4732, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.766202101168628e-06, |
|
"loss": 0.5097, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.7516016521714417e-06, |
|
"loss": 0.5365, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.7370336874982952e-06, |
|
"loss": 0.478, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.7224982724367775e-06, |
|
"loss": 0.5304, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.7079954721286108e-06, |
|
"loss": 0.4352, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.693525351569347e-06, |
|
"loss": 0.4965, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.6790879756080845e-06, |
|
"loss": 0.468, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.664683408947173e-06, |
|
"loss": 0.4592, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.6503117161419246e-06, |
|
"loss": 0.4686, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.6359729616003184e-06, |
|
"loss": 0.4987, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.6216672095827267e-06, |
|
"loss": 0.4414, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.607394524201616e-06, |
|
"loss": 0.5126, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.5931549694212545e-06, |
|
"loss": 0.479, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.578948609057446e-06, |
|
"loss": 0.495, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.564775506777214e-06, |
|
"loss": 0.4846, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.550635726098547e-06, |
|
"loss": 0.4961, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.536529330390095e-06, |
|
"loss": 0.4479, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.5224563828708904e-06, |
|
"loss": 0.4847, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.5084169466100626e-06, |
|
"loss": 0.4822, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.4944110845265646e-06, |
|
"loss": 0.4491, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.480438859388873e-06, |
|
"loss": 0.3964, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.466500333814723e-06, |
|
"loss": 0.4545, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.452595570270828e-06, |
|
"loss": 0.5149, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.438724631072581e-06, |
|
"loss": 0.4858, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.424887578383799e-06, |
|
"loss": 0.4764, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.411084474216434e-06, |
|
"loss": 0.4876, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.397315380430284e-06, |
|
"loss": 0.4595, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.3835803587327365e-06, |
|
"loss": 0.4792, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.36987947067848e-06, |
|
"loss": 0.447, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.356212777669229e-06, |
|
"loss": 0.4934, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.342580340953451e-06, |
|
"loss": 0.4825, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.328982221626087e-06, |
|
"loss": 0.4789, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.3154184806282863e-06, |
|
"loss": 0.4874, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.3018891787471308e-06, |
|
"loss": 0.5086, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.2883943766153503e-06, |
|
"loss": 0.4656, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.2749341347110697e-06, |
|
"loss": 0.4276, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.261508513357532e-06, |
|
"loss": 0.4495, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.2481175727228142e-06, |
|
"loss": 0.4449, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.234761372819577e-06, |
|
"loss": 0.4749, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.2214399735047856e-06, |
|
"loss": 0.4182, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.208153434479442e-06, |
|
"loss": 0.4718, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.194901815288324e-06, |
|
"loss": 0.4782, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.1816851753197023e-06, |
|
"loss": 0.4644, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.168503573805096e-06, |
|
"loss": 0.4377, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.155357069818995e-06, |
|
"loss": 0.4816, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.1422457222785875e-06, |
|
"loss": 0.4573, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.129169589943514e-06, |
|
"loss": 0.4495, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.1161287314155955e-06, |
|
"loss": 0.4526, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.1031232051385606e-06, |
|
"loss": 0.5039, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.0901530693978055e-06, |
|
"loss": 0.4778, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.077218382320113e-06, |
|
"loss": 0.4648, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.0643192018734027e-06, |
|
"loss": 0.4543, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.0514555858664663e-06, |
|
"loss": 0.4902, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.038627591948715e-06, |
|
"loss": 0.4463, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.025835277609908e-06, |
|
"loss": 0.4224, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.0130787001799103e-06, |
|
"loss": 0.4838, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.000357916828428e-06, |
|
"loss": 0.448, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.987672984564749e-06, |
|
"loss": 0.4837, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.975023960237499e-06, |
|
"loss": 0.4941, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.962410900534367e-06, |
|
"loss": 0.4567, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.949833861981877e-06, |
|
"loss": 0.4767, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.9372929009451115e-06, |
|
"loss": 0.48, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.924788073627474e-06, |
|
"loss": 0.481, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.9123194360704288e-06, |
|
"loss": 0.4632, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.8998870441532569e-06, |
|
"loss": 0.4621, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.887490953592791e-06, |
|
"loss": 0.4372, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.875131219943187e-06, |
|
"loss": 0.4829, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.8628078985956622e-06, |
|
"loss": 0.4917, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.8505210447782418e-06, |
|
"loss": 0.4202, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.8382707135555255e-06, |
|
"loss": 0.5258, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.8260569598284361e-06, |
|
"loss": 0.4524, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.8138798383339618e-06, |
|
"loss": 0.504, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.8017394036449276e-06, |
|
"loss": 0.4862, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.7896357101697405e-06, |
|
"loss": 0.5511, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.7775688121521518e-06, |
|
"loss": 0.5067, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.7655387636710097e-06, |
|
"loss": 0.4571, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.7535456186400123e-06, |
|
"loss": 0.4469, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.7415894308074777e-06, |
|
"loss": 0.457, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.7296702537560994e-06, |
|
"loss": 0.5135, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.7177881409026954e-06, |
|
"loss": 0.4279, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.7059431454979825e-06, |
|
"loss": 0.4481, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.6941353206263366e-06, |
|
"loss": 0.4676, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.682364719205537e-06, |
|
"loss": 0.4786, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.6706313939865604e-06, |
|
"loss": 0.4974, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.6589353975533174e-06, |
|
"loss": 0.4899, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.6472767823224235e-06, |
|
"loss": 0.4866, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.6356556005429758e-06, |
|
"loss": 0.4745, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.6240719042963004e-06, |
|
"loss": 0.4887, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.6125257454957365e-06, |
|
"loss": 0.4655, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.6010171758863958e-06, |
|
"loss": 0.4603, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.589546247044923e-06, |
|
"loss": 0.5032, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.5781130103792797e-06, |
|
"loss": 0.4366, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.5667175171285054e-06, |
|
"loss": 0.4643, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.5553598183624885e-06, |
|
"loss": 0.4734, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.5440399649817384e-06, |
|
"loss": 0.4551, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.5327580077171589e-06, |
|
"loss": 0.4418, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.5215139971298131e-06, |
|
"loss": 0.4429, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.5103079836107082e-06, |
|
"loss": 0.4758, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.499140017380566e-06, |
|
"loss": 0.5256, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.4880101484895882e-06, |
|
"loss": 0.4653, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.4769184268172465e-06, |
|
"loss": 0.5014, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.4658649020720539e-06, |
|
"loss": 0.4249, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.4548496237913335e-06, |
|
"loss": 0.4499, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.4438726413410077e-06, |
|
"loss": 0.4787, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.4329340039153738e-06, |
|
"loss": 0.4617, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.4220337605368816e-06, |
|
"loss": 0.433, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.4111719600559159e-06, |
|
"loss": 0.4376, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.4003486511505715e-06, |
|
"loss": 0.5173, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.3895638823264447e-06, |
|
"loss": 0.4466, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.3788177019164117e-06, |
|
"loss": 0.4571, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.3681101580804057e-06, |
|
"loss": 0.403, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.3574412988052132e-06, |
|
"loss": 0.4925, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.3468111719042497e-06, |
|
"loss": 0.5047, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.3362198250173454e-06, |
|
"loss": 0.4438, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.3256673056105385e-06, |
|
"loss": 0.4347, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.3151536609758586e-06, |
|
"loss": 0.458, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.3046789382311132e-06, |
|
"loss": 0.4496, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.294243184319679e-06, |
|
"loss": 0.4666, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.2838464460102862e-06, |
|
"loss": 0.4909, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.273488769896819e-06, |
|
"loss": 0.5224, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.2631702023980997e-06, |
|
"loss": 0.4497, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.252890789757678e-06, |
|
"loss": 0.429, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.2426505780436326e-06, |
|
"loss": 0.5406, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.2324496131483588e-06, |
|
"loss": 0.509, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.2222879407883592e-06, |
|
"loss": 0.4685, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.2121656065040498e-06, |
|
"loss": 0.4794, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.2020826556595477e-06, |
|
"loss": 0.4763, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.1920391334424675e-06, |
|
"loss": 0.4752, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.182035084863724e-06, |
|
"loss": 0.457, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.1720705547573263e-06, |
|
"loss": 0.4407, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.1621455877801757e-06, |
|
"loss": 0.478, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.1522602284118733e-06, |
|
"loss": 0.447, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.1424145209545079e-06, |
|
"loss": 0.4999, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.1326085095324712e-06, |
|
"loss": 0.4919, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.1228422380922533e-06, |
|
"loss": 0.5095, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.1131157504022394e-06, |
|
"loss": 0.438, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.1034290900525279e-06, |
|
"loss": 0.4226, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.0937823004547254e-06, |
|
"loss": 0.5428, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.0841754248417535e-06, |
|
"loss": 0.4432, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.0746085062676548e-06, |
|
"loss": 0.4787, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.065081587607406e-06, |
|
"loss": 0.4662, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.0555947115567123e-06, |
|
"loss": 0.4909, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.0461479206318303e-06, |
|
"loss": 0.4662, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.0367412571693746e-06, |
|
"loss": 0.477, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.0273747633261144e-06, |
|
"loss": 0.4496, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.0180484810788082e-06, |
|
"loss": 0.4565, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.0087624522239902e-06, |
|
"loss": 0.4446, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 9.995167183778055e-07, |
|
"loss": 0.4533, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 9.903113209758098e-07, |
|
"loss": 0.4226, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.811463012727863e-07, |
|
"loss": 0.5086, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.720217003425648e-07, |
|
"loss": 0.5016, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.629375590778344e-07, |
|
"loss": 0.4919, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.538939181899565e-07, |
|
"loss": 0.5209, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.448908182087913e-07, |
|
"loss": 0.4744, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.359282994825125e-07, |
|
"loss": 0.4767, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.270064021774194e-07, |
|
"loss": 0.438, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.181251662777668e-07, |
|
"loss": 0.4188, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.092846315855841e-07, |
|
"loss": 0.5006, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.004848377204878e-07, |
|
"loss": 0.487, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.917258241195181e-07, |
|
"loss": 0.4957, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.830076300369517e-07, |
|
"loss": 0.4685, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.74330294544129e-07, |
|
"loss": 0.5051, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.656938565292805e-07, |
|
"loss": 0.4391, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.570983546973466e-07, |
|
"loss": 0.4274, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.485438275698154e-07, |
|
"loss": 0.4839, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.4003031348454e-07, |
|
"loss": 0.5098, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.315578505955679e-07, |
|
"loss": 0.4367, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.231264768729753e-07, |
|
"loss": 0.426, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.14736230102694e-07, |
|
"loss": 0.4414, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 8.063871478863383e-07, |
|
"loss": 0.4452, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 7.980792676410409e-07, |
|
"loss": 0.438, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 7.898126265992912e-07, |
|
"loss": 0.4272, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 7.815872618087506e-07, |
|
"loss": 0.4749, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 7.734032101321054e-07, |
|
"loss": 0.4283, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.652605082468912e-07, |
|
"loss": 0.4709, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.571591926453292e-07, |
|
"loss": 0.4376, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.490992996341662e-07, |
|
"loss": 0.4278, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.410808653345092e-07, |
|
"loss": 0.533, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.331039256816664e-07, |
|
"loss": 0.4748, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 7.251685164249823e-07, |
|
"loss": 0.4575, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 7.17274673127677e-07, |
|
"loss": 0.4213, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 7.094224311666953e-07, |
|
"loss": 0.4878, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 7.016118257325399e-07, |
|
"loss": 0.4468, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 6.938428918291107e-07, |
|
"loss": 0.5133, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 6.861156642735578e-07, |
|
"loss": 0.4635, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.784301776961222e-07, |
|
"loss": 0.5525, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.707864665399733e-07, |
|
"loss": 0.4544, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.631845650610647e-07, |
|
"loss": 0.4622, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.556245073279777e-07, |
|
"loss": 0.4354, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.481063272217603e-07, |
|
"loss": 0.4639, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.406300584357905e-07, |
|
"loss": 0.4796, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.331957344756123e-07, |
|
"loss": 0.4981, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.258033886587911e-07, |
|
"loss": 0.4776, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.184530541147649e-07, |
|
"loss": 0.4705, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.111447637846923e-07, |
|
"loss": 0.4115, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 6.038785504213096e-07, |
|
"loss": 0.3974, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.966544465887803e-07, |
|
"loss": 0.47, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.894724846625499e-07, |
|
"loss": 0.501, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.823326968292009e-07, |
|
"loss": 0.5106, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.752351150863133e-07, |
|
"loss": 0.4754, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.681797712423099e-07, |
|
"loss": 0.4708, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.611666969163243e-07, |
|
"loss": 0.4429, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.541959235380578e-07, |
|
"loss": 0.4753, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.472674823476343e-07, |
|
"loss": 0.4835, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.403814043954592e-07, |
|
"loss": 0.4537, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.335377205420911e-07, |
|
"loss": 0.4342, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.267364614580861e-07, |
|
"loss": 0.4493, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.199776576238791e-07, |
|
"loss": 0.4988, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.132613393296293e-07, |
|
"loss": 0.3887, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.065875366751016e-07, |
|
"loss": 0.4718, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.999562795695201e-07, |
|
"loss": 0.515, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.933675977314334e-07, |
|
"loss": 0.4398, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.868215206885918e-07, |
|
"loss": 0.477, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.80318077777805e-07, |
|
"loss": 0.4767, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.738572981448153e-07, |
|
"loss": 0.4699, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.6743921074416387e-07, |
|
"loss": 0.5079, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.61063844339068e-07, |
|
"loss": 0.497, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.547312275012794e-07, |
|
"loss": 0.4395, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.4844138861096954e-07, |
|
"loss": 0.4369, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.421943558565966e-07, |
|
"loss": 0.4174, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.359901572347758e-07, |
|
"loss": 0.4954, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.298288205501633e-07, |
|
"loss": 0.4736, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.2371037341531805e-07, |
|
"loss": 0.4572, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.176348432505917e-07, |
|
"loss": 0.5145, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.116022572839984e-07, |
|
"loss": 0.3836, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.0561264255109424e-07, |
|
"loss": 0.4447, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 3.996660258948548e-07, |
|
"loss": 0.4725, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 3.937624339655599e-07, |
|
"loss": 0.4865, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 3.879018932206624e-07, |
|
"loss": 0.4848, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 3.820844299246851e-07, |
|
"loss": 0.5038, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 3.7631007014909294e-07, |
|
"loss": 0.4953, |
|
"step": 1400 |
|
} |
|
  ],
  "max_steps": 1530,
  "num_train_epochs": 3,
  "total_flos": 3.048437414847578e+18,
  "trial_name": null,
  "trial_params": null
}