{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 285,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0035087719298245615, "grad_norm": 18.65219621501764, "learning_rate": 0.0, "loss": 1.9653, "step": 1},
    {"epoch": 0.007017543859649123, "grad_norm": 5.5068522642682325, "learning_rate": 2.5237190142858296e-05, "loss": 1.4912, "step": 2},
    {"epoch": 0.010526315789473684, "grad_norm": 5.300187955783763, "learning_rate": 4e-05, "loss": 1.488, "step": 3},
    {"epoch": 0.014035087719298246, "grad_norm": 3.607547540937298, "learning_rate": 5.047438028571659e-05, "loss": 1.3376, "step": 4},
    {"epoch": 0.017543859649122806, "grad_norm": 2.8856871756952254, "learning_rate": 5.859894082871708e-05, "loss": 1.2775, "step": 5},
    {"epoch": 0.021052631578947368, "grad_norm": 2.1943773684803283, "learning_rate": 6.52371901428583e-05, "loss": 1.1621, "step": 6},
    {"epoch": 0.02456140350877193, "grad_norm": 1.6148482128986932, "learning_rate": 7.08497499664569e-05, "loss": 1.1393, "step": 7},
    {"epoch": 0.028070175438596492, "grad_norm": 1.4378600160323183, "learning_rate": 7.571157042857488e-05, "loss": 1.1342, "step": 8},
    {"epoch": 0.031578947368421054, "grad_norm": 1.3979572631886956, "learning_rate": 8e-05, "loss": 1.1196, "step": 9},
    {"epoch": 0.03508771929824561, "grad_norm": 1.3666278253029815, "learning_rate": 8e-05, "loss": 1.1479, "step": 10},
    {"epoch": 0.03859649122807018, "grad_norm": 1.2483549253543214, "learning_rate": 7.971014492753623e-05, "loss": 1.1125, "step": 11},
    {"epoch": 0.042105263157894736, "grad_norm": 1.347256910957925, "learning_rate": 7.942028985507246e-05, "loss": 1.1588, "step": 12},
    {"epoch": 0.0456140350877193, "grad_norm": 1.3748876632936469, "learning_rate": 7.91304347826087e-05, "loss": 1.0846, "step": 13},
    {"epoch": 0.04912280701754386, "grad_norm": 1.329787006205099, "learning_rate": 7.884057971014494e-05, "loss": 1.1839, "step": 14},
    {"epoch": 0.05263157894736842, "grad_norm": 1.1855019705035863, "learning_rate": 7.855072463768117e-05, "loss": 1.1546, "step": 15},
    {"epoch": 0.056140350877192984, "grad_norm": 1.2020228064951217, "learning_rate": 7.82608695652174e-05, "loss": 1.0999, "step": 16},
    {"epoch": 0.05964912280701754, "grad_norm": 1.2879067395522685, "learning_rate": 7.797101449275363e-05, "loss": 1.1466, "step": 17},
    {"epoch": 0.06315789473684211, "grad_norm": 1.256590992120541, "learning_rate": 7.768115942028987e-05, "loss": 1.1593, "step": 18},
    {"epoch": 0.06666666666666667, "grad_norm": 1.1654027316591642, "learning_rate": 7.73913043478261e-05, "loss": 1.1806, "step": 19},
    {"epoch": 0.07017543859649122, "grad_norm": 1.2007307897010868, "learning_rate": 7.710144927536232e-05, "loss": 1.1416, "step": 20},
    {"epoch": 0.07368421052631578, "grad_norm": 1.1499545393774593, "learning_rate": 7.681159420289856e-05, "loss": 1.1203, "step": 21},
    {"epoch": 0.07719298245614035, "grad_norm": 1.0810536737354983, "learning_rate": 7.652173913043479e-05, "loss": 1.0875, "step": 22},
    {"epoch": 0.08070175438596491, "grad_norm": 1.249217128847186, "learning_rate": 7.623188405797102e-05, "loss": 1.1878, "step": 23},
    {"epoch": 0.08421052631578947, "grad_norm": 1.171401194067982, "learning_rate": 7.594202898550726e-05, "loss": 1.1416, "step": 24},
    {"epoch": 0.08771929824561403, "grad_norm": 1.1864422136018933, "learning_rate": 7.565217391304349e-05, "loss": 1.1559, "step": 25},
    {"epoch": 0.0912280701754386, "grad_norm": 1.1084100899911409, "learning_rate": 7.536231884057971e-05, "loss": 1.127, "step": 26},
    {"epoch": 0.09473684210526316, "grad_norm": 1.0848431056969332, "learning_rate": 7.507246376811594e-05, "loss": 1.1621, "step": 27},
    {"epoch": 0.09824561403508772, "grad_norm": 1.160465065160364, "learning_rate": 7.478260869565218e-05, "loss": 1.1086, "step": 28},
    {"epoch": 0.10175438596491228, "grad_norm": 1.0888824123502896, "learning_rate": 7.449275362318841e-05, "loss": 1.1402, "step": 29},
    {"epoch": 0.10526315789473684, "grad_norm": 1.136530946229479, "learning_rate": 7.420289855072465e-05, "loss": 1.1533, "step": 30},
    {"epoch": 0.10877192982456141, "grad_norm": 1.1193975180437097, "learning_rate": 7.391304347826088e-05, "loss": 1.1194, "step": 31},
    {"epoch": 0.11228070175438597, "grad_norm": 1.032391835314362, "learning_rate": 7.36231884057971e-05, "loss": 1.086, "step": 32},
    {"epoch": 0.11578947368421053, "grad_norm": 1.1734719012857107, "learning_rate": 7.333333333333333e-05, "loss": 1.1515, "step": 33},
    {"epoch": 0.11929824561403508, "grad_norm": 1.1223429839275594, "learning_rate": 7.304347826086957e-05, "loss": 1.1067, "step": 34},
    {"epoch": 0.12280701754385964, "grad_norm": 1.0919749212433656, "learning_rate": 7.27536231884058e-05, "loss": 1.1298, "step": 35},
    {"epoch": 0.12631578947368421, "grad_norm": 1.061313889855867, "learning_rate": 7.246376811594204e-05, "loss": 1.1323, "step": 36},
    {"epoch": 0.12982456140350876, "grad_norm": 0.9886502393238654, "learning_rate": 7.217391304347827e-05, "loss": 1.1358, "step": 37},
    {"epoch": 0.13333333333333333, "grad_norm": 1.090149070089068, "learning_rate": 7.18840579710145e-05, "loss": 1.1083, "step": 38},
    {"epoch": 0.1368421052631579, "grad_norm": 1.2141824886927202, "learning_rate": 7.159420289855072e-05, "loss": 1.1659, "step": 39},
    {"epoch": 0.14035087719298245, "grad_norm": 1.2804981481033841, "learning_rate": 7.130434782608696e-05, "loss": 1.1769, "step": 40},
    {"epoch": 0.14385964912280702, "grad_norm": 1.085391641580103, "learning_rate": 7.101449275362319e-05, "loss": 1.1307, "step": 41},
    {"epoch": 0.14736842105263157, "grad_norm": 1.0432511632089205, "learning_rate": 7.072463768115943e-05, "loss": 1.1152, "step": 42},
    {"epoch": 0.15087719298245614, "grad_norm": 1.0805636921488617, "learning_rate": 7.043478260869566e-05, "loss": 1.1131, "step": 43},
    {"epoch": 0.1543859649122807, "grad_norm": 1.0831222641710494, "learning_rate": 7.014492753623189e-05, "loss": 1.1585, "step": 44},
    {"epoch": 0.15789473684210525, "grad_norm": 0.9919690920164701, "learning_rate": 6.985507246376811e-05, "loss": 1.1235, "step": 45},
    {"epoch": 0.16140350877192983, "grad_norm": 0.9498489248223517, "learning_rate": 6.956521739130436e-05, "loss": 1.0856, "step": 46},
    {"epoch": 0.1649122807017544, "grad_norm": 1.0276715714307674, "learning_rate": 6.927536231884058e-05, "loss": 1.1378, "step": 47},
    {"epoch": 0.16842105263157894, "grad_norm": 1.094458012231917, "learning_rate": 6.898550724637682e-05, "loss": 1.117, "step": 48},
    {"epoch": 0.17192982456140352, "grad_norm": 1.089782170199694, "learning_rate": 6.869565217391305e-05, "loss": 1.1471, "step": 49},
    {"epoch": 0.17543859649122806, "grad_norm": 1.0908602591310133, "learning_rate": 6.840579710144928e-05, "loss": 1.1711, "step": 50},
    {"epoch": 0.17894736842105263, "grad_norm": 1.126099914721245, "learning_rate": 6.811594202898552e-05, "loss": 1.1437, "step": 51},
    {"epoch": 0.1824561403508772, "grad_norm": 1.1004469394026717, "learning_rate": 6.782608695652175e-05, "loss": 1.1502, "step": 52},
    {"epoch": 0.18596491228070175, "grad_norm": 1.0690976698679064, "learning_rate": 6.753623188405797e-05, "loss": 1.1436, "step": 53},
    {"epoch": 0.18947368421052632, "grad_norm": 1.0893372270742567, "learning_rate": 6.72463768115942e-05, "loss": 1.1028, "step": 54},
    {"epoch": 0.19298245614035087, "grad_norm": 1.038406028318434, "learning_rate": 6.695652173913044e-05, "loss": 1.1601, "step": 55},
    {"epoch": 0.19649122807017544, "grad_norm": 0.9960149874705063, "learning_rate": 6.666666666666667e-05, "loss": 1.102, "step": 56},
    {"epoch": 0.2, "grad_norm": 1.073973228867499, "learning_rate": 6.637681159420291e-05, "loss": 1.1213, "step": 57},
    {"epoch": 0.20350877192982456, "grad_norm": 1.1952354445407958, "learning_rate": 6.608695652173914e-05, "loss": 1.1046, "step": 58},
    {"epoch": 0.20701754385964913, "grad_norm": 1.0175331854073473, "learning_rate": 6.579710144927537e-05, "loss": 1.1192, "step": 59},
    {"epoch": 0.21052631578947367, "grad_norm": 1.053567224648277, "learning_rate": 6.550724637681159e-05, "loss": 1.1514, "step": 60},
    {"epoch": 0.21403508771929824, "grad_norm": 1.1549538449027954, "learning_rate": 6.521739130434783e-05, "loss": 1.161, "step": 61},
    {"epoch": 0.21754385964912282, "grad_norm": 0.9980361090477836, "learning_rate": 6.492753623188406e-05, "loss": 1.0715, "step": 62},
    {"epoch": 0.22105263157894736, "grad_norm": 1.0664047714210805, "learning_rate": 6.46376811594203e-05, "loss": 1.172, "step": 63},
    {"epoch": 0.22456140350877193, "grad_norm": 1.0338684936573976, "learning_rate": 6.434782608695653e-05, "loss": 1.0547, "step": 64},
    {"epoch": 0.22807017543859648, "grad_norm": 1.0164199027592216, "learning_rate": 6.405797101449276e-05, "loss": 1.0821, "step": 65},
    {"epoch": 0.23157894736842105, "grad_norm": 1.08287922650492, "learning_rate": 6.376811594202898e-05, "loss": 1.112, "step": 66},
    {"epoch": 0.23508771929824562, "grad_norm": 1.012260870534961, "learning_rate": 6.347826086956523e-05, "loss": 1.1339, "step": 67},
    {"epoch": 0.23859649122807017, "grad_norm": 0.9284957156873009, "learning_rate": 6.318840579710145e-05, "loss": 1.0655, "step": 68},
    {"epoch": 0.24210526315789474, "grad_norm": 0.9699566883456087, "learning_rate": 6.28985507246377e-05, "loss": 1.0447, "step": 69},
    {"epoch": 0.24561403508771928, "grad_norm": 0.9953128820241032, "learning_rate": 6.260869565217392e-05, "loss": 1.0979, "step": 70},
    {"epoch": 0.24912280701754386, "grad_norm": 1.2255506094662556, "learning_rate": 6.231884057971015e-05, "loss": 1.1266, "step": 71},
    {"epoch": 0.25263157894736843, "grad_norm": 1.1053469341884643, "learning_rate": 6.202898550724638e-05, "loss": 1.1083, "step": 72},
    {"epoch": 0.256140350877193, "grad_norm": 1.0471395283209621, "learning_rate": 6.173913043478262e-05, "loss": 1.1081, "step": 73},
    {"epoch": 0.2596491228070175, "grad_norm": 1.1372933010569457, "learning_rate": 6.144927536231884e-05, "loss": 1.1191, "step": 74},
    {"epoch": 0.2631578947368421, "grad_norm": 1.094007360015913, "learning_rate": 6.115942028985507e-05, "loss": 1.1498, "step": 75},
    {"epoch": 0.26666666666666666, "grad_norm": 1.037731837007179, "learning_rate": 6.086956521739131e-05, "loss": 1.078, "step": 76},
    {"epoch": 0.27017543859649124, "grad_norm": 0.9878690341929469, "learning_rate": 6.057971014492755e-05, "loss": 1.0925, "step": 77},
    {"epoch": 0.2736842105263158, "grad_norm": 1.0251650017115257, "learning_rate": 6.0289855072463774e-05, "loss": 1.0992, "step": 78},
    {"epoch": 0.2771929824561403, "grad_norm": 0.9677079627985357, "learning_rate": 6.000000000000001e-05, "loss": 1.0228, "step": 79},
    {"epoch": 0.2807017543859649, "grad_norm": 1.03084630118753, "learning_rate": 5.9710144927536236e-05, "loss": 1.1026, "step": 80},
    {"epoch": 0.28421052631578947, "grad_norm": 1.149025536043054, "learning_rate": 5.942028985507246e-05, "loss": 1.0971, "step": 81},
    {"epoch": 0.28771929824561404, "grad_norm": 0.8977367483124615, "learning_rate": 5.91304347826087e-05, "loss": 1.0665, "step": 82},
    {"epoch": 0.2912280701754386, "grad_norm": 0.982319885338846, "learning_rate": 5.884057971014494e-05, "loss": 1.0987, "step": 83},
    {"epoch": 0.29473684210526313, "grad_norm": 0.8863683819755959, "learning_rate": 5.8550724637681166e-05, "loss": 1.0603, "step": 84},
    {"epoch": 0.2982456140350877, "grad_norm": 1.0566465114428347, "learning_rate": 5.82608695652174e-05, "loss": 1.0961, "step": 85},
    {"epoch": 0.3017543859649123, "grad_norm": 0.9391730832092359, "learning_rate": 5.797101449275363e-05, "loss": 1.1184, "step": 86},
    {"epoch": 0.30526315789473685, "grad_norm": 0.9725750087741657, "learning_rate": 5.768115942028986e-05, "loss": 1.1131, "step": 87},
    {"epoch": 0.3087719298245614, "grad_norm": 0.9134535098625444, "learning_rate": 5.739130434782609e-05, "loss": 1.0635, "step": 88},
    {"epoch": 0.312280701754386, "grad_norm": 0.9210153237928186, "learning_rate": 5.710144927536232e-05, "loss": 1.0208, "step": 89},
    {"epoch": 0.3157894736842105, "grad_norm": 1.073076083662152, "learning_rate": 5.681159420289856e-05, "loss": 1.1314, "step": 90},
    {"epoch": 0.3192982456140351, "grad_norm": 0.9246736723369094, "learning_rate": 5.652173913043479e-05, "loss": 1.0458, "step": 91},
    {"epoch": 0.32280701754385965, "grad_norm": 0.9415979605613132, "learning_rate": 5.623188405797102e-05, "loss": 1.0432, "step": 92},
    {"epoch": 0.3263157894736842, "grad_norm": 1.0029090729561863, "learning_rate": 5.594202898550725e-05, "loss": 1.1045, "step": 93},
    {"epoch": 0.3298245614035088, "grad_norm": 1.0012689311514984, "learning_rate": 5.565217391304348e-05, "loss": 1.0923, "step": 94},
    {"epoch": 0.3333333333333333, "grad_norm": 0.9253904632856298, "learning_rate": 5.5362318840579714e-05, "loss": 1.028, "step": 95},
    {"epoch": 0.3368421052631579, "grad_norm": 1.006794875176782, "learning_rate": 5.507246376811594e-05, "loss": 1.0772, "step": 96},
    {"epoch": 0.34035087719298246, "grad_norm": 0.9652341181001821, "learning_rate": 5.478260869565218e-05, "loss": 1.0544, "step": 97},
    {"epoch": 0.34385964912280703, "grad_norm": 0.9633053495992878, "learning_rate": 5.449275362318841e-05, "loss": 1.0369, "step": 98},
    {"epoch": 0.3473684210526316, "grad_norm": 0.889193134564978, "learning_rate": 5.4202898550724644e-05, "loss": 1.0571, "step": 99},
    {"epoch": 0.3508771929824561, "grad_norm": 1.0688458576983824, "learning_rate": 5.391304347826087e-05, "loss": 1.0875, "step": 100},
    {"epoch": 0.3543859649122807, "grad_norm": 0.9618220162761959, "learning_rate": 5.3623188405797106e-05, "loss": 1.0989, "step": 101},
    {"epoch": 0.35789473684210527, "grad_norm": 1.0116504967458333, "learning_rate": 5.333333333333333e-05, "loss": 1.1362, "step": 102},
    {"epoch": 0.36140350877192984, "grad_norm": 0.9446835004736188, "learning_rate": 5.304347826086957e-05, "loss": 1.0549, "step": 103},
    {"epoch": 0.3649122807017544, "grad_norm": 1.2634297228252125, "learning_rate": 5.27536231884058e-05, "loss": 1.0865, "step": 104},
    {"epoch": 0.3684210526315789, "grad_norm": 1.0070752681998931, "learning_rate": 5.2463768115942036e-05, "loss": 1.071, "step": 105},
    {"epoch": 0.3719298245614035, "grad_norm": 1.0889370997100492, "learning_rate": 5.217391304347826e-05, "loss": 1.1063, "step": 106},
    {"epoch": 0.37543859649122807, "grad_norm": 0.9667935705662773, "learning_rate": 5.18840579710145e-05, "loss": 1.0666, "step": 107},
    {"epoch": 0.37894736842105264, "grad_norm": 0.9817859943047047, "learning_rate": 5.1594202898550725e-05, "loss": 1.1025, "step": 108},
    {"epoch": 0.3824561403508772, "grad_norm": 0.8710066297645505, "learning_rate": 5.130434782608696e-05, "loss": 1.1057, "step": 109},
    {"epoch": 0.38596491228070173, "grad_norm": 1.0647772098482442, "learning_rate": 5.1014492753623186e-05, "loss": 1.1315, "step": 110},
    {"epoch": 0.3894736842105263, "grad_norm": 0.9399046787831713, "learning_rate": 5.072463768115943e-05, "loss": 1.0572, "step": 111},
    {"epoch": 0.3929824561403509, "grad_norm": 0.8768124884597763, "learning_rate": 5.043478260869566e-05, "loss": 1.0401, "step": 112},
    {"epoch": 0.39649122807017545, "grad_norm": 1.009086025516714, "learning_rate": 5.014492753623189e-05, "loss": 1.0604, "step": 113},
    {"epoch": 0.4, "grad_norm": 0.9419034237826691, "learning_rate": 4.9855072463768116e-05, "loss": 1.0858, "step": 114},
    {"epoch": 0.40350877192982454, "grad_norm": 0.9876303358742554, "learning_rate": 4.956521739130435e-05, "loss": 1.083, "step": 115},
    {"epoch": 0.4070175438596491, "grad_norm": 0.934675922910989, "learning_rate": 4.927536231884058e-05, "loss": 1.0749, "step": 116},
    {"epoch": 0.4105263157894737, "grad_norm": 0.9813286336072813, "learning_rate": 4.898550724637682e-05, "loss": 1.0721, "step": 117},
    {"epoch": 0.41403508771929826, "grad_norm": 1.0168504955660564, "learning_rate": 4.869565217391305e-05, "loss": 1.0276, "step": 118},
    {"epoch": 0.41754385964912283, "grad_norm": 0.9087837180593036, "learning_rate": 4.840579710144928e-05, "loss": 1.0104, "step": 119},
    {"epoch": 0.42105263157894735, "grad_norm": 0.8834303398491904, "learning_rate": 4.8115942028985514e-05, "loss": 1.032, "step": 120},
    {"epoch": 0.4245614035087719, "grad_norm": 0.9525280076015293, "learning_rate": 4.782608695652174e-05, "loss": 1.0257, "step": 121},
    {"epoch": 0.4280701754385965, "grad_norm": 0.9493713737636021, "learning_rate": 4.7536231884057976e-05, "loss": 1.0273, "step": 122},
    {"epoch": 0.43157894736842106, "grad_norm": 0.9098239721172197, "learning_rate": 4.72463768115942e-05, "loss": 0.981, "step": 123},
    {"epoch": 0.43508771929824563, "grad_norm": 0.9664557596159875, "learning_rate": 4.6956521739130444e-05, "loss": 1.1167, "step": 124},
    {"epoch": 0.43859649122807015, "grad_norm": 0.8663800542676098, "learning_rate": 4.666666666666667e-05, "loss": 1.0566, "step": 125},
    {"epoch": 0.4421052631578947, "grad_norm": 0.8587329676076015, "learning_rate": 4.6376811594202906e-05, "loss": 1.0457, "step": 126},
    {"epoch": 0.4456140350877193, "grad_norm": 1.0139380452518938, "learning_rate": 4.608695652173913e-05, "loss": 1.0871, "step": 127},
    {"epoch": 0.44912280701754387, "grad_norm": 0.87218312389074, "learning_rate": 4.579710144927537e-05, "loss": 0.9934, "step": 128},
    {"epoch": 0.45263157894736844, "grad_norm": 0.945265788938204, "learning_rate": 4.5507246376811595e-05, "loss": 1.0598, "step": 129},
    {"epoch": 0.45614035087719296, "grad_norm": 1.0447712876512982, "learning_rate": 4.521739130434783e-05, "loss": 1.0208, "step": 130},
    {"epoch": 0.45964912280701753, "grad_norm": 0.8863649966336605, "learning_rate": 4.492753623188406e-05, "loss": 0.9916, "step": 131},
    {"epoch": 0.4631578947368421, "grad_norm": 0.9389072020523048, "learning_rate": 4.46376811594203e-05, "loss": 1.039, "step": 132},
    {"epoch": 0.4666666666666667, "grad_norm": 1.00128465703121, "learning_rate": 4.4347826086956525e-05, "loss": 1.007, "step": 133},
    {"epoch": 0.47017543859649125, "grad_norm": 0.9218948102233011, "learning_rate": 4.405797101449276e-05, "loss": 1.0489, "step": 134},
    {"epoch": 0.47368421052631576, "grad_norm": 0.8980509099966125, "learning_rate": 4.3768115942028986e-05, "loss": 1.0539, "step": 135},
    {"epoch": 0.47719298245614034, "grad_norm": 0.8922781997772711, "learning_rate": 4.347826086956522e-05, "loss": 1.0653, "step": 136},
    {"epoch": 0.4807017543859649, "grad_norm": 0.9229578265481162, "learning_rate": 4.318840579710145e-05, "loss": 1.0359, "step": 137},
    {"epoch": 0.4842105263157895, "grad_norm": 0.9864176248898312, "learning_rate": 4.289855072463769e-05, "loss": 1.0711, "step": 138},
    {"epoch": 0.48771929824561405, "grad_norm": 0.9040178292405336, "learning_rate": 4.2608695652173916e-05, "loss": 1.0615, "step": 139},
    {"epoch": 0.49122807017543857, "grad_norm": 0.9065042000785789, "learning_rate": 4.231884057971015e-05, "loss": 1.0267, "step": 140},
    {"epoch": 0.49473684210526314, "grad_norm": 0.9289938532156966, "learning_rate": 4.202898550724638e-05, "loss": 1.0269, "step": 141},
    {"epoch": 0.4982456140350877, "grad_norm": 0.8703398745310128, "learning_rate": 4.173913043478261e-05, "loss": 0.9952, "step": 142},
    {"epoch": 0.5017543859649123, "grad_norm": 0.9754555079004449, "learning_rate": 4.144927536231884e-05, "loss": 1.0744, "step": 143},
    {"epoch": 0.5052631578947369, "grad_norm": 1.0698316819851512, "learning_rate": 4.115942028985507e-05, "loss": 1.0179, "step": 144},
    {"epoch": 0.5087719298245614, "grad_norm": 0.9380968706540932, "learning_rate": 4.0869565217391314e-05, "loss": 0.9932, "step": 145},
    {"epoch": 0.512280701754386, "grad_norm": 0.9380079488737321, "learning_rate": 4.057971014492754e-05, "loss": 1.0655, "step": 146},
    {"epoch": 0.5157894736842106, "grad_norm": 0.9764065323167558, "learning_rate": 4.0289855072463776e-05, "loss": 1.0096, "step": 147},
    {"epoch": 0.519298245614035, "grad_norm": 0.917590332443622, "learning_rate": 4e-05, "loss": 1.0376, "step": 148},
    {"epoch": 0.5228070175438596, "grad_norm": 0.9466091457378278, "learning_rate": 3.971014492753623e-05, "loss": 1.0722, "step": 149},
    {"epoch": 0.5263157894736842, "grad_norm": 0.9998149019870339, "learning_rate": 3.942028985507247e-05, "loss": 1.0747, "step": 150},
    {"epoch": 0.5298245614035088, "grad_norm": 0.9592124249246545, "learning_rate": 3.91304347826087e-05, "loss": 0.9772, "step": 151},
    {"epoch": 0.5333333333333333, "grad_norm": 0.8702377525213307, "learning_rate": 3.884057971014493e-05, "loss": 1.0416, "step": 152},
    {"epoch": 0.5368421052631579, "grad_norm": 0.9408998854641563, "learning_rate": 3.855072463768116e-05, "loss": 1.053, "step": 153},
    {"epoch": 0.5403508771929825, "grad_norm": 0.913519043609596, "learning_rate": 3.8260869565217395e-05, "loss": 1.0653, "step": 154},
    {"epoch": 0.543859649122807, "grad_norm": 1.0325941411560875, "learning_rate": 3.797101449275363e-05, "loss": 1.0248, "step": 155},
    {"epoch": 0.5473684210526316, "grad_norm": 1.0088857759548826, "learning_rate": 3.7681159420289856e-05, "loss": 1.0984, "step": 156},
    {"epoch": 0.5508771929824562, "grad_norm": 0.9428277439596274, "learning_rate": 3.739130434782609e-05, "loss": 1.0176, "step": 157},
    {"epoch": 0.5543859649122806, "grad_norm": 0.9286332315925182, "learning_rate": 3.7101449275362325e-05, "loss": 1.0141, "step": 158},
    {"epoch": 0.5578947368421052, "grad_norm": 0.8869336003769103, "learning_rate": 3.681159420289855e-05, "loss": 1.0075, "step": 159},
    {"epoch": 0.5614035087719298, "grad_norm": 0.8749772917176272, "learning_rate": 3.6521739130434786e-05, "loss": 0.9922, "step": 160},
    {"epoch": 0.5649122807017544, "grad_norm": 0.9177592855322187, "learning_rate": 3.623188405797102e-05, "loss": 1.047, "step": 161},
    {"epoch": 0.5684210526315789, "grad_norm": 0.9681450836764032, "learning_rate": 3.594202898550725e-05, "loss": 0.9947, "step": 162},
    {"epoch": 0.5719298245614035, "grad_norm": 0.8944949579910934, "learning_rate": 3.565217391304348e-05, "loss": 1.0414, "step": 163},
    {"epoch": 0.5754385964912281, "grad_norm": 0.823418699987978, "learning_rate": 3.5362318840579716e-05, "loss": 0.9991, "step": 164},
    {"epoch": 0.5789473684210527, "grad_norm": 0.9010716175104698, "learning_rate": 3.5072463768115943e-05, "loss": 0.957, "step": 165},
    {"epoch": 0.5824561403508772, "grad_norm": 0.8553233244911422, "learning_rate": 3.478260869565218e-05, "loss": 0.9758, "step": 166},
    {"epoch": 0.5859649122807018, "grad_norm": 0.8955709532913586, "learning_rate": 3.449275362318841e-05, "loss": 1.0323, "step": 167},
    {"epoch": 0.5894736842105263, "grad_norm": 0.9632249383426715, "learning_rate": 3.420289855072464e-05, "loss": 1.0214, "step": 168},
    {"epoch": 0.5929824561403508, "grad_norm": 1.0070882268479324, "learning_rate": 3.391304347826087e-05, "loss": 1.0443, "step": 169},
    {"epoch": 0.5964912280701754, "grad_norm": 0.9089812750457459, "learning_rate": 3.36231884057971e-05, "loss": 0.9986, "step": 170},
    {"epoch": 0.6, "grad_norm": 0.9687903432716775, "learning_rate": 3.3333333333333335e-05, "loss": 1.0469, "step": 171},
    {"epoch": 0.6035087719298246, "grad_norm": 0.9065456102432492, "learning_rate": 3.304347826086957e-05, "loss": 1.0736, "step": 172},
    {"epoch": 0.6070175438596491, "grad_norm": 0.8191700226087758, "learning_rate": 3.2753623188405796e-05, "loss": 0.9516, "step": 173},
    {"epoch": 0.6105263157894737, "grad_norm": 0.910478607503674, "learning_rate": 3.246376811594203e-05, "loss": 1.0594, "step": 174},
    {"epoch": 0.6140350877192983, "grad_norm": 0.9847223979786262, "learning_rate": 3.2173913043478265e-05, "loss": 1.0592, "step": 175},
    {"epoch": 0.6175438596491228, "grad_norm": 0.9377862201207361, "learning_rate": 3.188405797101449e-05, "loss": 1.0477, "step": 176},
    {"epoch": 0.6210526315789474, "grad_norm": 0.9077059344710785, "learning_rate": 3.1594202898550726e-05, "loss": 1.0394, "step": 177},
    {"epoch": 0.624561403508772, "grad_norm": 1.038075786423051, "learning_rate": 3.130434782608696e-05, "loss": 1.0548, "step": 178},
    {"epoch": 0.6280701754385964, "grad_norm": 0.9160209790965669, "learning_rate": 3.101449275362319e-05, "loss": 0.9976, "step": 179},
    {"epoch": 0.631578947368421, "grad_norm": 0.8109415841452218, "learning_rate": 3.072463768115942e-05, "loss": 1.0091, "step": 180},
    {"epoch": 0.6350877192982456, "grad_norm": 0.8492567440061373, "learning_rate": 3.0434782608695656e-05, "loss": 1.0038, "step": 181},
    {"epoch": 0.6385964912280702, "grad_norm": 0.9589169451188458, "learning_rate": 3.0144927536231887e-05, "loss": 1.0207, "step": 182},
    {"epoch": 0.6421052631578947, "grad_norm": 0.8848409951191845, "learning_rate": 2.9855072463768118e-05, "loss": 0.9767, "step": 183},
    {"epoch": 0.6456140350877193, "grad_norm": 0.8955318088948372, "learning_rate": 2.956521739130435e-05, "loss": 1.0078, "step": 184},
    {"epoch": 0.6491228070175439, "grad_norm": 0.8900555696345945, "learning_rate": 2.9275362318840583e-05, "loss": 1.058, "step": 185},
    {"epoch": 0.6526315789473685, "grad_norm": 0.9579354937026371, "learning_rate": 2.8985507246376814e-05, "loss": 1.0052, "step": 186},
    {"epoch": 0.656140350877193, "grad_norm": 0.8773237300221255, "learning_rate": 2.8695652173913044e-05, "loss": 1.0132, "step": 187},
    {"epoch": 0.6596491228070176, "grad_norm": 0.9290671958630004, "learning_rate": 2.840579710144928e-05, "loss": 0.9978, "step": 188},
    {"epoch": 0.6631578947368421, "grad_norm": 0.9098091343168585, "learning_rate": 2.811594202898551e-05, "loss": 0.986, "step": 189},
    {"epoch": 0.6666666666666666, "grad_norm": 0.9494694332915156, "learning_rate": 2.782608695652174e-05, "loss": 1.0194, "step": 190},
    {"epoch": 0.6701754385964912, "grad_norm": 0.8801338941113726, "learning_rate": 2.753623188405797e-05, "loss": 0.969, "step": 191},
    {"epoch": 0.6736842105263158, "grad_norm": 0.9510753232451794, "learning_rate": 2.7246376811594205e-05, "loss": 0.9742, "step": 192},
    {"epoch": 0.6771929824561403, "grad_norm": 0.9589458183246781, "learning_rate": 2.6956521739130436e-05, "loss": 1.0525, "step": 193},
    {"epoch": 0.6807017543859649, "grad_norm": 0.879997612951922, "learning_rate": 2.6666666666666667e-05, "loss": 0.9824, "step": 194},
    {"epoch": 0.6842105263157895, "grad_norm": 0.8389910470867041, "learning_rate": 2.63768115942029e-05, "loss": 1.0056, "step": 195},
    {"epoch": 0.6877192982456141, "grad_norm": 0.9163566396967311, "learning_rate": 2.608695652173913e-05, "loss": 0.9976, "step": 196},
    {"epoch": 0.6912280701754386, "grad_norm": 0.9772320516342279, "learning_rate": 2.5797101449275362e-05, "loss": 1.0338, "step": 197},
    {"epoch": 0.6947368421052632, "grad_norm": 0.960770516269451, "learning_rate": 2.5507246376811593e-05, "loss": 1.0611, "step": 198},
    {"epoch": 0.6982456140350877, "grad_norm": 0.8890098864104716, "learning_rate": 2.521739130434783e-05, "loss": 1.0168, "step": 199},
    {"epoch": 0.7017543859649122, "grad_norm": 0.8658933316106927, "learning_rate": 2.4927536231884058e-05, "loss": 1.011, "step": 200},
    {"epoch": 0.7052631578947368, "grad_norm": 0.8576704263418472, "learning_rate": 2.463768115942029e-05, "loss": 0.9807, "step": 201},
    {"epoch": 0.7087719298245614, "grad_norm": 0.9186773959277461, "learning_rate": 2.4347826086956526e-05, "loss": 0.9284, "step": 202},
    {"epoch": 0.712280701754386, "grad_norm": 0.858587966202896, "learning_rate": 2.4057971014492757e-05, "loss": 0.9795, "step": 203},
    {"epoch": 0.7157894736842105, "grad_norm": 0.9422658689165894, "learning_rate": 2.3768115942028988e-05, "loss": 0.9764, "step": 204},
    {"epoch": 0.7192982456140351, "grad_norm": 1.0137944232610083, "learning_rate": 2.3478260869565222e-05, "loss": 1.0627, "step": 205},
    {"epoch": 0.7228070175438597, "grad_norm": 0.9853442185067721, "learning_rate": 2.3188405797101453e-05, "loss": 1.0289, "step": 206},
    {"epoch": 0.7263157894736842, "grad_norm": 0.8923047177872453, "learning_rate": 2.2898550724637684e-05, "loss": 1.0172, "step": 207},
    {"epoch": 0.7298245614035088, "grad_norm": 0.8439161677593868, "learning_rate": 2.2608695652173914e-05, "loss": 1.0484, "step": 208},
    {"epoch": 0.7333333333333333, "grad_norm": 0.8201410283081917, "learning_rate": 2.231884057971015e-05, "loss": 1.0168, "step": 209},
    {"epoch": 0.7368421052631579, "grad_norm": 0.8442899557106462, "learning_rate": 2.202898550724638e-05, "loss": 0.9728, "step": 210},
    {"epoch": 0.7403508771929824, "grad_norm": 0.9098630031184369, "learning_rate": 2.173913043478261e-05, "loss": 1.0304, "step": 211},
    {"epoch": 0.743859649122807, "grad_norm": 0.8217923688708064, "learning_rate": 2.1449275362318844e-05, "loss": 1.0334, "step": 212},
    {"epoch": 0.7473684210526316, "grad_norm": 0.8420848196609938, "learning_rate": 2.1159420289855075e-05, "loss": 0.9665, "step": 213},
    {"epoch": 0.7508771929824561, "grad_norm": 0.8810436312516507, "learning_rate": 2.0869565217391306e-05, "loss": 0.9792, "step": 214},
    {"epoch": 0.7543859649122807, "grad_norm": 0.9749166994556522, "learning_rate": 2.0579710144927537e-05, "loss": 1.0163, "step": 215},
    {"epoch": 0.7578947368421053, "grad_norm": 0.9619307294480671, "learning_rate": 2.028985507246377e-05, "loss": 1.0276, "step": 216},
    {"epoch": 0.7614035087719299, "grad_norm": 0.8475320304538961, "learning_rate": 2e-05, "loss": 1.0175, "step": 217},
    {"epoch": 0.7649122807017544, "grad_norm": 0.9215056645276478, "learning_rate": 1.9710144927536236e-05, "loss": 1.0349, "step": 218},
    {"epoch": 0.7684210526315789, "grad_norm": 0.8318566015217065, "learning_rate": 1.9420289855072467e-05, "loss": 0.9736, "step": 219},
    {"epoch": 0.7719298245614035, "grad_norm": 0.8841348427961992, "learning_rate": 1.9130434782608697e-05, "loss": 1.0028, "step": 220},
    {"epoch": 0.775438596491228, "grad_norm": 0.8726663458778539, "learning_rate": 1.8840579710144928e-05, "loss": 0.9837, "step": 221},
    {"epoch": 0.7789473684210526, "grad_norm": 0.8933308297868434, "learning_rate": 1.8550724637681162e-05, "loss": 0.9875, "step": 222},
    {"epoch": 0.7824561403508772, "grad_norm": 0.8473190274519702, "learning_rate": 1.8260869565217393e-05, "loss": 0.9822, "step": 223},
    {"epoch": 0.7859649122807018, "grad_norm": 0.8786630261494585, "learning_rate": 1.7971014492753624e-05, "loss": 0.9785, "step": 224},
    {"epoch": 0.7894736842105263, "grad_norm": 0.8769937018569535, "learning_rate": 1.7681159420289858e-05, "loss": 0.9863, "step": 225},
    {"epoch": 0.7929824561403509, "grad_norm": 0.9418240902774399, "learning_rate": 1.739130434782609e-05, "loss": 0.997, "step": 226},
    {"epoch": 0.7964912280701755, "grad_norm": 0.9360134126214504, "learning_rate": 1.710144927536232e-05, "loss": 0.9863, "step": 227},
    {"epoch": 0.8, "grad_norm": 0.999458819703, "learning_rate": 1.681159420289855e-05, "loss": 0.9498, "step": 228},
    {"epoch": 0.8035087719298246, "grad_norm": 0.8238131544325801, "learning_rate": 1.6521739130434785e-05, "loss": 0.9609, "step": 229},
    {"epoch": 0.8070175438596491, "grad_norm": 0.9110168722541372, "learning_rate": 1.6231884057971015e-05, "loss": 0.9726, "step": 230},
    {"epoch": 0.8105263157894737, "grad_norm": 0.915280887700176, "learning_rate": 1.5942028985507246e-05, "loss": 0.9687, "step": 231},
    {"epoch": 0.8140350877192982, "grad_norm": 0.8997906447720105, "learning_rate": 1.565217391304348e-05, "loss": 1.0236, "step": 232},
    {"epoch": 0.8175438596491228, "grad_norm": 0.8927454629454755, "learning_rate": 1.536231884057971e-05, "loss": 0.96, "step": 233},
    {"epoch": 0.8210526315789474, "grad_norm": 0.9150163082832309, "learning_rate": 1.5072463768115944e-05, "loss": 0.9843, "step": 234},
    {"epoch": 0.8245614035087719, "grad_norm": 0.9497695522124332, "learning_rate": 1.4782608695652174e-05, "loss": 1.0254, "step": 235},
    {"epoch": 0.8280701754385965, "grad_norm": 0.8134079237524361, "learning_rate": 1.4492753623188407e-05, "loss": 0.9874, "step": 236},
    {"epoch": 0.8315789473684211, "grad_norm": 0.854532647109886, "learning_rate": 1.420289855072464e-05, "loss": 0.9697, "step": 237},
    {"epoch": 0.8350877192982457, "grad_norm": 0.902401593136911, "learning_rate": 1.391304347826087e-05, "loss": 1.0083, "step": 238},
    {"epoch": 0.8385964912280702, "grad_norm": 0.8365313967364223, "learning_rate": 1.3623188405797103e-05, "loss": 0.9479, "step": 239},
    {"epoch": 0.8421052631578947, "grad_norm": 0.8385782446580461, "learning_rate": 1.3333333333333333e-05, "loss": 0.998, "step": 240},
    {"epoch": 0.8456140350877193, "grad_norm": 0.9130856203558422, "learning_rate": 1.3043478260869566e-05, "loss": 0.9934, "step": 241},
    {"epoch": 0.8491228070175438, "grad_norm": 0.8261442785562587, "learning_rate": 1.2753623188405797e-05, "loss": 0.9274, "step": 242},
    {"epoch": 0.8526315789473684, "grad_norm": 0.8560538840518993, "learning_rate": 1.2463768115942029e-05, "loss": 0.9511, "step": 243},
    {"epoch": 0.856140350877193, "grad_norm": 0.8699104552871371, "learning_rate": 1.2173913043478263e-05, "loss": 0.958, "step": 244},
    {"epoch": 0.8596491228070176, "grad_norm": 0.9107920631411749, "learning_rate": 1.1884057971014494e-05, "loss": 1.0177, "step": 245},
    {"epoch": 0.8631578947368421, "grad_norm": 0.8776802277406592, "learning_rate": 1.1594202898550726e-05, "loss": 0.9261, "step": 246},
    {"epoch": 0.8666666666666667, "grad_norm": 0.8999713914824392, "learning_rate": 1.1304347826086957e-05, "loss": 0.9973, "step": 247},
    {"epoch": 0.8701754385964913, "grad_norm": 0.8835866950552989, "learning_rate": 1.101449275362319e-05, "loss": 1.0028, "step": 248},
    {"epoch": 0.8736842105263158, "grad_norm": 0.9243563863707843, "learning_rate": 1.0724637681159422e-05, "loss": 0.9971, "step": 249},
    {"epoch": 0.8771929824561403, "grad_norm": 0.9287603572835252, "learning_rate": 1.0434782608695653e-05, "loss": 0.9902, "step": 250},
    {"epoch": 0.8807017543859649, "grad_norm": 0.8670790287211418, "learning_rate": 1.0144927536231885e-05, "loss": 1.0039, "step": 251},
    {"epoch": 0.8842105263157894, "grad_norm": 0.8663791969051364, "learning_rate": 9.855072463768118e-06, "loss": 0.9817, "step": 252},
    {"epoch": 0.887719298245614, "grad_norm": 0.8631015068740149, "learning_rate": 9.565217391304349e-06, "loss": 1.0173, "step": 253},
    {"epoch": 0.8912280701754386, "grad_norm": 0.8829365467183184, "learning_rate": 9.275362318840581e-06, "loss": 0.9578, "step": 254},
    {"epoch": 0.8947368421052632, "grad_norm": 0.9001038483145964, "learning_rate": 8.985507246376812e-06, "loss": 1.0258, "step": 255},
    {"epoch": 0.8982456140350877, "grad_norm": 0.8834099073060356, "learning_rate": 8.695652173913044e-06, "loss": 0.9726, "step": 256},
    {"epoch": 0.9017543859649123, "grad_norm": 0.8221574328830703, "learning_rate": 8.405797101449275e-06, "loss": 0.9318, "step": 257},
    {"epoch": 0.9052631578947369, "grad_norm": 0.8603145991699224, "learning_rate": 8.115942028985508e-06, "loss": 0.9663, "step": 258},
    {"epoch": 0.9087719298245615, "grad_norm": 0.8557514339905812, "learning_rate": 7.82608695652174e-06, "loss": 0.9968, "step": 259},
    {"epoch": 0.9122807017543859, "grad_norm": 0.8263665060539463, "learning_rate": 7.536231884057972e-06, "loss": 1.0022, "step": 260},
    {"epoch": 0.9157894736842105, "grad_norm": 0.9157072836324722, "learning_rate": 7.246376811594203e-06, "loss": 0.9387, "step": 261},
    {"epoch": 0.9192982456140351, "grad_norm": 0.9233305451677062, "learning_rate": 6.956521739130435e-06, "loss": 0.9934, "step": 262},
    {"epoch": 0.9228070175438596, "grad_norm": 0.903604456186341, "learning_rate": 6.666666666666667e-06, "loss": 0.9502, "step": 263},
    {"epoch": 0.9263157894736842, "grad_norm": 0.9285076793745337, "learning_rate": 6.376811594202898e-06, "loss": 0.9645, "step": 264},
    {"epoch": 0.9298245614035088, "grad_norm": 0.888909609341721, "learning_rate": 6.086956521739132e-06, "loss": 0.946, "step": 265},
    {"epoch": 0.9333333333333333, "grad_norm": 0.9438455431857006, "learning_rate": 5.797101449275363e-06, "loss": 1.0153, "step": 266},
    {"epoch": 0.9368421052631579, "grad_norm": 0.8551557910334344, "learning_rate": 5.507246376811595e-06, "loss": 0.958, "step": 267},
    {"epoch": 0.9403508771929825, "grad_norm": 0.8234220234488231, "learning_rate": 5.2173913043478265e-06, "loss": 0.96, "step": 268},
    {"epoch": 0.9438596491228071, "grad_norm": 0.9003874584417242, "learning_rate": 4.927536231884059e-06, "loss": 0.9757, "step": 269},
    {"epoch": 0.9473684210526315, "grad_norm": 0.89021747241037, "learning_rate": 4.637681159420291e-06, "loss": 0.9511, "step": 270},
    {"epoch": 0.9508771929824561, "grad_norm": 0.850549521859951, "learning_rate": 4.347826086956522e-06, "loss": 0.9619, "step": 271},
    {"epoch": 0.9543859649122807, "grad_norm": 0.8477206031920781, "learning_rate": 4.057971014492754e-06, "loss": 0.9169, "step": 272},
    {"epoch": 0.9578947368421052, "grad_norm": 0.9199733593357698, "learning_rate": 3.768115942028986e-06, "loss": 1.0299, "step": 273},
    {"epoch": 0.9614035087719298, "grad_norm": 0.8038841148302012, "learning_rate": 3.4782608695652175e-06, "loss": 0.9584, "step": 274},
    {"epoch": 0.9649122807017544, "grad_norm": 0.8488075839645941, "learning_rate": 3.188405797101449e-06, "loss": 0.9868, "step": 275},
    {"epoch": 0.968421052631579, "grad_norm": 0.8355215697229909, "learning_rate": 2.8985507246376816e-06, "loss": 1.009, "step": 276},
    {"epoch": 0.9719298245614035, "grad_norm": 0.9354156162463755, "learning_rate": 2.6086956521739132e-06, "loss": 0.9914, "step": 277},
    {"epoch": 0.9754385964912281, "grad_norm": 0.8504047654484729, "learning_rate": 2.3188405797101453e-06, "loss": 1.0137, "step": 278},
    {"epoch": 0.9789473684210527, "grad_norm": 0.9106574593705012, "learning_rate": 2.028985507246377e-06, "loss": 0.9963, "step": 279},
    {"epoch": 0.9824561403508771, "grad_norm": 0.8356909679028329, "learning_rate": 1.7391304347826088e-06, "loss": 1.0176, "step": 280},
    {"epoch": 0.9859649122807017, "grad_norm": 0.8904235043433295, "learning_rate": 1.4492753623188408e-06, "loss": 0.9684, "step": 281},
    {"epoch": 0.9894736842105263, "grad_norm": 0.8663330662929399, "learning_rate": 1.1594202898550726e-06, "loss": 0.9852, "step": 282},
    {"epoch": 0.9929824561403509, "grad_norm": 0.8453724734461978, "learning_rate": 8.695652173913044e-07, "loss": 0.9599, "step": 283},
    {"epoch": 0.9964912280701754, "grad_norm": 0.8659094349837689, "learning_rate": 5.797101449275363e-07, "loss": 1.0253, "step": 284},
    {"epoch": 1.0, "grad_norm": 0.8132420261672163, "learning_rate": 2.8985507246376816e-07, "loss": 0.8957, "step": 285},
    {"epoch": 1.0, "step": 285, "total_flos": 224781354270720.0, "train_loss": 1.0555618083267881, "train_runtime": 2779.4288, "train_samples_per_second": 52.454, "train_steps_per_second": 0.103}
  ],
  "logging_steps": 1.0,
  "max_steps": 285,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 700,
  "total_flos": 224781354270720.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}