{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.0,
"global_step": 198420,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9874004636629375e-05, |
|
"loss": 2.688, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.974800927325874e-05, |
|
"loss": 2.2881, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.962201390988812e-05, |
|
"loss": 2.1112, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.949601854651749e-05, |
|
"loss": 1.9839, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.9370023183146866e-05, |
|
"loss": 1.9138, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.924402781977623e-05, |
|
"loss": 1.8549, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.9118032456405605e-05, |
|
"loss": 1.8107, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.899203709303498e-05, |
|
"loss": 1.7686, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.886604172966435e-05, |
|
"loss": 1.7388, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.874004636629372e-05, |
|
"loss": 1.6948, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.8614051002923096e-05, |
|
"loss": 1.6784, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.848805563955247e-05, |
|
"loss": 1.6304, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.8362060276181834e-05, |
|
"loss": 1.6583, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.8236064912811214e-05, |
|
"loss": 1.5898, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.811006954944059e-05, |
|
"loss": 1.5856, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.798407418606995e-05, |
|
"loss": 1.5539, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.7858078822699325e-05, |
|
"loss": 1.5607, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.77320834593287e-05, |
|
"loss": 1.5663, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.760608809595807e-05, |
|
"loss": 1.5291, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.7480092732587444e-05, |
|
"loss": 1.5196, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.7354097369216816e-05, |
|
"loss": 1.5014, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.722810200584619e-05, |
|
"loss": 1.4796, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.7102106642475555e-05, |
|
"loss": 1.4735, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.697611127910493e-05, |
|
"loss": 1.4725, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.685011591573431e-05, |
|
"loss": 1.4906, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.672412055236367e-05, |
|
"loss": 1.4598, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.6598125188993046e-05, |
|
"loss": 1.4293, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.647212982562242e-05, |
|
"loss": 1.4304, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.634613446225179e-05, |
|
"loss": 1.4401, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.6220139098881164e-05, |
|
"loss": 1.4463, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.609414373551054e-05, |
|
"loss": 1.3965, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.596814837213991e-05, |
|
"loss": 1.4147, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.5842153008769276e-05, |
|
"loss": 1.3864, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.571615764539865e-05, |
|
"loss": 1.3718, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.559016228202803e-05, |
|
"loss": 1.3891, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.5464166918657394e-05, |
|
"loss": 1.3762, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.5338171555286767e-05, |
|
"loss": 1.3486, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.521217619191614e-05, |
|
"loss": 1.3795, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.508618082854551e-05, |
|
"loss": 1.3564, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.496018546517488e-05, |
|
"loss": 1.3572, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.483419010180426e-05, |
|
"loss": 1.3434, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.470819473843363e-05, |
|
"loss": 1.3337, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.4582199375063e-05, |
|
"loss": 1.3233, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.445620401169237e-05, |
|
"loss": 1.3345, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.433020864832174e-05, |
|
"loss": 1.3145, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.420421328495112e-05, |
|
"loss": 1.3329, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.407821792158049e-05, |
|
"loss": 1.3152, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.395222255820986e-05, |
|
"loss": 1.3237, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.382622719483923e-05, |
|
"loss": 1.3108, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.3700231831468605e-05, |
|
"loss": 1.3097, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.357423646809798e-05, |
|
"loss": 1.3101, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.344824110472735e-05, |
|
"loss": 1.3018, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.3322245741356724e-05, |
|
"loss": 1.3023, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.319625037798609e-05, |
|
"loss": 1.2938, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.307025501461546e-05, |
|
"loss": 1.295, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.2944259651244835e-05, |
|
"loss": 1.2941, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.281826428787421e-05, |
|
"loss": 1.2825, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.269226892450358e-05, |
|
"loss": 1.2581, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.256627356113295e-05, |
|
"loss": 1.2742, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.2440278197762326e-05, |
|
"loss": 1.2623, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.231428283439169e-05, |
|
"loss": 1.245, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.218828747102107e-05, |
|
"loss": 1.2664, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.2062292107650444e-05, |
|
"loss": 1.2687, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.193629674427981e-05, |
|
"loss": 1.2493, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.181030138090918e-05, |
|
"loss": 1.2562, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.1684306017538556e-05, |
|
"loss": 1.2501, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.155831065416793e-05, |
|
"loss": 1.2298, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.14323152907973e-05, |
|
"loss": 1.2554, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.1306319927426674e-05, |
|
"loss": 1.2565, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.1180324564056046e-05, |
|
"loss": 1.2386, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.105432920068541e-05, |
|
"loss": 1.2129, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.0928333837314785e-05, |
|
"loss": 1.2246, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.0802338473944165e-05, |
|
"loss": 1.2231, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.067634311057353e-05, |
|
"loss": 1.2392, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.05503477472029e-05, |
|
"loss": 1.2326, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.0424352383832276e-05, |
|
"loss": 1.2224, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.029835702046165e-05, |
|
"loss": 1.2076, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.017236165709102e-05, |
|
"loss": 1.2093, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.0046366293720394e-05, |
|
"loss": 1.2156, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.992037093034977e-05, |
|
"loss": 1.2158, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.979437556697913e-05, |
|
"loss": 1.2221, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.9668380203608506e-05, |
|
"loss": 1.2, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.9542384840237885e-05, |
|
"loss": 1.203, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.941638947686726e-05, |
|
"loss": 1.209, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.9290394113496624e-05, |
|
"loss": 1.184, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.9164398750126e-05, |
|
"loss": 1.2053, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.903840338675537e-05, |
|
"loss": 1.1807, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.891240802338474e-05, |
|
"loss": 1.1921, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.8786412660014115e-05, |
|
"loss": 1.1877, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.866041729664349e-05, |
|
"loss": 1.2047, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.853442193327286e-05, |
|
"loss": 1.1715, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.8408426569902226e-05, |
|
"loss": 1.1882, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.82824312065316e-05, |
|
"loss": 1.1839, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.815643584316098e-05, |
|
"loss": 1.1821, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.8030440479790345e-05, |
|
"loss": 1.1737, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.790444511641972e-05, |
|
"loss": 1.1848, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.777844975304909e-05, |
|
"loss": 1.1982, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.765245438967846e-05, |
|
"loss": 1.1675, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.7526459026307835e-05, |
|
"loss": 1.1568, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.740046366293721e-05, |
|
"loss": 1.1629, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.727446829956658e-05, |
|
"loss": 1.1451, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.714847293619595e-05, |
|
"loss": 1.1649, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.702247757282532e-05, |
|
"loss": 1.1627, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.689648220945469e-05, |
|
"loss": 1.1587, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.6770486846084065e-05, |
|
"loss": 1.1531, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.664449148271344e-05, |
|
"loss": 1.1359, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.651849611934281e-05, |
|
"loss": 1.1591, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.639250075597218e-05, |
|
"loss": 1.1475, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.626650539260155e-05, |
|
"loss": 1.1728, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.614051002923093e-05, |
|
"loss": 1.1666, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.60145146658603e-05, |
|
"loss": 1.1417, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.588851930248967e-05, |
|
"loss": 1.1421, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.576252393911904e-05, |
|
"loss": 1.1618, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.563652857574841e-05, |
|
"loss": 1.141, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.5510533212377786e-05, |
|
"loss": 1.1487, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.538453784900716e-05, |
|
"loss": 1.1327, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.525854248563653e-05, |
|
"loss": 1.1478, |
|
"step": 58500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.5132547122265904e-05, |
|
"loss": 1.1354, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.500655175889527e-05, |
|
"loss": 1.1326, |
|
"step": 59500 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.488055639552464e-05, |
|
"loss": 1.1268, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.475456103215402e-05, |
|
"loss": 1.1475, |
|
"step": 60500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.4628565668783395e-05, |
|
"loss": 1.1112, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.450257030541276e-05, |
|
"loss": 1.1328, |
|
"step": 61500 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.4376574942042134e-05, |
|
"loss": 1.1271, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.4250579578671506e-05, |
|
"loss": 1.1272, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.412458421530088e-05, |
|
"loss": 1.1104, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.399858885193025e-05, |
|
"loss": 1.1017, |
|
"step": 63500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.3872593488559624e-05, |
|
"loss": 1.1137, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.3746598125189e-05, |
|
"loss": 1.1178, |
|
"step": 64500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.362060276181836e-05, |
|
"loss": 1.1247, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.3494607398447736e-05, |
|
"loss": 1.1114, |
|
"step": 65500 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.3368612035077115e-05, |
|
"loss": 1.1013, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.324261667170648e-05, |
|
"loss": 1.032, |
|
"step": 66500 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.3116621308335854e-05, |
|
"loss": 0.9762, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.299062594496523e-05, |
|
"loss": 0.9998, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.28646305815946e-05, |
|
"loss": 0.9951, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.273863521822397e-05, |
|
"loss": 0.984, |
|
"step": 68500 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.2612639854853345e-05, |
|
"loss": 0.987, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.248664449148272e-05, |
|
"loss": 0.9793, |
|
"step": 69500 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.2360649128112084e-05, |
|
"loss": 1.0024, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.2234653764741457e-05, |
|
"loss": 1.0008, |
|
"step": 70500 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.2108658401370836e-05, |
|
"loss": 0.996, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.19826630380002e-05, |
|
"loss": 0.9927, |
|
"step": 71500 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.1856667674629575e-05, |
|
"loss": 0.9943, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.173067231125895e-05, |
|
"loss": 0.9905, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.160467694788832e-05, |
|
"loss": 0.997, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.1478681584517686e-05, |
|
"loss": 0.9831, |
|
"step": 73500 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.1352686221147066e-05, |
|
"loss": 0.9863, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.122669085777644e-05, |
|
"loss": 0.9961, |
|
"step": 74500 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.1100695494405804e-05, |
|
"loss": 0.985, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.097470013103518e-05, |
|
"loss": 0.9954, |
|
"step": 75500 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.084870476766455e-05, |
|
"loss": 0.9813, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.072270940429392e-05, |
|
"loss": 0.9961, |
|
"step": 76500 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.0596714040923295e-05, |
|
"loss": 0.9968, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.0470718677552668e-05, |
|
"loss": 1.0083, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.0344723314182037e-05, |
|
"loss": 0.9952, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.021872795081141e-05, |
|
"loss": 0.9919, |
|
"step": 78500 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.0092732587440786e-05, |
|
"loss": 0.9836, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 2.9966737224070156e-05, |
|
"loss": 0.9868, |
|
"step": 79500 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 2.984074186069953e-05, |
|
"loss": 1.012, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 2.9714746497328898e-05, |
|
"loss": 0.9844, |
|
"step": 80500 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 2.958875113395827e-05, |
|
"loss": 0.9899, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 2.946275577058764e-05, |
|
"loss": 0.9693, |
|
"step": 81500 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 2.933676040721702e-05, |
|
"loss": 0.9793, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 2.921076504384639e-05, |
|
"loss": 0.9868, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 2.908476968047576e-05, |
|
"loss": 0.9842, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 2.895877431710513e-05, |
|
"loss": 0.9685, |
|
"step": 83500 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 2.88327789537345e-05, |
|
"loss": 0.9832, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 2.870678359036388e-05, |
|
"loss": 0.98, |
|
"step": 84500 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 2.858078822699325e-05, |
|
"loss": 0.9895, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 2.845479286362262e-05, |
|
"loss": 0.9798, |
|
"step": 85500 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 2.832879750025199e-05, |
|
"loss": 0.9704, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.8202802136881364e-05, |
|
"loss": 0.9718, |
|
"step": 86500 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 2.807680677351074e-05, |
|
"loss": 0.9781, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 2.795081141014011e-05, |
|
"loss": 0.9745, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.7824816046769482e-05, |
|
"loss": 0.9777, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 2.769882068339885e-05, |
|
"loss": 0.991, |
|
"step": 88500 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.7572825320028224e-05, |
|
"loss": 0.9719, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.7446829956657593e-05, |
|
"loss": 0.9648, |
|
"step": 89500 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.732083459328697e-05, |
|
"loss": 0.9722, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.7194839229916342e-05, |
|
"loss": 0.9762, |
|
"step": 90500 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.706884386654571e-05, |
|
"loss": 0.9843, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.6942848503175084e-05, |
|
"loss": 0.9765, |
|
"step": 91500 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.6816853139804454e-05, |
|
"loss": 0.9799, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2.669085777643383e-05, |
|
"loss": 0.982, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.6564862413063203e-05, |
|
"loss": 0.9774, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.6438867049692572e-05, |
|
"loss": 0.9785, |
|
"step": 93500 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 2.6312871686321945e-05, |
|
"loss": 0.9743, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 2.6186876322951314e-05, |
|
"loss": 0.9636, |
|
"step": 94500 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 2.606088095958069e-05, |
|
"loss": 0.9695, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 2.5934885596210063e-05, |
|
"loss": 0.9645, |
|
"step": 95500 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 2.5808890232839432e-05, |
|
"loss": 0.9772, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 2.5682894869468805e-05, |
|
"loss": 0.9525, |
|
"step": 96500 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 2.5556899506098174e-05, |
|
"loss": 0.9523, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 2.5430904142727547e-05, |
|
"loss": 0.9544, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 2.5304908779356923e-05, |
|
"loss": 0.9656, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 2.5178913415986292e-05, |
|
"loss": 0.964, |
|
"step": 98500 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.5052918052615665e-05, |
|
"loss": 0.9629, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.4926922689245035e-05, |
|
"loss": 0.9499, |
|
"step": 99500 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.480092732587441e-05, |
|
"loss": 0.9429, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.467493196250378e-05, |
|
"loss": 0.9622, |
|
"step": 100500 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.4548936599133153e-05, |
|
"loss": 0.9621, |
|
"step": 101000 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.4422941235762525e-05, |
|
"loss": 0.9588, |
|
"step": 101500 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.4296945872391895e-05, |
|
"loss": 0.9616, |
|
"step": 102000 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.417095050902127e-05, |
|
"loss": 0.9563, |
|
"step": 102500 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.404495514565064e-05, |
|
"loss": 0.9578, |
|
"step": 103000 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.3918959782280013e-05, |
|
"loss": 0.9511, |
|
"step": 103500 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.3792964418909386e-05, |
|
"loss": 0.9685, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.366696905553876e-05, |
|
"loss": 0.9645, |
|
"step": 104500 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.354097369216813e-05, |
|
"loss": 0.9464, |
|
"step": 105000 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.34149783287975e-05, |
|
"loss": 0.946, |
|
"step": 105500 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.3288982965426873e-05, |
|
"loss": 0.9529, |
|
"step": 106000 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 2.3162987602056246e-05, |
|
"loss": 0.9481, |
|
"step": 106500 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 2.303699223868562e-05, |
|
"loss": 0.9501, |
|
"step": 107000 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 2.2910996875314988e-05, |
|
"loss": 0.9357, |
|
"step": 107500 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 2.278500151194436e-05, |
|
"loss": 0.9532, |
|
"step": 108000 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.2659006148573734e-05, |
|
"loss": 0.9488, |
|
"step": 108500 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 2.2533010785203106e-05, |
|
"loss": 0.9322, |
|
"step": 109000 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.240701542183248e-05, |
|
"loss": 0.9514, |
|
"step": 109500 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.228102005846185e-05, |
|
"loss": 0.9613, |
|
"step": 110000 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 2.215502469509122e-05, |
|
"loss": 0.9399, |
|
"step": 110500 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 2.2029029331720594e-05, |
|
"loss": 0.9492, |
|
"step": 111000 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 2.1903033968349963e-05, |
|
"loss": 0.9261, |
|
"step": 111500 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 2.177703860497934e-05, |
|
"loss": 0.9527, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.165104324160871e-05, |
|
"loss": 0.9458, |
|
"step": 112500 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.1525047878238085e-05, |
|
"loss": 0.931, |
|
"step": 113000 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.1399052514867454e-05, |
|
"loss": 0.9366, |
|
"step": 113500 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.1273057151496824e-05, |
|
"loss": 0.9383, |
|
"step": 114000 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.11470617881262e-05, |
|
"loss": 0.9284, |
|
"step": 114500 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.102106642475557e-05, |
|
"loss": 0.9305, |
|
"step": 115000 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.0895071061384942e-05, |
|
"loss": 0.9157, |
|
"step": 115500 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.0769075698014314e-05, |
|
"loss": 0.9431, |
|
"step": 116000 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 2.0643080334643687e-05, |
|
"loss": 0.9255, |
|
"step": 116500 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 2.051708497127306e-05, |
|
"loss": 0.9323, |
|
"step": 117000 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 2.039108960790243e-05, |
|
"loss": 0.9396, |
|
"step": 117500 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 2.0265094244531802e-05, |
|
"loss": 0.9209, |
|
"step": 118000 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 2.0139098881161175e-05, |
|
"loss": 0.9257, |
|
"step": 118500 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 2.0013103517790548e-05, |
|
"loss": 0.9306, |
|
"step": 119000 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.9887108154419917e-05, |
|
"loss": 0.9373, |
|
"step": 119500 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.976111279104929e-05, |
|
"loss": 0.9222, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.9635117427678662e-05, |
|
"loss": 0.9197, |
|
"step": 120500 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.950912206430803e-05, |
|
"loss": 0.9244, |
|
"step": 121000 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.9383126700937408e-05, |
|
"loss": 0.943, |
|
"step": 121500 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.9257131337566777e-05, |
|
"loss": 0.9351, |
|
"step": 122000 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.913113597419615e-05, |
|
"loss": 0.928, |
|
"step": 122500 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.9005140610825523e-05, |
|
"loss": 0.9206, |
|
"step": 123000 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.8879145247454892e-05, |
|
"loss": 0.9219, |
|
"step": 123500 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.8753149884084268e-05, |
|
"loss": 0.9073, |
|
"step": 124000 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.8627154520713637e-05, |
|
"loss": 0.9252, |
|
"step": 124500 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.850115915734301e-05, |
|
"loss": 0.9161, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.8375163793972383e-05, |
|
"loss": 0.9181, |
|
"step": 125500 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.8249168430601756e-05, |
|
"loss": 0.9225, |
|
"step": 126000 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.812317306723113e-05, |
|
"loss": 0.9108, |
|
"step": 126500 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.7997177703860498e-05, |
|
"loss": 0.9115, |
|
"step": 127000 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.787118234048987e-05, |
|
"loss": 0.9239, |
|
"step": 127500 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.7745186977119243e-05, |
|
"loss": 0.9213, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.7619191613748616e-05, |
|
"loss": 0.9211, |
|
"step": 128500 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.7493196250377985e-05, |
|
"loss": 0.9252, |
|
"step": 129000 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.7367200887007358e-05, |
|
"loss": 0.9165, |
|
"step": 129500 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.724120552363673e-05, |
|
"loss": 0.928, |
|
"step": 130000 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.7115210160266104e-05, |
|
"loss": 0.9208, |
|
"step": 130500 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.6989214796895476e-05, |
|
"loss": 0.9182, |
|
"step": 131000 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.6863219433524846e-05, |
|
"loss": 0.9167, |
|
"step": 131500 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.673722407015422e-05, |
|
"loss": 0.9354, |
|
"step": 132000 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.661122870678359e-05, |
|
"loss": 0.8513, |
|
"step": 132500 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.648523334341296e-05, |
|
"loss": 0.8098, |
|
"step": 133000 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.6359237980042337e-05, |
|
"loss": 0.7988, |
|
"step": 133500 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.6233242616671706e-05, |
|
"loss": 0.8004, |
|
"step": 134000 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.6107247253301082e-05, |
|
"loss": 0.8006, |
|
"step": 134500 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.598125188993045e-05, |
|
"loss": 0.7905, |
|
"step": 135000 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.5855256526559824e-05, |
|
"loss": 0.7937, |
|
"step": 135500 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.5729261163189197e-05, |
|
"loss": 0.7973, |
|
"step": 136000 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.5603265799818566e-05, |
|
"loss": 0.8132, |
|
"step": 136500 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.547727043644794e-05, |
|
"loss": 0.8012, |
|
"step": 137000 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.535127507307731e-05, |
|
"loss": 0.8014, |
|
"step": 137500 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.5225279709706683e-05, |
|
"loss": 0.8053, |
|
"step": 138000 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.5099284346336057e-05, |
|
"loss": 0.7884, |
|
"step": 138500 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.4973288982965428e-05, |
|
"loss": 0.8122, |
|
"step": 139000 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.48472936195948e-05, |
|
"loss": 0.8029, |
|
"step": 139500 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.4721298256224172e-05, |
|
"loss": 0.8, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.4595302892853543e-05, |
|
"loss": 0.7929, |
|
"step": 140500 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.4469307529482914e-05, |
|
"loss": 0.8066, |
|
"step": 141000 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.4343312166112288e-05, |
|
"loss": 0.7947, |
|
"step": 141500 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.421731680274166e-05, |
|
"loss": 0.7977, |
|
"step": 142000 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.4091321439371032e-05, |
|
"loss": 0.7844, |
|
"step": 142500 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.3965326076000403e-05, |
|
"loss": 0.7857, |
|
"step": 143000 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.3839330712629774e-05, |
|
"loss": 0.7947, |
|
"step": 143500 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.3713335349259149e-05, |
|
"loss": 0.7909, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.358733998588852e-05, |
|
"loss": 0.7802, |
|
"step": 144500 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.346134462251789e-05, |
|
"loss": 0.79, |
|
"step": 145000 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.3335349259147265e-05, |
|
"loss": 0.7871, |
|
"step": 145500 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.3209353895776636e-05, |
|
"loss": 0.7805, |
|
"step": 146000 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.3083358532406009e-05, |
|
"loss": 0.7923, |
|
"step": 146500 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.295736316903538e-05, |
|
"loss": 0.7878, |
|
"step": 147000 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.2831367805664751e-05, |
|
"loss": 0.7967, |
|
"step": 147500 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.2705372442294126e-05, |
|
"loss": 0.7885, |
|
"step": 148000 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.2579377078923497e-05, |
|
"loss": 0.7872, |
|
"step": 148500 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.2453381715552868e-05, |
|
"loss": 0.8009, |
|
"step": 149000 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.232738635218224e-05, |
|
"loss": 0.7915, |
|
"step": 149500 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.2201390988811611e-05, |
|
"loss": 0.7895, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.2075395625440984e-05, |
|
"loss": 0.7752, |
|
"step": 150500 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.1949400262070357e-05, |
|
"loss": 0.7927, |
|
"step": 151000 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.182340489869973e-05, |
|
"loss": 0.7833, |
|
"step": 151500 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.16974095353291e-05, |
|
"loss": 0.8001, |
|
"step": 152000 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.1571414171958472e-05, |
|
"loss": 0.7981, |
|
"step": 152500 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.1445418808587844e-05, |
|
"loss": 0.7775, |
|
"step": 153000 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.1319423445217217e-05, |
|
"loss": 0.7852, |
|
"step": 153500 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.1193428081846588e-05, |
|
"loss": 0.7913, |
|
"step": 154000 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.1067432718475961e-05, |
|
"loss": 0.7735, |
|
"step": 154500 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.0941437355105334e-05, |
|
"loss": 0.7837, |
|
"step": 155000 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.0815441991734705e-05, |
|
"loss": 0.7872, |
|
"step": 155500 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.0689446628364076e-05, |
|
"loss": 0.786, |
|
"step": 156000 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.0563451264993449e-05, |
|
"loss": 0.7824, |
|
"step": 156500 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.0437455901622821e-05, |
|
"loss": 0.7877, |
|
"step": 157000 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.0311460538252194e-05, |
|
"loss": 0.7916, |
|
"step": 157500 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.0185465174881565e-05, |
|
"loss": 0.7886, |
|
"step": 158000 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.0059469811510936e-05, |
|
"loss": 0.7913, |
|
"step": 158500 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 9.933474448140309e-06, |
|
"loss": 0.7895, |
|
"step": 159000 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 9.807479084769682e-06, |
|
"loss": 0.7897, |
|
"step": 159500 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 9.681483721399053e-06, |
|
"loss": 0.7791, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 9.555488358028425e-06, |
|
"loss": 0.7843, |
|
"step": 160500 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 9.429492994657798e-06, |
|
"loss": 0.7766, |
|
"step": 161000 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 9.303497631287169e-06, |
|
"loss": 0.7818, |
|
"step": 161500 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 9.17750226791654e-06, |
|
"loss": 0.7693, |
|
"step": 162000 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 9.051506904545913e-06, |
|
"loss": 0.7673, |
|
"step": 162500 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 8.925511541175286e-06, |
|
"loss": 0.7785, |
|
"step": 163000 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 8.799516177804658e-06, |
|
"loss": 0.7903, |
|
"step": 163500 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 8.67352081443403e-06, |
|
"loss": 0.7746, |
|
"step": 164000 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 8.5475254510634e-06, |
|
"loss": 0.789, |
|
"step": 164500 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 8.421530087692773e-06, |
|
"loss": 0.785, |
|
"step": 165000 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 8.295534724322146e-06, |
|
"loss": 0.7753, |
|
"step": 165500 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 8.169539360951517e-06, |
|
"loss": 0.7868, |
|
"step": 166000 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 8.04354399758089e-06, |
|
"loss": 0.7737, |
|
"step": 166500 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 7.917548634210262e-06, |
|
"loss": 0.7822, |
|
"step": 167000 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 7.791553270839633e-06, |
|
"loss": 0.7627, |
|
"step": 167500 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 7.665557907469004e-06, |
|
"loss": 0.7791, |
|
"step": 168000 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.539562544098377e-06, |
|
"loss": 0.7829, |
|
"step": 168500 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 7.41356718072775e-06, |
|
"loss": 0.7713, |
|
"step": 169000 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 7.287571817357122e-06, |
|
"loss": 0.7729, |
|
"step": 169500 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 7.161576453986493e-06, |
|
"loss": 0.7776, |
|
"step": 170000 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 7.035581090615866e-06, |
|
"loss": 0.7708, |
|
"step": 170500 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 6.909585727245238e-06, |
|
"loss": 0.7757, |
|
"step": 171000 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 6.78359036387461e-06, |
|
"loss": 0.7628, |
|
"step": 171500 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 6.657595000503981e-06, |
|
"loss": 0.7799, |
|
"step": 172000 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 6.531599637133354e-06, |
|
"loss": 0.7735, |
|
"step": 172500 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 6.405604273762726e-06, |
|
"loss": 0.7719, |
|
"step": 173000 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 6.279608910392099e-06, |
|
"loss": 0.775, |
|
"step": 173500 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 6.1536135470214705e-06, |
|
"loss": 0.7621, |
|
"step": 174000 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 6.027618183650842e-06, |
|
"loss": 0.7791, |
|
"step": 174500 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.901622820280214e-06, |
|
"loss": 0.771, |
|
"step": 175000 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.775627456909586e-06, |
|
"loss": 0.7671, |
|
"step": 175500 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 5.649632093538958e-06, |
|
"loss": 0.776, |
|
"step": 176000 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.52363673016833e-06, |
|
"loss": 0.7749, |
|
"step": 176500 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.397641366797703e-06, |
|
"loss": 0.7824, |
|
"step": 177000 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.271646003427074e-06, |
|
"loss": 0.7729, |
|
"step": 177500 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.1456506400564465e-06, |
|
"loss": 0.7823, |
|
"step": 178000 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.019655276685818e-06, |
|
"loss": 0.7834, |
|
"step": 178500 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.89365991331519e-06, |
|
"loss": 0.7813, |
|
"step": 179000 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.767664549944562e-06, |
|
"loss": 0.7683, |
|
"step": 179500 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.641669186573934e-06, |
|
"loss": 0.761, |
|
"step": 180000 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.515673823203306e-06, |
|
"loss": 0.7549, |
|
"step": 180500 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.389678459832678e-06, |
|
"loss": 0.7734, |
|
"step": 181000 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.2636830964620506e-06, |
|
"loss": 0.7648, |
|
"step": 181500 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.1376877330914225e-06, |
|
"loss": 0.7518, |
|
"step": 182000 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.011692369720794e-06, |
|
"loss": 0.7578, |
|
"step": 182500 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.885697006350166e-06, |
|
"loss": 0.7673, |
|
"step": 183000 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.7597016429795386e-06, |
|
"loss": 0.7848, |
|
"step": 183500 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.6337062796089104e-06, |
|
"loss": 0.7678, |
|
"step": 184000 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.5077109162382828e-06, |
|
"loss": 0.7702, |
|
"step": 184500 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.3817155528676542e-06, |
|
"loss": 0.7643, |
|
"step": 185000 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.2557201894970265e-06, |
|
"loss": 0.7612, |
|
"step": 185500 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.1297248261263984e-06, |
|
"loss": 0.7685, |
|
"step": 186000 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 3.0037294627557707e-06, |
|
"loss": 0.7564, |
|
"step": 186500 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.8777340993851426e-06, |
|
"loss": 0.7607, |
|
"step": 187000 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.751738736014515e-06, |
|
"loss": 0.7596, |
|
"step": 187500 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.625743372643887e-06, |
|
"loss": 0.7622, |
|
"step": 188000 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.4997480092732587e-06, |
|
"loss": 0.7537, |
|
"step": 188500 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.373752645902631e-06, |
|
"loss": 0.7591, |
|
"step": 189000 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.247757282532003e-06, |
|
"loss": 0.7586, |
|
"step": 189500 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.1217619191613752e-06, |
|
"loss": 0.7731, |
|
"step": 190000 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.995766555790747e-06, |
|
"loss": 0.7641, |
|
"step": 190500 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.869771192420119e-06, |
|
"loss": 0.7527, |
|
"step": 191000 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.743775829049491e-06, |
|
"loss": 0.7585, |
|
"step": 191500 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.6177804656788632e-06, |
|
"loss": 0.7585, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.4917851023082353e-06, |
|
"loss": 0.7581, |
|
"step": 192500 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.3657897389376072e-06, |
|
"loss": 0.7578, |
|
"step": 193000 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.2397943755669793e-06, |
|
"loss": 0.7728, |
|
"step": 193500 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.1137990121963514e-06, |
|
"loss": 0.7493, |
|
"step": 194000 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 9.87803648825723e-07, |
|
"loss": 0.7584, |
|
"step": 194500 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 8.618082854550952e-07, |
|
"loss": 0.7619, |
|
"step": 195000 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 7.358129220844674e-07, |
|
"loss": 0.7611, |
|
"step": 195500 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 6.098175587138394e-07, |
|
"loss": 0.7565, |
|
"step": 196000 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.838221953432114e-07, |
|
"loss": 0.7565, |
|
"step": 196500 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.5782683197258347e-07, |
|
"loss": 0.7528, |
|
"step": 197000 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 2.318314686019555e-07, |
|
"loss": 0.7524, |
|
"step": 197500 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.058361052313275e-07, |
|
"loss": 0.7653, |
|
"step": 198000 |
|
}, |
|
{
"epoch": 3.0,
"step": 198420,
"total_flos": 2.7061438809725338e+17,
"train_loss": 1.0211255452190962,
"train_runtime": 50903.0981,
"train_samples_per_second": 38.98,
"train_steps_per_second": 3.898
}
],
"max_steps": 198420,
"num_train_epochs": 3,
"total_flos": 2.7061438809725338e+17,
"trial_name": null,
"trial_params": null
}