{
  "best_metric": 1.5570189952850342,
  "best_model_checkpoint": "/lrde/home2/jchazalo/soduco/article_das_2022/model_pretrained/checkpoint-301038",
  "epoch": 3.0,
  "global_step": 301038,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "learning_rate": 4.991695400580658e-05, "loss": 5.5377, "step": 500},
    {"epoch": 0.01, "learning_rate": 4.983390801161315e-05, "loss": 4.3409, "step": 1000},
    {"epoch": 0.01, "learning_rate": 4.975086201741973e-05, "loss": 4.0691, "step": 1500},
    {"epoch": 0.02, "learning_rate": 4.9667816023226304e-05, "loss": 3.8186, "step": 2000},
    {"epoch": 0.02, "learning_rate": 4.9584770029032886e-05, "loss": 3.6468, "step": 2500},
    {"epoch": 0.03, "learning_rate": 4.950172403483946e-05, "loss": 3.4779, "step": 3000},
    {"epoch": 0.03, "learning_rate": 4.941867804064603e-05, "loss": 3.4288, "step": 3500},
    {"epoch": 0.04, "learning_rate": 4.933563204645261e-05, "loss": 3.2539, "step": 4000},
    {"epoch": 0.04, "learning_rate": 4.925258605225919e-05, "loss": 3.1983, "step": 4500},
    {"epoch": 0.05, "learning_rate": 4.916954005806576e-05, "loss": 3.0558, "step": 5000},
    {"epoch": 0.05, "learning_rate": 4.908649406387234e-05, "loss": 3.0145, "step": 5500},
    {"epoch": 0.06, "learning_rate": 4.900344806967891e-05, "loss": 2.9689, "step": 6000},
    {"epoch": 0.06, "learning_rate": 4.892040207548549e-05, "loss": 2.9319, "step": 6500},
    {"epoch": 0.07, "learning_rate": 4.883735608129206e-05, "loss": 2.9474, "step": 7000},
    {"epoch": 0.07, "learning_rate": 4.8754310087098645e-05, "loss": 2.8579, "step": 7500},
    {"epoch": 0.08, "learning_rate": 4.8671264092905214e-05, "loss": 2.8567, "step": 8000},
    {"epoch": 0.08, "learning_rate": 4.858821809871179e-05, "loss": 2.8157, "step": 8500},
    {"epoch": 0.09, "learning_rate": 4.850517210451837e-05, "loss": 2.7768, "step": 9000},
    {"epoch": 0.09, "learning_rate": 4.8422126110324947e-05, "loss": 2.7482, "step": 9500},
    {"epoch": 0.1, "learning_rate": 4.833908011613152e-05, "loss": 2.7077, "step": 10000},
    {"epoch": 0.1, "learning_rate": 4.82560341219381e-05, "loss": 2.6328, "step": 10500},
    {"epoch": 0.11, "learning_rate": 4.817298812774467e-05, "loss": 2.6814, "step": 11000},
    {"epoch": 0.11, "learning_rate": 4.808994213355125e-05, "loss": 2.612, "step": 11500},
    {"epoch": 0.12, "learning_rate": 4.800689613935783e-05, "loss": 2.533, "step": 12000},
    {"epoch": 0.12, "learning_rate": 4.79238501451644e-05, "loss": 2.6266, "step": 12500},
    {"epoch": 0.13, "learning_rate": 4.7840804150970974e-05, "loss": 2.5415, "step": 13000},
    {"epoch": 0.13, "learning_rate": 4.7757758156777556e-05, "loss": 2.6189, "step": 13500},
    {"epoch": 0.14, "learning_rate": 4.767471216258413e-05, "loss": 2.521, "step": 14000},
    {"epoch": 0.14, "learning_rate": 4.75916661683907e-05, "loss": 2.5118, "step": 14500},
    {"epoch": 0.15, "learning_rate": 4.7508620174197275e-05, "loss": 2.5452, "step": 15000},
    {"epoch": 0.15, "learning_rate": 4.742557418000386e-05, "loss": 2.4781, "step": 15500},
    {"epoch": 0.16, "learning_rate": 4.734252818581043e-05, "loss": 2.4724, "step": 16000},
    {"epoch": 0.16, "learning_rate": 4.725948219161701e-05, "loss": 2.4601, "step": 16500},
    {"epoch": 0.17, "learning_rate": 4.717643619742358e-05, "loss": 2.4624, "step": 17000},
    {"epoch": 0.17, "learning_rate": 4.709339020323016e-05, "loss": 2.4557, "step": 17500},
    {"epoch": 0.18, "learning_rate": 4.701034420903673e-05, "loss": 2.4614, "step": 18000},
    {"epoch": 0.18, "learning_rate": 4.6927298214843315e-05, "loss": 2.4857, "step": 18500},
    {"epoch": 0.19, "learning_rate": 4.6844252220649884e-05, "loss": 2.4286, "step": 19000},
    {"epoch": 0.19, "learning_rate": 4.676120622645646e-05, "loss": 2.4001, "step": 19500},
    {"epoch": 0.2, "learning_rate": 4.667816023226304e-05, "loss": 2.3912, "step": 20000},
    {"epoch": 0.2, "learning_rate": 4.659511423806962e-05, "loss": 2.4437, "step": 20500},
    {"epoch": 0.21, "learning_rate": 4.651206824387619e-05, "loss": 2.3163, "step": 21000},
    {"epoch": 0.21, "learning_rate": 4.642902224968277e-05, "loss": 2.3766, "step": 21500},
    {"epoch": 0.22, "learning_rate": 4.634597625548934e-05, "loss": 2.4041, "step": 22000},
    {"epoch": 0.22, "learning_rate": 4.626293026129592e-05, "loss": 2.3604, "step": 22500},
    {"epoch": 0.23, "learning_rate": 4.61798842671025e-05, "loss": 2.2922, "step": 23000},
    {"epoch": 0.23, "learning_rate": 4.609683827290907e-05, "loss": 2.3668, "step": 23500},
    {"epoch": 0.24, "learning_rate": 4.6013792278715644e-05, "loss": 2.2715, "step": 24000},
    {"epoch": 0.24, "learning_rate": 4.5930746284522226e-05, "loss": 2.3434, "step": 24500},
    {"epoch": 0.25, "learning_rate": 4.58477002903288e-05, "loss": 2.3345, "step": 25000},
    {"epoch": 0.25, "learning_rate": 4.5764654296135376e-05, "loss": 2.3421, "step": 25500},
    {"epoch": 0.26, "learning_rate": 4.5681608301941945e-05, "loss": 2.2897, "step": 26000},
    {"epoch": 0.26, "learning_rate": 4.559856230774853e-05, "loss": 2.3008, "step": 26500},
    {"epoch": 0.27, "learning_rate": 4.55155163135551e-05, "loss": 2.2646, "step": 27000},
    {"epoch": 0.27, "learning_rate": 4.543247031936168e-05, "loss": 2.3077, "step": 27500},
    {"epoch": 0.28, "learning_rate": 4.534942432516825e-05, "loss": 2.271, "step": 28000},
    {"epoch": 0.28, "learning_rate": 4.526637833097483e-05, "loss": 2.3123, "step": 28500},
    {"epoch": 0.29, "learning_rate": 4.5183332336781403e-05, "loss": 2.2825, "step": 29000},
    {"epoch": 0.29, "learning_rate": 4.5100286342587985e-05, "loss": 2.233, "step": 29500},
    {"epoch": 0.3, "learning_rate": 4.5017240348394554e-05, "loss": 2.2651, "step": 30000},
    {"epoch": 0.3, "learning_rate": 4.493419435420113e-05, "loss": 2.2706, "step": 30500},
    {"epoch": 0.31, "learning_rate": 4.485114836000771e-05, "loss": 2.2816, "step": 31000},
    {"epoch": 0.31, "learning_rate": 4.476810236581429e-05, "loss": 2.2128, "step": 31500},
    {"epoch": 0.32, "learning_rate": 4.468505637162086e-05, "loss": 2.2582, "step": 32000},
    {"epoch": 0.32, "learning_rate": 4.460201037742744e-05, "loss": 2.2227, "step": 32500},
    {"epoch": 0.33, "learning_rate": 4.451896438323401e-05, "loss": 2.2747, "step": 33000},
    {"epoch": 0.33, "learning_rate": 4.443591838904059e-05, "loss": 2.2617, "step": 33500},
    {"epoch": 0.34, "learning_rate": 4.435287239484717e-05, "loss": 2.2421, "step": 34000},
    {"epoch": 0.34, "learning_rate": 4.426982640065374e-05, "loss": 2.2419, "step": 34500},
    {"epoch": 0.35, "learning_rate": 4.4186780406460314e-05, "loss": 2.2061, "step": 35000},
    {"epoch": 0.35, "learning_rate": 4.410373441226689e-05, "loss": 2.2277, "step": 35500},
    {"epoch": 0.36, "learning_rate": 4.402068841807347e-05, "loss": 2.2249, "step": 36000},
    {"epoch": 0.36, "learning_rate": 4.3937642423880046e-05, "loss": 2.2255, "step": 36500},
    {"epoch": 0.37, "learning_rate": 4.3854596429686615e-05, "loss": 2.1634, "step": 37000},
    {"epoch": 0.37, "learning_rate": 4.37715504354932e-05, "loss": 2.2847, "step": 37500},
    {"epoch": 0.38, "learning_rate": 4.368850444129977e-05, "loss": 2.1552, "step": 38000},
    {"epoch": 0.38, "learning_rate": 4.360545844710635e-05, "loss": 2.0838, "step": 38500},
    {"epoch": 0.39, "learning_rate": 4.352241245291292e-05, "loss": 2.158, "step": 39000},
    {"epoch": 0.39, "learning_rate": 4.34393664587195e-05, "loss": 2.1425, "step": 39500},
    {"epoch": 0.4, "learning_rate": 4.3356320464526073e-05, "loss": 2.2127, "step": 40000},
    {"epoch": 0.4, "learning_rate": 4.3273274470332656e-05, "loss": 2.1698, "step": 40500},
    {"epoch": 0.41, "learning_rate": 4.3190228476139224e-05, "loss": 2.117, "step": 41000},
    {"epoch": 0.41, "learning_rate": 4.31071824819458e-05, "loss": 2.2138, "step": 41500},
    {"epoch": 0.42, "learning_rate": 4.302413648775238e-05, "loss": 2.156, "step": 42000},
    {"epoch": 0.42, "learning_rate": 4.294109049355896e-05, "loss": 2.1724, "step": 42500},
    {"epoch": 0.43, "learning_rate": 4.285804449936553e-05, "loss": 2.2016, "step": 43000},
    {"epoch": 0.43, "learning_rate": 4.277499850517211e-05, "loss": 2.1283, "step": 43500},
    {"epoch": 0.44, "learning_rate": 4.269195251097868e-05, "loss": 2.1544, "step": 44000},
    {"epoch": 0.44, "learning_rate": 4.260890651678526e-05, "loss": 2.155, "step": 44500},
    {"epoch": 0.45, "learning_rate": 4.252586052259183e-05, "loss": 2.1064, "step": 45000},
    {"epoch": 0.45, "learning_rate": 4.244281452839841e-05, "loss": 2.1419, "step": 45500},
    {"epoch": 0.46, "learning_rate": 4.2359768534204984e-05, "loss": 2.1219, "step": 46000},
    {"epoch": 0.46, "learning_rate": 4.227672254001156e-05, "loss": 2.1187, "step": 46500},
    {"epoch": 0.47, "learning_rate": 4.219367654581814e-05, "loss": 2.1089, "step": 47000},
    {"epoch": 0.47, "learning_rate": 4.2110630551624716e-05, "loss": 2.0924, "step": 47500},
    {"epoch": 0.48, "learning_rate": 4.2027584557431285e-05, "loss": 2.1622, "step": 48000},
    {"epoch": 0.48, "learning_rate": 4.194453856323787e-05, "loss": 2.1085, "step": 48500},
    {"epoch": 0.49, "learning_rate": 4.186149256904444e-05, "loss": 2.1393, "step": 49000},
    {"epoch": 0.49, "learning_rate": 4.177844657485102e-05, "loss": 2.1106, "step": 49500},
    {"epoch": 0.5, "learning_rate": 4.169540058065759e-05, "loss": 2.0899, "step": 50000},
    {"epoch": 0.5, "learning_rate": 4.161235458646417e-05, "loss": 2.0945, "step": 50500},
    {"epoch": 0.51, "learning_rate": 4.1529308592270744e-05, "loss": 2.1392, "step": 51000},
    {"epoch": 0.51, "learning_rate": 4.1446262598077326e-05, "loss": 2.0147, "step": 51500},
    {"epoch": 0.52, "learning_rate": 4.13632166038839e-05, "loss": 2.1004, "step": 52000},
    {"epoch": 0.52, "learning_rate": 4.128017060969047e-05, "loss": 2.082, "step": 52500},
    {"epoch": 0.53, "learning_rate": 4.119712461549705e-05, "loss": 2.0517, "step": 53000},
    {"epoch": 0.53, "learning_rate": 4.111407862130363e-05, "loss": 2.0961, "step": 53500},
    {"epoch": 0.54, "learning_rate": 4.10310326271102e-05, "loss": 2.0637, "step": 54000},
    {"epoch": 0.54, "learning_rate": 4.094798663291677e-05, "loss": 2.0417, "step": 54500},
    {"epoch": 0.55, "learning_rate": 4.086494063872335e-05, "loss": 2.1015, "step": 55000},
    {"epoch": 0.55, "learning_rate": 4.078189464452993e-05, "loss": 2.0383, "step": 55500},
    {"epoch": 0.56, "learning_rate": 4.06988486503365e-05, "loss": 2.1315, "step": 56000},
    {"epoch": 0.56, "learning_rate": 4.061580265614308e-05, "loss": 2.1347, "step": 56500},
    {"epoch": 0.57, "learning_rate": 4.0532756661949654e-05, "loss": 2.0428, "step": 57000},
    {"epoch": 0.57, "learning_rate": 4.044971066775623e-05, "loss": 2.0839, "step": 57500},
    {"epoch": 0.58, "learning_rate": 4.036666467356281e-05, "loss": 2.0674, "step": 58000},
    {"epoch": 0.58, "learning_rate": 4.0283618679369387e-05, "loss": 2.0504, "step": 58500},
    {"epoch": 0.59, "learning_rate": 4.0200572685175955e-05, "loss": 2.0351, "step": 59000},
    {"epoch": 0.59, "learning_rate": 4.011752669098254e-05, "loss": 2.0675, "step": 59500},
    {"epoch": 0.6, "learning_rate": 4.003448069678911e-05, "loss": 1.9957, "step": 60000},
    {"epoch": 0.6, "learning_rate": 3.995143470259569e-05, "loss": 2.0214, "step": 60500},
    {"epoch": 0.61, "learning_rate": 3.986838870840226e-05, "loss": 2.0477, "step": 61000},
    {"epoch": 0.61, "learning_rate": 3.978534271420884e-05, "loss": 2.0456, "step": 61500},
    {"epoch": 0.62, "learning_rate": 3.9702296720015414e-05, "loss": 2.0229, "step": 62000},
    {"epoch": 0.62, "learning_rate": 3.9619250725821996e-05, "loss": 2.0165, "step": 62500},
    {"epoch": 0.63, "learning_rate": 3.953620473162857e-05, "loss": 1.9758, "step": 63000},
    {"epoch": 0.63, "learning_rate": 3.945315873743514e-05, "loss": 2.0707, "step": 63500},
    {"epoch": 0.64, "learning_rate": 3.937011274324172e-05, "loss": 2.0189, "step": 64000},
    {"epoch": 0.64, "learning_rate": 3.92870667490483e-05, "loss": 1.9979, "step": 64500},
    {"epoch": 0.65, "learning_rate": 3.920402075485487e-05, "loss": 2.0263, "step": 65000},
    {"epoch": 0.65, "learning_rate": 3.912097476066145e-05, "loss": 2.0242, "step": 65500},
    {"epoch": 0.66, "learning_rate": 3.903792876646802e-05, "loss": 2.0036, "step": 66000},
    {"epoch": 0.66, "learning_rate": 3.89548827722746e-05, "loss": 1.9882, "step": 66500},
    {"epoch": 0.67, "learning_rate": 3.887183677808117e-05, "loss": 1.9732, "step": 67000},
    {"epoch": 0.67, "learning_rate": 3.878879078388775e-05, "loss": 1.9732, "step": 67500},
    {"epoch": 0.68, "learning_rate": 3.8705744789694324e-05, "loss": 2.0815, "step": 68000},
    {"epoch": 0.68, "learning_rate": 3.86226987955009e-05, "loss": 2.0008, "step": 68500},
    {"epoch": 0.69, "learning_rate": 3.853965280130748e-05, "loss": 1.9707, "step": 69000},
    {"epoch": 0.69, "learning_rate": 3.8456606807114057e-05, "loss": 1.9812, "step": 69500},
    {"epoch": 0.7, "learning_rate": 3.8373560812920625e-05, "loss": 1.9863, "step": 70000},
    {"epoch": 0.7, "learning_rate": 3.829051481872721e-05, "loss": 1.9619, "step": 70500},
    {"epoch": 0.71, "learning_rate": 3.820746882453378e-05, "loss": 1.9504, "step": 71000},
    {"epoch": 0.71, "learning_rate": 3.812442283034036e-05, "loss": 2.015, "step": 71500},
    {"epoch": 0.72, "learning_rate": 3.804137683614693e-05, "loss": 2.0166, "step": 72000},
    {"epoch": 0.72, "learning_rate": 3.795833084195351e-05, "loss": 1.9393, "step": 72500},
    {"epoch": 0.73, "learning_rate": 3.7875284847760084e-05, "loss": 1.9721, "step": 73000},
    {"epoch": 0.73, "learning_rate": 3.7792238853566666e-05, "loss": 1.9453, "step": 73500},
    {"epoch": 0.74, "learning_rate": 3.770919285937324e-05, "loss": 1.9656, "step": 74000},
    {"epoch": 0.74, "learning_rate": 3.762614686517981e-05, "loss": 1.9776, "step": 74500},
    {"epoch": 0.75, "learning_rate": 3.7543100870986385e-05, "loss": 1.9679, "step": 75000},
    {"epoch": 0.75, "learning_rate": 3.746005487679297e-05, "loss": 2.0017, "step": 75500},
    {"epoch": 0.76, "learning_rate": 3.737700888259954e-05, "loss": 1.9355, "step": 76000},
    {"epoch": 0.76, "learning_rate": 3.729396288840612e-05, "loss": 2.0124, "step": 76500},
    {"epoch": 0.77, "learning_rate": 3.721091689421269e-05, "loss": 1.9671, "step": 77000},
    {"epoch": 0.77, "learning_rate": 3.712787090001927e-05, "loss": 1.9514, "step": 77500},
    {"epoch": 0.78, "learning_rate": 3.704482490582584e-05, "loss": 1.9842, "step": 78000},
    {"epoch": 0.78, "learning_rate": 3.696177891163242e-05, "loss": 1.9971, "step": 78500},
    {"epoch": 0.79, "learning_rate": 3.6878732917438994e-05, "loss": 1.9237, "step": 79000},
    {"epoch": 0.79, "learning_rate": 3.679568692324557e-05, "loss": 1.9427, "step": 79500},
    {"epoch": 0.8, "learning_rate": 3.671264092905215e-05, "loss": 1.9638, "step": 80000},
    {"epoch": 0.8, "learning_rate": 3.662959493485873e-05, "loss": 1.9808, "step": 80500},
    {"epoch": 0.81, "learning_rate": 3.6546548940665295e-05, "loss": 1.9247, "step": 81000},
    {"epoch": 0.81, "learning_rate": 3.646350294647188e-05, "loss": 1.9813, "step": 81500},
    {"epoch": 0.82, "learning_rate": 3.638045695227845e-05, "loss": 1.8976, "step": 82000},
    {"epoch": 0.82, "learning_rate": 3.629741095808503e-05, "loss": 1.9592, "step": 82500},
    {"epoch": 0.83, "learning_rate": 3.62143649638916e-05, "loss": 1.9644, "step": 83000},
    {"epoch": 0.83, "learning_rate": 3.613131896969818e-05, "loss": 1.9495, "step": 83500},
    {"epoch": 0.84, "learning_rate": 3.6048272975504754e-05, "loss": 1.9123, "step": 84000},
    {"epoch": 0.84, "learning_rate": 3.596522698131133e-05, "loss": 1.9777, "step": 84500},
    {"epoch": 0.85, "learning_rate": 3.588218098711791e-05, "loss": 1.9372, "step": 85000},
    {"epoch": 0.85, "learning_rate": 3.579913499292448e-05, "loss": 1.9425, "step": 85500},
    {"epoch": 0.86, "learning_rate": 3.5716088998731055e-05, "loss": 1.8948, "step": 86000},
    {"epoch": 0.86, "learning_rate": 3.563304300453764e-05, "loss": 1.9407, "step": 86500},
    {"epoch": 0.87, "learning_rate": 3.554999701034421e-05, "loss": 1.9079, "step": 87000},
    {"epoch": 0.87, "learning_rate": 3.546695101615079e-05, "loss": 1.9738, "step": 87500},
    {"epoch": 0.88, "learning_rate": 3.538390502195736e-05, "loss": 1.9636, "step": 88000},
    {"epoch": 0.88, "learning_rate": 3.530085902776394e-05, "loss": 1.9252, "step": 88500},
    {"epoch": 0.89, "learning_rate": 3.5217813033570513e-05, "loss": 1.9185, "step": 89000},
    {"epoch": 0.89, "learning_rate": 3.5134767039377096e-05, "loss": 1.9472, "step": 89500},
    {"epoch": 0.9, "learning_rate": 3.5051721045183664e-05, "loss": 1.9579, "step": 90000},
    {"epoch": 0.9, "learning_rate": 3.496867505099024e-05, "loss": 1.9113, "step": 90500},
    {"epoch": 0.91, "learning_rate": 3.488562905679682e-05, "loss": 1.949, "step": 91000},
    {"epoch": 0.91, "learning_rate": 3.48025830626034e-05, "loss": 1.9589, "step": 91500},
    {"epoch": 0.92, "learning_rate": 3.4719537068409965e-05, "loss": 1.8764, "step": 92000},
    {"epoch": 0.92, "learning_rate": 3.463649107421655e-05, "loss": 1.9191, "step": 92500},
    {"epoch": 0.93, "learning_rate": 3.455344508002312e-05, "loss": 1.8962, "step": 93000},
    {"epoch": 0.93, "learning_rate": 3.44703990858297e-05, "loss": 1.9213, "step": 93500},
    {"epoch": 0.94, "learning_rate": 3.438735309163627e-05, "loss": 1.8769, "step": 94000},
    {"epoch": 0.94, "learning_rate": 3.430430709744285e-05, "loss": 1.9639, "step": 94500},
    {"epoch": 0.95, "learning_rate": 3.4221261103249424e-05, "loss": 1.919, "step": 95000},
    {"epoch": 0.95, "learning_rate": 3.4138215109056e-05, "loss": 1.911, "step": 95500},
    {"epoch": 0.96, "learning_rate": 3.405516911486258e-05, "loss": 1.9024, "step": 96000},
    {"epoch": 0.96, "learning_rate": 3.397212312066915e-05, "loss": 1.871, "step": 96500},
    {"epoch": 0.97, "learning_rate": 3.3889077126475725e-05, "loss": 1.9182, "step": 97000},
    {"epoch": 0.97, "learning_rate": 3.380603113228231e-05, "loss": 1.919, "step": 97500},
    {"epoch": 0.98, "learning_rate": 3.372298513808888e-05, "loss": 1.9493, "step": 98000},
    {"epoch": 0.98, "learning_rate": 3.363993914389546e-05, "loss": 1.8757, "step": 98500},
    {"epoch": 0.99, "learning_rate": 3.355689314970203e-05, "loss": 1.9248, "step": 99000},
    {"epoch": 0.99, "learning_rate": 3.347384715550861e-05, "loss": 1.8484, "step": 99500},
    {"epoch": 1.0, "learning_rate": 3.3390801161315183e-05, "loss": 1.9603, "step": 100000},
    {"epoch": 1.0, "eval_loss": 1.8004786968231201, "eval_runtime": 62.0584, "eval_samples_per_second": 680.826, "eval_steps_per_second": 85.113, "step": 100346},
    {"epoch": 1.0, "learning_rate": 3.3307755167121766e-05, "loss": 1.9152, "step": 100500},
    {"epoch": 1.01, "learning_rate": 3.3224709172928334e-05, "loss": 1.8557, "step": 101000},
    {"epoch": 1.01, "learning_rate": 3.314166317873491e-05, "loss": 1.9144, "step": 101500},
    {"epoch": 1.02, "learning_rate": 3.305861718454149e-05, "loss": 1.8657, "step": 102000},
    {"epoch": 1.02, "learning_rate": 3.297557119034807e-05, "loss": 1.8971, "step": 102500},
    {"epoch": 1.03, "learning_rate": 3.289252519615464e-05, "loss": 1.7939, "step": 103000},
    {"epoch": 1.03, "learning_rate": 3.280947920196122e-05, "loss": 1.8882, "step": 103500},
    {"epoch": 1.04, "learning_rate": 3.272643320776779e-05, "loss": 1.8827, "step": 104000},
    {"epoch": 1.04, "learning_rate": 3.264338721357437e-05, "loss": 1.8452, "step": 104500},
    {"epoch": 1.05, "learning_rate": 3.256034121938094e-05, "loss": 1.8755, "step": 105000},
    {"epoch": 1.05, "learning_rate": 3.247729522518752e-05, "loss": 1.8606, "step": 105500},
    {"epoch": 1.06, "learning_rate": 3.2394249230994094e-05, "loss": 1.8403, "step": 106000},
    {"epoch": 1.06, "learning_rate": 3.231120323680067e-05, "loss": 1.8451, "step": 106500},
    {"epoch": 1.07, "learning_rate": 3.222815724260725e-05, "loss": 1.8768, "step": 107000},
    {"epoch": 1.07, "learning_rate": 3.214511124841382e-05, "loss": 1.8505, "step": 107500},
    {"epoch": 1.08, "learning_rate": 3.2062065254220395e-05, "loss": 1.8806, "step": 108000},
    {"epoch": 1.08, "learning_rate": 3.197901926002698e-05, "loss": 1.8913, "step": 108500},
    {"epoch": 1.09, "learning_rate": 3.189597326583355e-05, "loss": 1.8635, "step": 109000},
    {"epoch": 1.09, "learning_rate": 3.181292727164013e-05, "loss": 1.8529, "step": 109500},
    {"epoch": 1.1, "learning_rate": 3.17298812774467e-05, "loss": 1.7873, "step": 110000},
    {"epoch": 1.1, "learning_rate": 3.164683528325328e-05, "loss": 1.8758, "step": 110500},
    {"epoch": 1.11, "learning_rate": 3.1563789289059854e-05, "loss": 1.8157, "step": 111000},
    {"epoch": 1.11, "learning_rate": 3.1480743294866436e-05, "loss": 1.8983, "step": 111500},
    {"epoch": 1.12, "learning_rate": 3.1397697300673004e-05, "loss": 1.8393, "step": 112000},
    {"epoch": 1.12, "learning_rate": 3.131465130647958e-05, "loss": 1.8297, "step": 112500},
    {"epoch": 1.13, "learning_rate": 3.123160531228616e-05, "loss": 1.8603, "step": 113000},
    {"epoch": 1.13, "learning_rate": 3.114855931809274e-05, "loss": 1.8201, "step": 113500},
    {"epoch": 1.14, "learning_rate": 3.106551332389931e-05, "loss": 1.823, "step": 114000},
    {"epoch": 1.14, "learning_rate": 3.098246732970588e-05, "loss": 1.8348, "step": 114500},
    {"epoch": 1.15, "learning_rate": 3.089942133551246e-05, "loss": 1.7525, "step": 115000},
    {"epoch": 1.15, "learning_rate": 3.081637534131904e-05, "loss": 1.9044, "step": 115500},
    {"epoch": 1.16, "learning_rate": 3.073332934712561e-05, "loss": 1.8235, "step": 116000},
    {"epoch": 1.16, "learning_rate": 3.065028335293219e-05, "loss": 1.8582, "step": 116500},
    {"epoch": 1.17, "learning_rate": 3.0567237358738764e-05, "loss": 1.8067, "step": 117000},
    {"epoch": 1.17, "learning_rate": 3.048419136454534e-05, "loss": 1.8657, "step": 117500},
    {"epoch": 1.18, "learning_rate": 3.0401145370351918e-05, "loss": 1.8455, "step": 118000},
    {"epoch": 1.18, "learning_rate": 3.0318099376158493e-05, "loss": 1.8431, "step": 118500},
    {"epoch": 1.19, "learning_rate": 3.023505338196507e-05, "loss": 1.8478, "step": 119000},
    {"epoch": 1.19, "learning_rate": 3.0152007387771647e-05, "loss": 1.8247, "step": 119500},
    {"epoch": 1.2, "learning_rate": 3.0068961393578222e-05, "loss": 1.846, "step": 120000},
    {"epoch": 1.2, "learning_rate": 2.9985915399384794e-05, "loss": 1.8054, "step": 120500},
    {"epoch": 1.21, "learning_rate": 2.9902869405191376e-05, "loss": 1.8511, "step": 121000},
    {"epoch": 1.21, "learning_rate": 2.981982341099795e-05, "loss": 1.826, "step": 121500},
    {"epoch": 1.22, "learning_rate": 2.9736777416804524e-05, "loss": 1.807, "step": 122000},
    {"epoch": 1.22, "learning_rate": 2.9653731422611102e-05, "loss": 1.812, "step": 122500},
    {"epoch": 1.23, "learning_rate": 2.9570685428417678e-05, "loss": 1.8308, "step": 123000},
    {"epoch": 1.23, "learning_rate": 2.9487639434224253e-05, "loss": 1.7933, "step": 123500},
    {"epoch": 1.24, "learning_rate": 2.9404593440030825e-05, "loss": 1.8197, "step": 124000},
    {"epoch": 1.24, "learning_rate": 2.9321547445837403e-05, "loss": 1.8258, "step": 124500},
    {"epoch": 1.25, "learning_rate": 2.923850145164398e-05, "loss": 1.8341, "step": 125000},
    {"epoch": 1.25, "learning_rate": 2.9155455457450554e-05, "loss": 1.8089, "step": 125500},
    {"epoch": 1.26, "learning_rate": 2.9072409463257133e-05, "loss": 1.8254, "step": 126000},
    {"epoch": 1.26, "learning_rate": 2.8989363469063708e-05, "loss": 1.7734, "step": 126500},
    {"epoch": 1.27, "learning_rate": 2.890631747487028e-05, "loss": 1.811, "step": 127000},
    {"epoch": 1.27, "learning_rate": 2.8823271480676862e-05, "loss": 1.775, "step": 127500},
    {"epoch": 1.28, "learning_rate": 2.8740225486483434e-05, "loss": 1.863, "step": 128000},
    {"epoch": 1.28, "learning_rate": 2.865717949229001e-05, "loss": 1.8061, "step": 128500},
    {"epoch": 1.29, "learning_rate": 2.8574133498096588e-05, "loss": 1.7966, "step": 129000},
    {"epoch": 1.29, "learning_rate": 2.8491087503903163e-05, "loss": 1.8331, "step": 129500},
    {"epoch": 1.3, "learning_rate": 2.840804150970974e-05, "loss": 1.7619, "step": 130000},
    {"epoch": 1.3, "learning_rate": 2.8324995515516317e-05, "loss": 1.8034, "step": 130500},
    {"epoch": 1.31, "learning_rate": 2.8241949521322892e-05, "loss": 1.8151, "step": 131000},
    {"epoch": 1.31, "learning_rate": 2.8158903527129464e-05, "loss": 1.817, "step": 131500},
    {"epoch": 1.32, "learning_rate": 2.8075857532936046e-05, "loss": 1.8221, "step": 132000},
    {"epoch": 1.32, "learning_rate": 2.799281153874262e-05, "loss": 1.8163, "step": 132500},
    {"epoch": 1.33, "learning_rate": 2.7909765544549194e-05, "loss": 1.8357, "step": 133000},
    {"epoch": 1.33, "learning_rate": 2.782671955035577e-05, "loss": 1.7666, "step": 133500},
    {"epoch": 1.34, "learning_rate": 2.7743673556162348e-05, "loss": 1.8299, "step": 134000},
    {"epoch": 1.34, "learning_rate": 2.7660627561968923e-05, "loss": 1.7589, "step": 134500},
    {"epoch": 1.35, "learning_rate": 2.7577581567775495e-05, "loss": 1.7555, "step": 135000},
    {"epoch": 1.35, "learning_rate": 2.7494535573582077e-05, "loss": 1.7597, "step": 135500},
    {"epoch": 1.36, "learning_rate": 2.741148957938865e-05, "loss": 1.8411, "step": 136000},
    {"epoch": 1.36, "learning_rate": 2.7328443585195224e-05, "loss": 1.8237, "step": 136500},
    {"epoch": 1.37, "learning_rate": 2.7245397591001803e-05, "loss": 1.8343, "step": 137000},
    {"epoch": 1.37, "learning_rate": 2.7162351596808378e-05, "loss": 1.8008, "step": 137500},
    {"epoch": 1.38, "learning_rate": 2.707930560261495e-05, "loss": 1.7594, "step": 138000},
    {"epoch": 1.38, "learning_rate": 2.6996259608421532e-05, "loss": 1.8164, "step": 138500},
    {"epoch": 1.39, "learning_rate": 2.6913213614228104e-05, "loss": 1.8277, "step": 139000},
    {"epoch": 1.39, "learning_rate": 2.683016762003468e-05, "loss": 1.7493, "step": 139500},
    {"epoch": 1.4, "learning_rate": 2.6747121625841258e-05, "loss": 1.8044, "step": 140000},
    {"epoch": 1.4, "learning_rate": 2.6664075631647833e-05, "loss": 1.8229, "step": 140500},
    {"epoch": 1.41, "learning_rate": 2.658102963745441e-05, "loss": 1.8213, "step": 141000},
    {"epoch": 1.41, "learning_rate": 2.6497983643260987e-05, "loss": 1.8313, "step": 141500},
    {"epoch": 1.42, "learning_rate": 2.6414937649067563e-05, "loss": 1.7616, "step": 142000},
    {"epoch": 1.42, "learning_rate": 2.6331891654874134e-05, "loss": 1.7895, "step": 142500},
    {"epoch": 1.43, "learning_rate": 2.6248845660680717e-05, "loss": 1.8175, "step": 143000},
    {"epoch": 1.43, "learning_rate": 2.616579966648729e-05, "loss": 1.7599, "step": 143500},
    {"epoch": 1.44, "learning_rate": 2.6082753672293864e-05, "loss": 1.789, "step": 144000},
    {"epoch": 1.44, "learning_rate": 2.599970767810044e-05, "loss": 1.7954, "step": 144500},
    {"epoch": 1.45, "learning_rate": 2.5916661683907018e-05, "loss": 1.7553, "step": 145000},
    {"epoch": 1.45, "learning_rate": 2.5833615689713593e-05, "loss": 1.8179, "step": 145500},
    {"epoch": 1.45, "learning_rate": 2.5750569695520165e-05, "loss": 1.794, "step": 146000},
    {"epoch": 1.46, "learning_rate": 2.5667523701326747e-05, "loss": 1.7899, "step": 146500},
    {"epoch": 1.46, "learning_rate": 2.558447770713332e-05, "loss": 1.7393, "step": 147000},
    {"epoch": 1.47, "learning_rate": 2.5501431712939894e-05, "loss": 1.7655, "step": 147500},
    {"epoch": 1.47, "learning_rate": 2.5418385718746473e-05, "loss": 1.8001, "step": 148000},
    {"epoch": 1.48, "learning_rate": 2.5335339724553048e-05, "loss": 1.7506, "step": 148500},
    {"epoch": 1.48, "learning_rate": 2.5252293730359623e-05, "loss": 1.7704, "step": 149000},
    {"epoch": 1.49, "learning_rate": 2.5169247736166202e-05, "loss": 1.7745, "step": 149500},
    {"epoch": 1.49, "learning_rate": 2.5086201741972774e-05, "loss": 1.7585, "step": 150000},
    {"epoch": 1.5, "learning_rate": 2.500315574777935e-05, "loss": 1.7453, "step": 150500},
    {"epoch": 1.5, "learning_rate": 2.4920109753585928e-05, "loss": 1.7873, "step": 151000},
    {"epoch": 1.51, "learning_rate": 2.4837063759392503e-05, "loss": 1.7854, "step": 151500},
    {"epoch": 1.51, "learning_rate": 2.475401776519908e-05, "loss": 1.7683, "step": 152000},
    {"epoch": 1.52, "learning_rate": 2.4670971771005654e-05, "loss": 1.8352, "step": 152500},
    {"epoch": 1.52, "learning_rate": 2.4587925776812233e-05, "loss": 1.734, "step": 153000},
    {"epoch": 1.53, "learning_rate": 2.4504879782618805e-05, "loss": 1.7883, "step": 153500},
    {"epoch": 1.53, "learning_rate": 2.4421833788425383e-05, "loss": 1.7501, "step": 154000},
    {"epoch": 1.54, "learning_rate": 2.433878779423196e-05, "loss": 1.8167, "step": 154500},
    {"epoch": 1.54, "learning_rate": 2.4255741800038534e-05, "loss": 1.7432, "step": 155000},
    {"epoch": 1.55, "learning_rate": 2.4172695805845112e-05, "loss": 1.7148, "step": 155500},
    {"epoch": 1.55, "learning_rate": 2.4089649811651684e-05, "loss": 1.7508, "step": 156000},
    {"epoch": 1.56, "learning_rate": 2.4006603817458263e-05, "loss": 1.7503, "step": 156500},
    {"epoch": 1.56, "learning_rate": 2.392355782326484e-05, "loss": 1.7762, "step": 157000},
    {"epoch": 1.57, "learning_rate": 2.3840511829071414e-05, "loss": 1.7546, "step": 157500},
    {"epoch": 1.57, "learning_rate": 2.375746583487799e-05, "loss": 1.7867, "step": 158000},
    {"epoch": 1.58, "learning_rate": 2.3674419840684568e-05, "loss": 1.7745, "step": 158500},
    {"epoch": 1.58, "learning_rate": 2.359137384649114e-05, "loss": 1.7915, "step": 159000},
    {"epoch": 1.59, "learning_rate": 2.3508327852297718e-05, "loss": 1.6905, "step": 159500},
    {"epoch": 1.59, "learning_rate": 2.3425281858104294e-05, "loss": 1.7627, "step": 160000},
    {"epoch": 1.6, "learning_rate": 2.334223586391087e-05, "loss": 1.7578, "step": 160500},
    {"epoch": 1.6, "learning_rate": 2.3259189869717447e-05, "loss": 1.7387, "step": 161000},
    {"epoch": 1.61, "learning_rate": 2.317614387552402e-05, "loss": 1.767, "step": 161500},
    {"epoch": 1.61, "learning_rate": 2.3093097881330598e-05, "loss": 1.7562, "step": 162000},
    {"epoch": 1.62, "learning_rate": 2.3010051887137173e-05, "loss": 1.7326, "step": 162500},
    {"epoch": 1.62, "learning_rate": 2.292700589294375e-05, "loss": 1.7177, "step": 163000},
    {"epoch": 1.63, "learning_rate": 2.2843959898750324e-05, "loss": 1.8186, "step": 163500},
    {"epoch": 1.63, "learning_rate": 2.2760913904556903e-05, "loss": 1.756, "step": 164000},
    {"epoch": 1.64, "learning_rate": 2.2677867910363475e-05, "loss": 1.7344, "step": 164500},
    {"epoch": 1.64, "learning_rate": 2.2594821916170053e-05, "loss": 1.8005, "step": 165000},
    {"epoch": 1.65, "learning_rate": 2.251177592197663e-05, "loss": 1.7913, "step": 165500},
    {"epoch": 1.65, "learning_rate": 2.2428729927783204e-05, "loss": 1.7065, "step": 166000},
    {"epoch": 1.66, "learning_rate": 2.2345683933589783e-05, "loss": 1.7689, "step": 166500},
    {"epoch": 1.66, "learning_rate": 2.2262637939396354e-05, "loss": 1.7677, "step": 167000},
    {"epoch": 1.67, "learning_rate": 2.2179591945202933e-05, "loss": 1.7108, "step": 167500},
    {"epoch": 1.67, "learning_rate": 2.209654595100951e-05, "loss": 1.7906, "step": 168000},
    {"epoch": 1.68, "learning_rate": 2.2013499956816084e-05, "loss": 1.7315, "step": 168500},
    {"epoch": 1.68, "learning_rate": 2.193045396262266e-05, "loss": 1.7523, "step": 169000},
    {"epoch": 1.69, "learning_rate": 2.1847407968429238e-05, "loss": 1.723, "step": 169500},
    {"epoch": 1.69, "learning_rate": 2.176436197423581e-05, "loss": 1.7672, "step": 170000},
    {"epoch": 1.7, "learning_rate": 2.1681315980042388e-05, "loss": 1.7129, "step": 170500},
    {"epoch": 1.7, "learning_rate": 2.1598269985848964e-05, "loss": 1.6996, "step": 171000},
    {"epoch": 1.71, "learning_rate": 2.151522399165554e-05, "loss": 1.802, "step": 171500},
    {"epoch": 1.71, "learning_rate": 2.1432177997462118e-05, "loss": 1.7453, "step": 172000},
    {"epoch": 1.72, "learning_rate": 2.134913200326869e-05, "loss": 1.784, "step": 172500},
    {"epoch": 1.72, "learning_rate": 2.1266086009075268e-05, "loss": 1.7123, "step": 173000},
    {"epoch": 1.73, "learning_rate": 2.1183040014881843e-05, "loss": 1.7359, "step": 173500},
    {"epoch": 1.73, "learning_rate": 2.109999402068842e-05, "loss": 1.7265, "step": 174000},
    {"epoch": 1.74, "learning_rate": 2.1016948026494994e-05, "loss": 1.735, "step": 174500},
    {"epoch": 1.74, "learning_rate": 2.0933902032301573e-05, "loss": 1.7794, "step": 175000},
    {"epoch": 1.75, "learning_rate": 2.0850856038108148e-05, "loss": 1.7365, "step": 175500},
    {"epoch": 1.75, "learning_rate": 2.0767810043914723e-05, "loss": 1.703, "step": 176000},
    {"epoch": 1.76, "learning_rate": 2.06847640497213e-05, "loss": 1.7378, "step": 176500},
    {"epoch": 1.76, "learning_rate": 2.0601718055527874e-05, "loss": 1.7197, "step": 177000},
    {"epoch": 1.77, "learning_rate": 2.0518672061334453e-05, "loss": 1.7206, "step": 177500},
    {"epoch": 1.77, "learning_rate": 2.0435626067141024e-05, "loss": 1.692, "step": 178000},
    {"epoch": 1.78, "learning_rate": 2.0352580072947603e-05, "loss": 1.7309, "step": 178500},
    {"epoch": 1.78, "learning_rate": 2.026953407875418e-05, "loss": 1.7436, "step": 179000},
    {"epoch": 1.79, "learning_rate": 2.0186488084560754e-05, "loss": 1.7035, "step": 179500},
    {"epoch": 1.79, "learning_rate": 2.010344209036733e-05, "loss": 1.714, "step": 180000},
    {"epoch": 1.8, "learning_rate": 2.0020396096173904e-05, "loss": 1.7232, "step": 180500},
    {"epoch": 1.8, "learning_rate": 1.9937350101980483e-05, "loss": 1.7051, "step": 181000},
    {"epoch": 1.81, "learning_rate": 1.985430410778706e-05, "loss": 1.7289, "step": 181500},
    {"epoch": 1.81, "learning_rate": 1.9771258113593634e-05, "loss": 1.7324, "step": 182000},
    {"epoch": 1.82, "learning_rate": 1.968821211940021e-05, "loss": 1.7407, "step": 182500},
    {"epoch": 1.82, "learning_rate": 1.9605166125206788e-05, "loss": 1.7414, "step": 183000},
    {"epoch": 1.83, "learning_rate": 1.952212013101336e-05, "loss": 1.7127, "step": 183500},
    {"epoch": 1.83, "learning_rate": 1.9439074136819938e-05, "loss": 1.7178, "step": 184000},
    {"epoch": 1.84, "learning_rate": 1.9356028142626513e-05, "loss": 1.7367, "step": 184500},
    {"epoch": 1.84, "learning_rate": 1.927298214843309e-05, "loss": 1.7537, "step": 185000},
    {"epoch": 1.85, "learning_rate": 1.9189936154239664e-05, "loss": 1.7151, "step": 185500},
    {"epoch": 1.85, "learning_rate": 1.910689016004624e-05, "loss": 1.6802, "step": 186000},
    {"epoch": 1.86, "learning_rate": 1.9023844165852818e-05, "loss": 1.6828, "step": 186500},
    {"epoch": 1.86, "learning_rate": 1.8940798171659393e-05, "loss": 1.6664, "step": 187000},
    {"epoch": 1.87, "learning_rate": 1.885775217746597e-05, "loss": 1.6923, "step": 187500},
    {"epoch": 1.87, "learning_rate": 1.8774706183272544e-05, "loss": 1.7051, "step": 188000},
    {"epoch": 1.88, "learning_rate": 1.8691660189079123e-05, "loss": 1.7262, "step": 188500},
    {"epoch": 1.88, "learning_rate": 1.8608614194885695e-05, "loss": 1.7232, "step": 189000},
    {"epoch": 1.89, "learning_rate": 1.8525568200692273e-05, "loss": 1.6761, "step": 189500},
    {"epoch": 1.89, "learning_rate": 1.844252220649885e-05, "loss": 1.7318, "step": 190000},
    {"epoch": 1.9, "learning_rate": 1.8359476212305424e-05, "loss": 1.7338, "step": 190500},
    {"epoch": 1.9, "learning_rate": 1.8276430218112e-05, "loss": 1.725, "step": 191000},
    {"epoch": 1.91, "learning_rate": 1.8193384223918574e-05, "loss": 1.6966, "step": 191500},
    {"epoch": 1.91, "learning_rate": 1.8110338229725153e-05, "loss": 1.7098, "step": 192000},
    {"epoch": 1.92, "learning_rate": 1.802729223553173e-05, "loss": 1.6887, "step": 192500},
    {"epoch": 1.92, "learning_rate": 1.7944246241338304e-05, "loss": 1.6948, "step": 193000},
    {"epoch": 1.93, "learning_rate": 1.786120024714488e-05, "loss": 1.7347, "step": 193500},
    {"epoch": 1.93, "learning_rate": 1.7778154252951458e-05, "loss": 1.6812, "step": 194000},
    {"epoch": 1.94, "learning_rate": 1.769510825875803e-05, "loss": 1.7496, "step": 194500},
    {"epoch": 1.94, "learning_rate": 1.7612062264564608e-05, "loss": 1.7149, "step": 195000},
    {"epoch": 1.95, "learning_rate": 1.7529016270371184e-05, "loss": 1.7519, "step": 195500},
    {"epoch": 1.95, "learning_rate": 1.744597027617776e-05, "loss": 1.6595, "step": 196000},
    {"epoch": 1.96, "learning_rate": 1.7362924281984334e-05, "loss": 1.7009, "step": 196500},
    {"epoch": 1.96, "learning_rate": 1.727987828779091e-05, "loss": 1.7369, "step": 197000},
    {"epoch": 1.97, "learning_rate": 1.7196832293597488e-05, "loss": 1.6914, "step": 197500},
    {"epoch": 1.97, "learning_rate": 1.7113786299404063e-05, "loss": 1.74, "step": 198000},
    {"epoch": 1.98, "learning_rate": 1.703074030521064e-05, "loss": 1.7348, "step": 198500},
    {"epoch": 1.98, "learning_rate": 1.6947694311017214e-05, "loss": 1.6941, "step": 199000},
    {"epoch": 1.99, "learning_rate": 1.6864648316823793e-05, "loss": 1.6831, "step": 199500},
    {"epoch": 1.99, "learning_rate": 1.6781602322630365e-05, "loss": 1.6855, "step": 200000},
    {"epoch": 2.0, "learning_rate": 1.6698556328436943e-05, "loss": 1.7032, "step": 200500},
    {"epoch": 2.0, "eval_loss": 1.6460107564926147, "eval_runtime": 62.456, "eval_samples_per_second": 676.493, "eval_steps_per_second": 84.572, "step": 200692},
    {"epoch": 2.0, "learning_rate": 1.661551033424352e-05, "loss": 1.6659, "step": 201000},
    {"epoch": 2.01, "learning_rate": 1.6532464340050094e-05, "loss": 1.7258, "step": 201500},
    {"epoch": 2.01, "learning_rate": 1.644941834585667e-05, "loss": 1.6603, "step": 202000},
    {"epoch": 2.02, "learning_rate": 1.6366372351663244e-05, "loss": 1.6643, "step": 202500},
    {"epoch": 2.02, "learning_rate": 1.6283326357469823e-05, "loss": 1.67, "step": 203000},
    {"epoch": 2.03, "learning_rate": 1.62002803632764e-05, "loss": 1.7112, "step": 203500},
    {"epoch": 2.03, "learning_rate": 1.6117234369082974e-05, "loss": 1.672, "step": 204000},
    {"epoch": 2.04, "learning_rate": 1.603418837488955e-05, "loss": 1.6758, "step": 204500},
    {"epoch": 2.04, "learning_rate": 1.5951142380696128e-05, "loss": 1.7029, "step": 205000},
    {"epoch": 2.05, "learning_rate": 1.58680963865027e-05, "loss": 1.673, "step": 205500},
    {"epoch": 2.05, "learning_rate": 1.578505039230928e-05, "loss": 1.684, "step": 206000},
    {"epoch": 2.06, "learning_rate": 1.5702004398115854e-05, "loss": 1.6506, "step": 206500},
    {"epoch": 2.06, "learning_rate": 1.561895840392243e-05, "loss": 1.6968, "step": 207000},
    {"epoch": 2.07, "learning_rate": 1.5535912409729008e-05, "loss": 1.7132, "step": 207500},
    {"epoch": 2.07, "learning_rate": 1.545286641553558e-05, "loss": 1.7283, "step": 208000},
    {"epoch": 2.08, "learning_rate": 1.5369820421342158e-05, "loss": 1.6659, "step": 208500},
    {"epoch": 2.08, "learning_rate": 1.5286774427148733e-05, "loss": 1.653, "step": 209000},
    {"epoch": 2.09, "learning_rate": 1.5203728432955309e-05, "loss": 1.6335, "step": 209500},
    {"epoch": 2.09, "learning_rate": 1.5120682438761886e-05, "loss": 1.661, "step": 210000},
    {"epoch": 2.1, "learning_rate": 1.503763644456846e-05, "loss": 1.6556, "step": 210500},
    {"epoch": 2.1, "learning_rate": 1.4954590450375036e-05, "loss": 1.7251, "step": 211000},
    {"epoch": 2.11, "learning_rate": 1.4871544456181613e-05, "loss": 1.6816, "step": 211500},
    {"epoch": 2.11, "learning_rate": 1.4788498461988187e-05, "loss": 1.6591, "step": 212000},
    {"epoch": 2.12, "learning_rate": 1.4705452467794764e-05, "loss": 1.7129, "step": 212500},
    {"epoch": 2.12, "learning_rate": 1.4622406473601341e-05, "loss": 1.6808, "step": 213000},
    {"epoch": 2.13, "learning_rate": 1.4539360479407915e-05, "loss": 1.6958, "step": 213500},
    {"epoch": 2.13, "learning_rate": 1.4456314485214492e-05, "loss": 1.6892, "step": 214000},
    {"epoch": 2.14, "learning_rate": 1.4373268491021069e-05, "loss": 1.7093, "step": 214500},
    {"epoch": 2.14, "learning_rate": 1.4290222496827644e-05, "loss": 1.669, "step": 215000},
    {"epoch": 2.15, "learning_rate": 1.420717650263422e-05, "loss": 1.6376, "step": 215500},
    {"epoch": 2.15, "learning_rate": 1.4124130508440794e-05, "loss": 1.6988, "step": 216000},
    {"epoch": 2.16, "learning_rate": 1.4041084514247371e-05, "loss": 1.699, "step": 216500},
    {"epoch": 2.16, "learning_rate": 1.3958038520053948e-05, "loss": 1.7098, "step": 217000},
    {"epoch": 2.17, "learning_rate": 1.3874992525860522e-05, "loss": 1.6469, "step": 217500},
    {"epoch": 2.17, "learning_rate": 1.3791946531667099e-05, "loss": 1.6594, "step": 218000},
    {"epoch": 2.18, "learning_rate": 1.3708900537473676e-05, "loss": 1.6656, "step": 218500},
    {"epoch": 2.18, "learning_rate": 1.362585454328025e-05, "loss": 1.6813, "step": 219000},
    {"epoch": 2.19, "learning_rate": 1.3542808549086827e-05, "loss": 1.6124, "step": 219500},
    {"epoch": 2.19, "learning_rate": 1.3459762554893402e-05, "loss": 1.6395, "step": 220000},
    {"epoch": 2.2, "learning_rate": 1.3376716560699979e-05, "loss": 1.7241, "step": 220500},
    {"epoch": 2.2, "learning_rate": 1.3293670566506556e-05, "loss": 1.6459, "step": 221000},
    {"epoch": 2.21, "learning_rate": 1.321062457231313e-05, "loss": 1.6462, "step": 221500},
    {"epoch": 2.21, "learning_rate": 1.3127578578119706e-05, "loss": 1.65, "step": 222000},
    {"epoch": 2.22, "learning_rate": 1.3044532583926283e-05, "loss": 1.6135, "step": 222500},
    {"epoch": 2.22, "learning_rate": 1.2961486589732857e-05, "loss": 1.6306, "step": 223000},
    {"epoch": 2.23, "learning_rate": 1.2878440595539434e-05, "loss": 1.6445, "step": 223500},
    {"epoch": 2.23, "learning_rate": 1.2795394601346011e-05, "loss": 1.6473, "step": 224000},
    {"epoch": 2.24, "learning_rate": 1.2712348607152586e-05, "loss": 1.7171, "step": 224500},
    {"epoch": 2.24, "learning_rate": 1.2629302612959162e-05, "loss": 1.7322, "step": 225000},
    {"epoch": 2.25, "learning_rate": 1.2546256618765737e-05, "loss": 1.6733, "step": 225500},
    {"epoch": 2.25, "learning_rate": 1.2463210624572314e-05, "loss": 1.6405, "step": 226000},
    {"epoch": 2.26, "learning_rate": 1.2380164630378889e-05, "loss": 1.6611, "step": 226500},
    {"epoch": 2.26, "learning_rate": 1.2297118636185466e-05, "loss": 1.6675, "step": 227000},
    {"epoch": 2.27, "learning_rate": 1.2214072641992041e-05, "loss": 1.6807, "step": 227500},
    {"epoch": 2.27, "learning_rate": 1.2131026647798617e-05, "loss": 1.6737, "step": 228000},
    {"epoch": 2.28, "learning_rate": 1.2047980653605194e-05, "loss": 1.6794, "step": 228500},
    {"epoch": 2.28, "learning_rate": 1.1964934659411769e-05, "loss": 1.6674, "step": 229000},
    {"epoch": 2.29, "learning_rate": 1.1881888665218344e-05, "loss": 1.648, "step": 229500},
    {"epoch": 2.29, "learning_rate": 1.1798842671024921e-05, "loss": 1.6825, "step": 230000},
    {"epoch": 2.3, "learning_rate": 1.1715796676831498e-05, "loss": 1.6233, "step": 230500},
    {"epoch": 2.3, "learning_rate": 1.1632750682638074e-05, "loss": 1.669, "step": 231000},
    {"epoch": 2.31, "learning_rate": 1.1549704688444649e-05, "loss": 1.7087, "step": 231500},
    {"epoch": 2.31, "learning_rate": 1.1466658694251224e-05, "loss": 1.6436, "step": 232000},
    {"epoch": 2.32, "learning_rate": 1.1383612700057801e-05, "loss": 1.6433, "step": 232500},
    {"epoch": 2.32, "learning_rate": 1.1300566705864376e-05, "loss": 1.611, "step": 233000},
    {"epoch": 2.33, "learning_rate": 1.1217520711670952e-05, "loss": 1.6508, "step": 233500},
    {"epoch": 2.33, "learning_rate": 1.1134474717477527e-05, "loss": 1.634, "step": 234000},
    {"epoch": 2.34, "learning_rate": 1.1051428723284104e-05, "loss": 1.6744, "step": 234500},
    {"epoch": 2.34, "learning_rate": 1.096838272909068e-05, "loss": 1.6358, "step": 235000},
    {"epoch": 2.35, "learning_rate": 1.0885336734897256e-05, "loss": 1.6318, "step": 235500},
    {"epoch": 2.35, "learning_rate": 1.0802290740703832e-05, "loss": 1.6344, "step": 236000},
    {"epoch": 2.36, "learning_rate": 1.0719244746510409e-05, "loss": 1.6591, "step": 236500},
    {"epoch": 2.36, "learning_rate": 1.0636198752316984e-05, "loss": 1.6757, "step": 237000},
    {"epoch": 2.37, "learning_rate": 1.055315275812356e-05, "loss": 1.6628, "step": 237500},
    {"epoch": 2.37, "learning_rate": 1.0470106763930136e-05, "loss": 1.6095, "step": 238000},
    {"epoch": 2.38, "learning_rate": 1.0387060769736711e-05, "loss": 1.6678, "step": 238500},
    {"epoch": 2.38, "learning_rate": 1.0304014775543287e-05, "loss": 1.6727, "step": 239000},
    {"epoch": 2.39, "learning_rate": 1.0220968781349862e-05, "loss": 1.6702, "step": 239500},
    {"epoch": 2.39, "learning_rate": 1.0137922787156439e-05, "loss": 1.7455, "step": 240000},
    {"epoch": 2.4, "learning_rate": 1.0054876792963016e-05, "loss": 1.604, "step": 240500},
    {"epoch": 2.4, "learning_rate": 9.971830798769591e-06, "loss": 1.6657, "step": 241000},
    {"epoch": 2.41, "learning_rate": 9.888784804576167e-06, "loss": 1.6505, "step": 241500},
    {"epoch": 2.41, "learning_rate": 9.805738810382744e-06, "loss": 1.6435, "step": 242000},
    {"epoch": 2.42, "learning_rate": 9.722692816189319e-06, "loss": 1.6541, "step": 242500},
    {"epoch": 2.42, "learning_rate": 9.639646821995894e-06, "loss": 1.6212, "step": 243000},
    {"epoch": 2.43, "learning_rate": 9.55660082780247e-06, "loss": 1.6384, "step": 243500},
    {"epoch": 2.43, "learning_rate": 9.473554833609047e-06, "loss": 1.5894, "step": 244000},
    {"epoch": 2.44, "learning_rate": 9.390508839415622e-06, "loss": 1.6558, "step": 244500},
    {"epoch": 2.44, "learning_rate": 9.307462845222197e-06, "loss": 1.6277, "step": 245000},
    {"epoch": 2.45, "learning_rate": 9.224416851028774e-06, "loss": 1.6418, "step": 245500},
    {"epoch": 2.45, "learning_rate": 9.141370856835351e-06, "loss": 1.5842, "step": 246000},
    {"epoch": 2.46, "learning_rate": 9.058324862641926e-06, "loss": 1.6372, "step": 246500},
    {"epoch": 2.46, "learning_rate": 8.975278868448502e-06, "loss": 1.6669, "step": 247000},
    {"epoch": 2.47, "learning_rate": 8.892232874255079e-06, "loss": 1.617, "step": 247500},
    {"epoch": 2.47, "learning_rate": 8.809186880061654e-06, "loss": 1.6377, "step": 248000},
    {"epoch": 2.48, "learning_rate": 8.72614088586823e-06, "loss": 1.6603, "step": 248500},
    {"epoch": 2.48, "learning_rate": 8.643094891674805e-06, "loss": 1.6087, "step": 249000},
    {"epoch": 2.49, "learning_rate": 8.560048897481382e-06, "loss": 1.6408, "step": 249500},
    {"epoch": 2.49, "learning_rate": 8.477002903287957e-06, "loss": 1.6131, "step": 250000},
    {"epoch": 2.5, "learning_rate": 8.393956909094534e-06, "loss": 1.662, "step": 250500},
{ |
|
"epoch": 2.5, |
|
"learning_rate": 8.310910914901109e-06, |
|
"loss": 1.635, |
|
"step": 251000 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 8.227864920707686e-06, |
|
"loss": 1.655, |
|
"step": 251500 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 8.144818926514261e-06, |
|
"loss": 1.609, |
|
"step": 252000 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 8.061772932320837e-06, |
|
"loss": 1.6342, |
|
"step": 252500 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 7.978726938127414e-06, |
|
"loss": 1.6073, |
|
"step": 253000 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 7.895680943933989e-06, |
|
"loss": 1.6242, |
|
"step": 253500 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 7.812634949740564e-06, |
|
"loss": 1.5926, |
|
"step": 254000 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 7.72958895554714e-06, |
|
"loss": 1.5771, |
|
"step": 254500 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 7.646542961353717e-06, |
|
"loss": 1.6155, |
|
"step": 255000 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.563496967160293e-06, |
|
"loss": 1.6359, |
|
"step": 255500 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.480450972966868e-06, |
|
"loss": 1.6789, |
|
"step": 256000 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 7.397404978773443e-06, |
|
"loss": 1.6543, |
|
"step": 256500 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 7.31435898458002e-06, |
|
"loss": 1.6069, |
|
"step": 257000 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 7.2313129903865964e-06, |
|
"loss": 1.633, |
|
"step": 257500 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 7.148266996193172e-06, |
|
"loss": 1.5973, |
|
"step": 258000 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 7.065221001999747e-06, |
|
"loss": 1.6708, |
|
"step": 258500 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 6.982175007806324e-06, |
|
"loss": 1.5986, |
|
"step": 259000 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 6.899129013612899e-06, |
|
"loss": 1.6398, |
|
"step": 259500 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 6.8160830194194755e-06, |
|
"loss": 1.6438, |
|
"step": 260000 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 6.7330370252260525e-06, |
|
"loss": 1.6719, |
|
"step": 260500 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 6.649991031032628e-06, |
|
"loss": 1.6032, |
|
"step": 261000 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 6.566945036839203e-06, |
|
"loss": 1.6564, |
|
"step": 261500 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 6.483899042645779e-06, |
|
"loss": 1.5988, |
|
"step": 262000 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 6.400853048452355e-06, |
|
"loss": 1.5879, |
|
"step": 262500 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 6.3178070542589315e-06, |
|
"loss": 1.6442, |
|
"step": 263000 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 6.234761060065507e-06, |
|
"loss": 1.664, |
|
"step": 263500 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 6.151715065872083e-06, |
|
"loss": 1.622, |
|
"step": 264000 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 6.068669071678658e-06, |
|
"loss": 1.6038, |
|
"step": 264500 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 5.985623077485235e-06, |
|
"loss": 1.6131, |
|
"step": 265000 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.9025770832918105e-06, |
|
"loss": 1.6065, |
|
"step": 265500 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.819531089098387e-06, |
|
"loss": 1.6532, |
|
"step": 266000 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 5.736485094904962e-06, |
|
"loss": 1.6297, |
|
"step": 266500 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 5.653439100711538e-06, |
|
"loss": 1.6374, |
|
"step": 267000 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.570393106518114e-06, |
|
"loss": 1.6533, |
|
"step": 267500 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.48734711232469e-06, |
|
"loss": 1.6394, |
|
"step": 268000 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.4043011181312665e-06, |
|
"loss": 1.6146, |
|
"step": 268500 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.321255123937842e-06, |
|
"loss": 1.6221, |
|
"step": 269000 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.238209129744418e-06, |
|
"loss": 1.5852, |
|
"step": 269500 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.155163135550994e-06, |
|
"loss": 1.5987, |
|
"step": 270000 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.07211714135757e-06, |
|
"loss": 1.622, |
|
"step": 270500 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.9890711471641455e-06, |
|
"loss": 1.6285, |
|
"step": 271000 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.906025152970722e-06, |
|
"loss": 1.5902, |
|
"step": 271500 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.822979158777297e-06, |
|
"loss": 1.6129, |
|
"step": 272000 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.739933164583873e-06, |
|
"loss": 1.6248, |
|
"step": 272500 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.656887170390449e-06, |
|
"loss": 1.5923, |
|
"step": 273000 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.573841176197025e-06, |
|
"loss": 1.5975, |
|
"step": 273500 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.490795182003601e-06, |
|
"loss": 1.5937, |
|
"step": 274000 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.407749187810177e-06, |
|
"loss": 1.5886, |
|
"step": 274500 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.324703193616753e-06, |
|
"loss": 1.6767, |
|
"step": 275000 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.241657199423329e-06, |
|
"loss": 1.5599, |
|
"step": 275500 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.158611205229905e-06, |
|
"loss": 1.6116, |
|
"step": 276000 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.0755652110364805e-06, |
|
"loss": 1.6349, |
|
"step": 276500 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.992519216843057e-06, |
|
"loss": 1.6263, |
|
"step": 277000 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.909473222649632e-06, |
|
"loss": 1.5866, |
|
"step": 277500 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.826427228456209e-06, |
|
"loss": 1.5412, |
|
"step": 278000 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.7433812342627843e-06, |
|
"loss": 1.5899, |
|
"step": 278500 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.6603352400693604e-06, |
|
"loss": 1.5569, |
|
"step": 279000 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.5772892458759357e-06, |
|
"loss": 1.5831, |
|
"step": 279500 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.4942432516825123e-06, |
|
"loss": 1.6188, |
|
"step": 280000 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.4111972574890876e-06, |
|
"loss": 1.6345, |
|
"step": 280500 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.328151263295664e-06, |
|
"loss": 1.6034, |
|
"step": 281000 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.2451052691022394e-06, |
|
"loss": 1.6262, |
|
"step": 281500 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.1620592749088156e-06, |
|
"loss": 1.6173, |
|
"step": 282000 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 3.0790132807153917e-06, |
|
"loss": 1.6379, |
|
"step": 282500 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.9959672865219674e-06, |
|
"loss": 1.5932, |
|
"step": 283000 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.912921292328543e-06, |
|
"loss": 1.599, |
|
"step": 283500 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.8298752981351193e-06, |
|
"loss": 1.6222, |
|
"step": 284000 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.746829303941695e-06, |
|
"loss": 1.6023, |
|
"step": 284500 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.663783309748271e-06, |
|
"loss": 1.6039, |
|
"step": 285000 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.580737315554847e-06, |
|
"loss": 1.5626, |
|
"step": 285500 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.4976913213614226e-06, |
|
"loss": 1.6007, |
|
"step": 286000 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.4146453271679987e-06, |
|
"loss": 1.6075, |
|
"step": 286500 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.3315993329745745e-06, |
|
"loss": 1.6126, |
|
"step": 287000 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.248553338781151e-06, |
|
"loss": 1.5963, |
|
"step": 287500 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.1655073445877267e-06, |
|
"loss": 1.5668, |
|
"step": 288000 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 2.0824613503943025e-06, |
|
"loss": 1.564, |
|
"step": 288500 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.9994153562008786e-06, |
|
"loss": 1.6204, |
|
"step": 289000 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.9163693620074543e-06, |
|
"loss": 1.5741, |
|
"step": 289500 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.8333233678140303e-06, |
|
"loss": 1.608, |
|
"step": 290000 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.7502773736206062e-06, |
|
"loss": 1.5894, |
|
"step": 290500 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.6672313794271821e-06, |
|
"loss": 1.5644, |
|
"step": 291000 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.5841853852337578e-06, |
|
"loss": 1.5474, |
|
"step": 291500 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.5011393910403338e-06, |
|
"loss": 1.6177, |
|
"step": 292000 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.4180933968469097e-06, |
|
"loss": 1.579, |
|
"step": 292500 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.3350474026534856e-06, |
|
"loss": 1.5752, |
|
"step": 293000 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.2520014084600616e-06, |
|
"loss": 1.5702, |
|
"step": 293500 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.1689554142666375e-06, |
|
"loss": 1.5727, |
|
"step": 294000 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.0859094200732134e-06, |
|
"loss": 1.695, |
|
"step": 294500 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.0028634258797894e-06, |
|
"loss": 1.608, |
|
"step": 295000 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 9.198174316863652e-07, |
|
"loss": 1.6115, |
|
"step": 295500 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 8.367714374929411e-07, |
|
"loss": 1.6197, |
|
"step": 296000 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 7.53725443299517e-07, |
|
"loss": 1.5447, |
|
"step": 296500 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 6.70679449106093e-07, |
|
"loss": 1.6058, |
|
"step": 297000 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 5.876334549126688e-07, |
|
"loss": 1.6225, |
|
"step": 297500 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.045874607192447e-07, |
|
"loss": 1.5389, |
|
"step": 298000 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.215414665258207e-07, |
|
"loss": 1.5693, |
|
"step": 298500 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.384954723323966e-07, |
|
"loss": 1.5898, |
|
"step": 299000 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.554494781389725e-07, |
|
"loss": 1.6425, |
|
"step": 299500 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.724034839455484e-07, |
|
"loss": 1.6377, |
|
"step": 300000 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 8.935748975212432e-08, |
|
"loss": 1.5687, |
|
"step": 300500 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 6.3114955587002305e-09, |
|
"loss": 1.5879, |
|
"step": 301000 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_loss": 1.5570189952850342, |
|
"eval_runtime": 63.1399, |
|
"eval_samples_per_second": 669.165, |
|
"eval_steps_per_second": 83.655, |
|
"step": 301038 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 301038, |
|
"total_flos": 5.871271452031616e+16, |
|
"train_loss": 1.8846093698359827, |
|
"train_runtime": 20632.0794, |
|
"train_samples_per_second": 116.725, |
|
"train_steps_per_second": 14.591 |
|
} |
|
], |
|
"max_steps": 301038, |
|
"num_train_epochs": 3, |
|
"total_flos": 5.871271452031616e+16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|