{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 6.621639517944643,
  "eval_steps": 500,
  "global_step": 100000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 4.983445901205139e-05,
      "loss": 7.9646,
      "step": 500
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.9668918024102774e-05,
      "loss": 7.067,
      "step": 1000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.950337703615415e-05,
      "loss": 6.6489,
      "step": 1500
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.933783604820554e-05,
      "loss": 6.4267,
      "step": 2000
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.9172295060256924e-05,
      "loss": 6.2394,
      "step": 2500
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.900675407230831e-05,
      "loss": 6.1117,
      "step": 3000
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.884121308435969e-05,
      "loss": 5.9807,
      "step": 3500
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.8675672096411075e-05,
      "loss": 5.8532,
      "step": 4000
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.851013110846246e-05,
      "loss": 5.7589,
      "step": 4500
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.8344590120513846e-05,
      "loss": 5.661,
      "step": 5000
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.8179049132565225e-05,
      "loss": 5.5663,
      "step": 5500
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.801350814461661e-05,
      "loss": 5.5025,
      "step": 6000
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.7847967156667996e-05,
      "loss": 5.4251,
      "step": 6500
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.768242616871938e-05,
      "loss": 5.35,
      "step": 7000
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.751688518077076e-05,
      "loss": 5.2991,
      "step": 7500
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.735134419282215e-05,
      "loss": 5.2476,
      "step": 8000
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.718580320487353e-05,
      "loss": 5.1949,
      "step": 8500
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.702026221692492e-05,
      "loss": 5.1361,
      "step": 9000
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.68547212289763e-05,
      "loss": 5.0988,
      "step": 9500
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.668918024102768e-05,
      "loss": 5.0639,
      "step": 10000
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.652363925307907e-05,
      "loss": 5.037,
      "step": 10500
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.6358098265130454e-05,
      "loss": 4.9896,
      "step": 11000
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.619255727718183e-05,
      "loss": 4.9501,
      "step": 11500
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.602701628923322e-05,
      "loss": 4.9465,
      "step": 12000
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.58614753012846e-05,
      "loss": 4.9,
      "step": 12500
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.569593431333598e-05,
      "loss": 4.8633,
      "step": 13000
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.553039332538737e-05,
      "loss": 4.8352,
      "step": 13500
    },
    {
      "epoch": 0.93,
      "learning_rate": 4.536485233743875e-05,
      "loss": 4.7846,
      "step": 14000
    },
    {
      "epoch": 0.96,
      "learning_rate": 4.5199311349490134e-05,
      "loss": 4.775,
      "step": 14500
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.503377036154152e-05,
      "loss": 4.7556,
      "step": 15000
    },
    {
      "epoch": 1.03,
      "learning_rate": 4.48682293735929e-05,
      "loss": 4.7332,
      "step": 15500
    },
    {
      "epoch": 1.06,
      "learning_rate": 4.4702688385644284e-05,
      "loss": 4.7188,
      "step": 16000
    },
    {
      "epoch": 1.09,
      "learning_rate": 4.453714739769567e-05,
      "loss": 4.6928,
      "step": 16500
    },
    {
      "epoch": 1.13,
      "learning_rate": 4.4371606409747055e-05,
      "loss": 4.6792,
      "step": 17000
    },
    {
      "epoch": 1.16,
      "learning_rate": 4.4206065421798434e-05,
      "loss": 4.6542,
      "step": 17500
    },
    {
      "epoch": 1.19,
      "learning_rate": 4.404052443384982e-05,
      "loss": 4.6433,
      "step": 18000
    },
    {
      "epoch": 1.23,
      "learning_rate": 4.3874983445901206e-05,
      "loss": 4.616,
      "step": 18500
    },
    {
      "epoch": 1.26,
      "learning_rate": 4.370944245795259e-05,
      "loss": 4.6071,
      "step": 19000
    },
    {
      "epoch": 1.29,
      "learning_rate": 4.354390147000397e-05,
      "loss": 4.5814,
      "step": 19500
    },
    {
      "epoch": 1.32,
      "learning_rate": 4.3378360482055356e-05,
      "loss": 4.5617,
      "step": 20000
    },
    {
      "epoch": 1.36,
      "learning_rate": 4.321281949410674e-05,
      "loss": 4.5472,
      "step": 20500
    },
    {
      "epoch": 1.39,
      "learning_rate": 4.304727850615813e-05,
      "loss": 4.5401,
      "step": 21000
    },
    {
      "epoch": 1.42,
      "learning_rate": 4.2881737518209507e-05,
      "loss": 4.5001,
      "step": 21500
    },
    {
      "epoch": 1.46,
      "learning_rate": 4.271619653026089e-05,
      "loss": 4.4979,
      "step": 22000
    },
    {
      "epoch": 1.49,
      "learning_rate": 4.255065554231228e-05,
      "loss": 4.4717,
      "step": 22500
    },
    {
      "epoch": 1.52,
      "learning_rate": 4.2385114554363664e-05,
      "loss": 4.4488,
      "step": 23000
    },
    {
      "epoch": 1.56,
      "learning_rate": 4.221957356641504e-05,
      "loss": 4.4499,
      "step": 23500
    },
    {
      "epoch": 1.59,
      "learning_rate": 4.205403257846643e-05,
      "loss": 4.4122,
      "step": 24000
    },
    {
      "epoch": 1.62,
      "learning_rate": 4.1888491590517814e-05,
      "loss": 4.4016,
      "step": 24500
    },
    {
      "epoch": 1.66,
      "learning_rate": 4.17229506025692e-05,
      "loss": 4.3926,
      "step": 25000
    },
    {
      "epoch": 1.69,
      "learning_rate": 4.155740961462058e-05,
      "loss": 4.3787,
      "step": 25500
    },
    {
      "epoch": 1.72,
      "learning_rate": 4.1391868626671964e-05,
      "loss": 4.379,
      "step": 26000
    },
    {
      "epoch": 1.75,
      "learning_rate": 4.122632763872335e-05,
      "loss": 4.3696,
      "step": 26500
    },
    {
      "epoch": 1.79,
      "learning_rate": 4.1060786650774736e-05,
      "loss": 4.3436,
      "step": 27000
    },
    {
      "epoch": 1.82,
      "learning_rate": 4.089524566282612e-05,
      "loss": 4.3236,
      "step": 27500
    },
    {
      "epoch": 1.85,
      "learning_rate": 4.07297046748775e-05,
      "loss": 4.3132,
      "step": 28000
    },
    {
      "epoch": 1.89,
      "learning_rate": 4.0564163686928886e-05,
      "loss": 4.3176,
      "step": 28500
    },
    {
      "epoch": 1.92,
      "learning_rate": 4.039862269898027e-05,
      "loss": 4.2956,
      "step": 29000
    },
    {
      "epoch": 1.95,
      "learning_rate": 4.023308171103166e-05,
      "loss": 4.2884,
      "step": 29500
    },
    {
      "epoch": 1.99,
      "learning_rate": 4.0067540723083036e-05,
      "loss": 4.2735,
      "step": 30000
    },
    {
      "epoch": 2.02,
      "learning_rate": 3.990199973513442e-05,
      "loss": 4.2596,
      "step": 30500
    },
    {
      "epoch": 2.05,
      "learning_rate": 3.973645874718581e-05,
      "loss": 4.2534,
      "step": 31000
    },
    {
      "epoch": 2.09,
      "learning_rate": 3.9570917759237194e-05,
      "loss": 4.2429,
      "step": 31500
    },
    {
      "epoch": 2.12,
      "learning_rate": 3.940537677128857e-05,
      "loss": 4.2193,
      "step": 32000
    },
    {
      "epoch": 2.15,
      "learning_rate": 3.923983578333996e-05,
      "loss": 4.2025,
      "step": 32500
    },
    {
      "epoch": 2.19,
      "learning_rate": 3.9074294795391344e-05,
      "loss": 4.1868,
      "step": 33000
    },
    {
      "epoch": 2.22,
      "learning_rate": 3.890875380744273e-05,
      "loss": 4.1895,
      "step": 33500
    },
    {
      "epoch": 2.25,
      "learning_rate": 3.874321281949411e-05,
      "loss": 4.1681,
      "step": 34000
    },
    {
      "epoch": 2.28,
      "learning_rate": 3.8577671831545494e-05,
      "loss": 4.18,
      "step": 34500
    },
    {
      "epoch": 2.32,
      "learning_rate": 3.841213084359688e-05,
      "loss": 4.1666,
      "step": 35000
    },
    {
      "epoch": 2.35,
      "learning_rate": 3.8246589855648266e-05,
      "loss": 4.136,
      "step": 35500
    },
    {
      "epoch": 2.38,
      "learning_rate": 3.8081048867699645e-05,
      "loss": 4.1456,
      "step": 36000
    },
    {
      "epoch": 2.42,
      "learning_rate": 3.791550787975103e-05,
      "loss": 4.1194,
      "step": 36500
    },
    {
      "epoch": 2.45,
      "learning_rate": 3.7749966891802416e-05,
      "loss": 4.1111,
      "step": 37000
    },
    {
      "epoch": 2.48,
      "learning_rate": 3.7584425903853795e-05,
      "loss": 4.092,
      "step": 37500
    },
    {
      "epoch": 2.52,
      "learning_rate": 3.741888491590518e-05,
      "loss": 4.0972,
      "step": 38000
    },
    {
      "epoch": 2.55,
      "learning_rate": 3.7253343927956566e-05,
      "loss": 4.0929,
      "step": 38500
    },
    {
      "epoch": 2.58,
      "learning_rate": 3.7087802940007945e-05,
      "loss": 4.0645,
      "step": 39000
    },
    {
      "epoch": 2.62,
      "learning_rate": 3.692226195205933e-05,
      "loss": 4.0618,
      "step": 39500
    },
    {
      "epoch": 2.65,
      "learning_rate": 3.675672096411072e-05,
      "loss": 4.0674,
      "step": 40000
    },
    {
      "epoch": 2.68,
      "learning_rate": 3.6591179976162096e-05,
      "loss": 4.0467,
      "step": 40500
    },
    {
      "epoch": 2.71,
      "learning_rate": 3.642563898821348e-05,
      "loss": 4.0345,
      "step": 41000
    },
    {
      "epoch": 2.75,
      "learning_rate": 3.626009800026487e-05,
      "loss": 4.01,
      "step": 41500
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.6094557012316246e-05,
      "loss": 4.0087,
      "step": 42000
    },
    {
      "epoch": 2.81,
      "learning_rate": 3.592901602436763e-05,
      "loss": 4.0112,
      "step": 42500
    },
    {
      "epoch": 2.85,
      "learning_rate": 3.576347503641902e-05,
      "loss": 4.0137,
      "step": 43000
    },
    {
      "epoch": 2.88,
      "learning_rate": 3.55979340484704e-05,
      "loss": 4.005,
      "step": 43500
    },
    {
      "epoch": 2.91,
      "learning_rate": 3.543239306052178e-05,
      "loss": 4.0055,
      "step": 44000
    },
    {
      "epoch": 2.95,
      "learning_rate": 3.526685207257317e-05,
      "loss": 3.98,
      "step": 44500
    },
    {
      "epoch": 2.98,
      "learning_rate": 3.5101311084624553e-05,
      "loss": 3.9646,
      "step": 45000
    },
    {
      "epoch": 3.01,
      "learning_rate": 3.493577009667594e-05,
      "loss": 3.9678,
      "step": 45500
    },
    {
      "epoch": 3.05,
      "learning_rate": 3.477022910872732e-05,
      "loss": 3.9427,
      "step": 46000
    },
    {
      "epoch": 3.08,
      "learning_rate": 3.4604688120778704e-05,
      "loss": 3.9335,
      "step": 46500
    },
    {
      "epoch": 3.11,
      "learning_rate": 3.443914713283009e-05,
      "loss": 3.9361,
      "step": 47000
    },
    {
      "epoch": 3.15,
      "learning_rate": 3.4273606144881475e-05,
      "loss": 3.9476,
      "step": 47500
    },
    {
      "epoch": 3.18,
      "learning_rate": 3.4108065156932854e-05,
      "loss": 3.924,
      "step": 48000
    },
    {
      "epoch": 3.21,
      "learning_rate": 3.394252416898424e-05,
      "loss": 3.9141,
      "step": 48500
    },
    {
      "epoch": 3.24,
      "learning_rate": 3.3776983181035626e-05,
      "loss": 3.9092,
      "step": 49000
    },
    {
      "epoch": 3.28,
      "learning_rate": 3.361144219308701e-05,
      "loss": 3.8767,
      "step": 49500
    },
    {
      "epoch": 3.31,
      "learning_rate": 3.344590120513839e-05,
      "loss": 3.9022,
      "step": 50000
    },
    {
      "epoch": 3.34,
      "learning_rate": 3.3280360217189776e-05,
      "loss": 3.8732,
      "step": 50500
    },
    {
      "epoch": 3.38,
      "learning_rate": 3.311481922924116e-05,
      "loss": 3.8522,
      "step": 51000
    },
    {
      "epoch": 3.41,
      "learning_rate": 3.294927824129255e-05,
      "loss": 3.8773,
      "step": 51500
    },
    {
      "epoch": 3.44,
      "learning_rate": 3.2783737253343926e-05,
      "loss": 3.8715,
      "step": 52000
    },
    {
      "epoch": 3.48,
      "learning_rate": 3.261819626539531e-05,
      "loss": 3.8549,
      "step": 52500
    },
    {
      "epoch": 3.51,
      "learning_rate": 3.24526552774467e-05,
      "loss": 3.8547,
      "step": 53000
    },
    {
      "epoch": 3.54,
      "learning_rate": 3.228711428949808e-05,
      "loss": 3.8408,
      "step": 53500
    },
    {
      "epoch": 3.58,
      "learning_rate": 3.212157330154946e-05,
      "loss": 3.8309,
      "step": 54000
    },
    {
      "epoch": 3.61,
      "learning_rate": 3.195603231360085e-05,
      "loss": 3.8275,
      "step": 54500
    },
    {
      "epoch": 3.64,
      "learning_rate": 3.1790491325652234e-05,
      "loss": 3.8257,
      "step": 55000
    },
    {
      "epoch": 3.68,
      "learning_rate": 3.162495033770362e-05,
      "loss": 3.8233,
      "step": 55500
    },
    {
      "epoch": 3.71,
      "learning_rate": 3.1459409349755005e-05,
      "loss": 3.8184,
      "step": 56000
    },
    {
      "epoch": 3.74,
      "learning_rate": 3.1293868361806384e-05,
      "loss": 3.7942,
      "step": 56500
    },
    {
      "epoch": 3.77,
      "learning_rate": 3.112832737385777e-05,
      "loss": 3.8072,
      "step": 57000
    },
    {
      "epoch": 3.81,
      "learning_rate": 3.0962786385909155e-05,
      "loss": 3.787,
      "step": 57500
    },
    {
      "epoch": 3.84,
      "learning_rate": 3.079724539796054e-05,
      "loss": 3.7954,
      "step": 58000
    },
    {
      "epoch": 3.87,
      "learning_rate": 3.063170441001192e-05,
      "loss": 3.7816,
      "step": 58500
    },
    {
      "epoch": 3.91,
      "learning_rate": 3.0466163422063302e-05,
      "loss": 3.7723,
      "step": 59000
    },
    {
      "epoch": 3.94,
      "learning_rate": 3.0300622434114688e-05,
      "loss": 3.7713,
      "step": 59500
    },
    {
      "epoch": 3.97,
      "learning_rate": 3.0135081446166074e-05,
      "loss": 3.7619,
      "step": 60000
    },
    {
      "epoch": 4.01,
      "learning_rate": 2.9969540458217453e-05,
      "loss": 3.7479,
      "step": 60500
    },
    {
      "epoch": 4.04,
      "learning_rate": 2.980399947026884e-05,
      "loss": 3.7411,
      "step": 61000
    },
    {
      "epoch": 4.07,
      "learning_rate": 2.9638458482320224e-05,
      "loss": 3.7416,
      "step": 61500
    },
    {
      "epoch": 4.11,
      "learning_rate": 2.947291749437161e-05,
      "loss": 3.7423,
      "step": 62000
    },
    {
      "epoch": 4.14,
      "learning_rate": 2.930737650642299e-05,
      "loss": 3.7344,
      "step": 62500
    },
    {
      "epoch": 4.17,
      "learning_rate": 2.9141835518474375e-05,
      "loss": 3.7244,
      "step": 63000
    },
    {
      "epoch": 4.2,
      "learning_rate": 2.897629453052576e-05,
      "loss": 3.739,
      "step": 63500
    },
    {
      "epoch": 4.24,
      "learning_rate": 2.8810753542577146e-05,
      "loss": 3.7142,
      "step": 64000
    },
    {
      "epoch": 4.27,
      "learning_rate": 2.8645212554628525e-05,
      "loss": 3.7158,
      "step": 64500
    },
    {
      "epoch": 4.3,
      "learning_rate": 2.847967156667991e-05,
      "loss": 3.7159,
      "step": 65000
    },
    {
      "epoch": 4.34,
      "learning_rate": 2.8314130578731296e-05,
      "loss": 3.6938,
      "step": 65500
    },
    {
      "epoch": 4.37,
      "learning_rate": 2.8148589590782682e-05,
      "loss": 3.692,
      "step": 66000
    },
    {
      "epoch": 4.4,
      "learning_rate": 2.798304860283406e-05,
      "loss": 3.6779,
      "step": 66500
    },
    {
      "epoch": 4.44,
      "learning_rate": 2.7817507614885447e-05,
      "loss": 3.6856,
      "step": 67000
    },
    {
      "epoch": 4.47,
      "learning_rate": 2.7651966626936832e-05,
      "loss": 3.6771,
      "step": 67500
    },
    {
      "epoch": 4.5,
      "learning_rate": 2.7486425638988218e-05,
      "loss": 3.672,
      "step": 68000
    },
    {
      "epoch": 4.54,
      "learning_rate": 2.7320884651039597e-05,
      "loss": 3.6788,
      "step": 68500
    },
    {
      "epoch": 4.57,
      "learning_rate": 2.7155343663090983e-05,
      "loss": 3.6562,
      "step": 69000
    },
    {
      "epoch": 4.6,
      "learning_rate": 2.698980267514237e-05,
      "loss": 3.6582,
      "step": 69500
    },
    {
      "epoch": 4.64,
      "learning_rate": 2.682426168719375e-05,
      "loss": 3.6605,
      "step": 70000
    },
    {
      "epoch": 4.67,
      "learning_rate": 2.6658720699245133e-05,
      "loss": 3.6535,
      "step": 70500
    },
    {
      "epoch": 4.7,
      "learning_rate": 2.649317971129652e-05,
      "loss": 3.649,
      "step": 71000
    },
    {
      "epoch": 4.73,
      "learning_rate": 2.63276387233479e-05,
      "loss": 3.6455,
      "step": 71500
    },
    {
      "epoch": 4.77,
      "learning_rate": 2.6162097735399287e-05,
      "loss": 3.6437,
      "step": 72000
    },
    {
      "epoch": 4.8,
      "learning_rate": 2.599655674745067e-05,
      "loss": 3.6402,
      "step": 72500
    },
    {
      "epoch": 4.83,
      "learning_rate": 2.583101575950205e-05,
      "loss": 3.6257,
      "step": 73000
    },
    {
      "epoch": 4.87,
      "learning_rate": 2.5665474771553437e-05,
      "loss": 3.6125,
      "step": 73500
    },
    {
      "epoch": 4.9,
      "learning_rate": 2.5499933783604823e-05,
      "loss": 3.6132,
      "step": 74000
    },
    {
      "epoch": 4.93,
      "learning_rate": 2.5334392795656202e-05,
      "loss": 3.612,
      "step": 74500
    },
    {
      "epoch": 4.97,
      "learning_rate": 2.5168851807707588e-05,
      "loss": 3.6139,
      "step": 75000
    },
    {
      "epoch": 5.0,
      "learning_rate": 2.5003310819758973e-05,
      "loss": 3.5995,
      "step": 75500
    },
    {
      "epoch": 5.03,
      "learning_rate": 2.4837769831810356e-05,
      "loss": 3.6016,
      "step": 76000
    },
    {
      "epoch": 5.07,
      "learning_rate": 2.467222884386174e-05,
      "loss": 3.5882,
      "step": 76500
    },
    {
      "epoch": 5.1,
      "learning_rate": 2.4506687855913124e-05,
      "loss": 3.6023,
      "step": 77000
    },
    {
      "epoch": 5.13,
      "learning_rate": 2.434114686796451e-05,
      "loss": 3.6025,
      "step": 77500
    },
    {
      "epoch": 5.16,
      "learning_rate": 2.417560588001589e-05,
      "loss": 3.5897,
      "step": 78000
    },
    {
      "epoch": 5.2,
      "learning_rate": 2.4010064892067277e-05,
      "loss": 3.5767,
      "step": 78500
    },
    {
      "epoch": 5.23,
      "learning_rate": 2.384452390411866e-05,
      "loss": 3.5835,
      "step": 79000
    },
    {
      "epoch": 5.26,
      "learning_rate": 2.3678982916170045e-05,
      "loss": 3.5579,
      "step": 79500
    },
    {
      "epoch": 5.3,
      "learning_rate": 2.3513441928221428e-05,
      "loss": 3.5708,
      "step": 80000
    },
    {
      "epoch": 5.33,
      "learning_rate": 2.3347900940272813e-05,
      "loss": 3.5613,
      "step": 80500
    },
    {
      "epoch": 5.36,
      "learning_rate": 2.3182359952324196e-05,
      "loss": 3.5632,
      "step": 81000
    },
    {
      "epoch": 5.4,
      "learning_rate": 2.301681896437558e-05,
      "loss": 3.5688,
      "step": 81500
    },
    {
      "epoch": 5.43,
      "learning_rate": 2.2851277976426967e-05,
      "loss": 3.5563,
      "step": 82000
    },
    {
      "epoch": 5.46,
      "learning_rate": 2.268573698847835e-05,
      "loss": 3.5438,
      "step": 82500
    },
    {
      "epoch": 5.5,
      "learning_rate": 2.2520196000529735e-05,
      "loss": 3.5384,
      "step": 83000
    },
    {
      "epoch": 5.53,
      "learning_rate": 2.2354655012581117e-05,
      "loss": 3.5491,
      "step": 83500
    },
    {
      "epoch": 5.56,
      "learning_rate": 2.21891140246325e-05,
      "loss": 3.5491,
      "step": 84000
    },
    {
      "epoch": 5.6,
      "learning_rate": 2.2023573036683885e-05,
      "loss": 3.5437,
      "step": 84500
    },
    {
      "epoch": 5.63,
      "learning_rate": 2.1858032048735268e-05,
      "loss": 3.5335,
      "step": 85000
    },
    {
      "epoch": 5.66,
      "learning_rate": 2.169249106078665e-05,
      "loss": 3.5302,
      "step": 85500
    },
    {
      "epoch": 5.69,
      "learning_rate": 2.1526950072838036e-05,
      "loss": 3.5227,
      "step": 86000
    },
    {
      "epoch": 5.73,
      "learning_rate": 2.1361409084889418e-05,
      "loss": 3.5239,
      "step": 86500
    },
    {
      "epoch": 5.76,
      "learning_rate": 2.1195868096940804e-05,
      "loss": 3.5148,
      "step": 87000
    },
    {
      "epoch": 5.79,
      "learning_rate": 2.1030327108992186e-05,
      "loss": 3.5282,
      "step": 87500
    },
    {
      "epoch": 5.83,
      "learning_rate": 2.0864786121043572e-05,
      "loss": 3.5222,
      "step": 88000
    },
    {
      "epoch": 5.86,
      "learning_rate": 2.0699245133094954e-05,
      "loss": 3.5179,
      "step": 88500
    },
    {
      "epoch": 5.89,
      "learning_rate": 2.053370414514634e-05,
      "loss": 3.4973,
      "step": 89000
    },
    {
      "epoch": 5.93,
      "learning_rate": 2.0368163157197722e-05,
      "loss": 3.5072,
      "step": 89500
    },
    {
      "epoch": 5.96,
      "learning_rate": 2.0202622169249108e-05,
      "loss": 3.5043,
      "step": 90000
    },
    {
      "epoch": 5.99,
      "learning_rate": 2.003708118130049e-05,
      "loss": 3.5099,
      "step": 90500
    },
    {
      "epoch": 6.03,
      "learning_rate": 1.9871540193351876e-05,
      "loss": 3.4962,
      "step": 91000
    },
    {
      "epoch": 6.06,
      "learning_rate": 1.9705999205403258e-05,
      "loss": 3.4934,
      "step": 91500
    },
    {
      "epoch": 6.09,
      "learning_rate": 1.9540458217454644e-05,
      "loss": 3.484,
      "step": 92000
    },
    {
      "epoch": 6.13,
      "learning_rate": 1.9374917229506026e-05,
      "loss": 3.4683,
      "step": 92500
    },
    {
      "epoch": 6.16,
      "learning_rate": 1.9209376241557412e-05,
      "loss": 3.4929,
      "step": 93000
    },
    {
      "epoch": 6.19,
      "learning_rate": 1.9043835253608794e-05,
      "loss": 3.4759,
      "step": 93500
    },
    {
      "epoch": 6.22,
      "learning_rate": 1.887829426566018e-05,
      "loss": 3.4733,
      "step": 94000
    },
    {
      "epoch": 6.26,
      "learning_rate": 1.8712753277711562e-05,
      "loss": 3.4746,
      "step": 94500
    },
    {
      "epoch": 6.29,
      "learning_rate": 1.8547212289762948e-05,
      "loss": 3.4734,
      "step": 95000
    },
    {
      "epoch": 6.32,
      "learning_rate": 1.838167130181433e-05,
      "loss": 3.4738,
      "step": 95500
    },
    {
      "epoch": 6.36,
      "learning_rate": 1.8216130313865716e-05,
      "loss": 3.4768,
      "step": 96000
    },
    {
      "epoch": 6.39,
      "learning_rate": 1.80505893259171e-05,
      "loss": 3.4731,
      "step": 96500
    },
    {
      "epoch": 6.42,
      "learning_rate": 1.7885048337968484e-05,
      "loss": 3.4659,
      "step": 97000
    },
    {
      "epoch": 6.46,
      "learning_rate": 1.7719507350019866e-05,
      "loss": 3.4642,
      "step": 97500
    },
    {
      "epoch": 6.49,
      "learning_rate": 1.755396636207125e-05,
      "loss": 3.4595,
      "step": 98000
    },
    {
      "epoch": 6.52,
      "learning_rate": 1.738842537412263e-05,
      "loss": 3.4715,
      "step": 98500
    },
    {
      "epoch": 6.56,
      "learning_rate": 1.7222884386174017e-05,
      "loss": 3.4457,
      "step": 99000
    },
    {
      "epoch": 6.59,
      "learning_rate": 1.70573433982254e-05,
      "loss": 3.4621,
      "step": 99500
    },
    {
      "epoch": 6.62,
      "learning_rate": 1.6891802410276785e-05,
      "loss": 3.446,
      "step": 100000
    }
  ],
  "logging_steps": 500,
  "max_steps": 151020,
  "num_train_epochs": 10,
  "save_steps": 100000,
  "total_flos": 1.2639573579451392e+18,
  "trial_name": null,
  "trial_params": null
}