{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "global_step": 123750,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.97979797979798e-05,
      "loss": 1.9971,
      "step": 500
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9595959595959594e-05,
      "loss": 1.9127,
      "step": 1000
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.93939393939394e-05,
      "loss": 1.822,
      "step": 1500
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.919191919191919e-05,
      "loss": 1.799,
      "step": 2000
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.898989898989899e-05,
      "loss": 1.7302,
      "step": 2500
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.878787878787879e-05,
      "loss": 1.7547,
      "step": 3000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.858585858585859e-05,
      "loss": 1.7697,
      "step": 3500
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.838383838383839e-05,
      "loss": 1.729,
      "step": 4000
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.8181818181818186e-05,
      "loss": 1.7333,
      "step": 4500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.797979797979798e-05,
      "loss": 1.7238,
      "step": 5000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.7777777777777784e-05,
      "loss": 1.678,
      "step": 5500
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.7575757575757576e-05,
      "loss": 1.6989,
      "step": 6000
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.7373737373737375e-05,
      "loss": 1.7389,
      "step": 6500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.7171717171717174e-05,
      "loss": 1.6863,
      "step": 7000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.696969696969697e-05,
      "loss": 1.6846,
      "step": 7500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.676767676767677e-05,
      "loss": 1.6653,
      "step": 8000
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.656565656565657e-05,
      "loss": 1.6418,
      "step": 8500
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.636363636363636e-05,
      "loss": 1.6847,
      "step": 9000
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.616161616161616e-05,
      "loss": 1.6139,
      "step": 9500
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.595959595959596e-05,
      "loss": 1.632,
      "step": 10000
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.575757575757576e-05,
      "loss": 1.6174,
      "step": 10500
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.555555555555556e-05,
      "loss": 1.6112,
      "step": 11000
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.535353535353535e-05,
      "loss": 1.621,
      "step": 11500
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.515151515151516e-05,
      "loss": 1.635,
      "step": 12000
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.494949494949495e-05,
      "loss": 1.6097,
      "step": 12500
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.474747474747475e-05,
      "loss": 1.6452,
      "step": 13000
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.454545454545455e-05,
      "loss": 1.6256,
      "step": 13500
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.4343434343434346e-05,
      "loss": 1.5971,
      "step": 14000
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.4141414141414145e-05,
      "loss": 1.6227,
      "step": 14500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.3939393939393944e-05,
      "loss": 1.5739,
      "step": 15000
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.3737373737373736e-05,
      "loss": 1.5691,
      "step": 15500
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.3535353535353535e-05,
      "loss": 1.5546,
      "step": 16000
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.3333333333333334e-05,
      "loss": 1.5823,
      "step": 16500
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.313131313131313e-05,
      "loss": 1.5551,
      "step": 17000
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.292929292929293e-05,
      "loss": 1.5894,
      "step": 17500
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.2727272727272724e-05,
      "loss": 1.5619,
      "step": 18000
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.252525252525253e-05,
      "loss": 1.5391,
      "step": 18500
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.232323232323233e-05,
      "loss": 1.5915,
      "step": 19000
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.212121212121212e-05,
      "loss": 1.5544,
      "step": 19500
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.191919191919192e-05,
      "loss": 1.5526,
      "step": 20000
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.171717171717172e-05,
      "loss": 1.6034,
      "step": 20500
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.151515151515152e-05,
      "loss": 1.5726,
      "step": 21000
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.131313131313132e-05,
      "loss": 1.5359,
      "step": 21500
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.111111111111111e-05,
      "loss": 1.5478,
      "step": 22000
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.0909090909090915e-05,
      "loss": 1.5536,
      "step": 22500
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.070707070707071e-05,
      "loss": 1.5392,
      "step": 23000
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.0505050505050506e-05,
      "loss": 1.5625,
      "step": 23500
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.0303030303030305e-05,
      "loss": 1.5591,
      "step": 24000
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.01010101010101e-05,
      "loss": 1.5213,
      "step": 24500
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.98989898989899e-05,
      "loss": 1.5745,
      "step": 25000
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.96969696969697e-05,
      "loss": 1.5482,
      "step": 25500
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.9494949494949494e-05,
      "loss": 1.5316,
      "step": 26000
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.929292929292929e-05,
      "loss": 1.5337,
      "step": 26500
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.909090909090909e-05,
      "loss": 1.4942,
      "step": 27000
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.888888888888889e-05,
      "loss": 1.5151,
      "step": 27500
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.868686868686869e-05,
      "loss": 1.5496,
      "step": 28000
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.848484848484848e-05,
      "loss": 1.5163,
      "step": 28500
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.828282828282829e-05,
      "loss": 1.5256,
      "step": 29000
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.8080808080808087e-05,
      "loss": 1.4599,
      "step": 29500
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.787878787878788e-05,
      "loss": 1.4989,
      "step": 30000
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.767676767676768e-05,
      "loss": 1.5055,
      "step": 30500
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.747474747474748e-05,
      "loss": 1.5022,
      "step": 31000
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.7272727272727276e-05,
      "loss": 1.4822,
      "step": 31500
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.7070707070707075e-05,
      "loss": 1.5122,
      "step": 32000
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.686868686868687e-05,
      "loss": 1.4803,
      "step": 32500
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.6666666666666666e-05,
      "loss": 1.4926,
      "step": 33000
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.6464646464646465e-05,
      "loss": 1.4901,
      "step": 33500
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.6262626262626264e-05,
      "loss": 1.4719,
      "step": 34000
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.606060606060606e-05,
      "loss": 1.4807,
      "step": 34500
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.5858585858585855e-05,
      "loss": 1.5062,
      "step": 35000
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.565656565656566e-05,
      "loss": 1.5005,
      "step": 35500
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.545454545454546e-05,
      "loss": 1.4907,
      "step": 36000
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.525252525252525e-05,
      "loss": 1.4665,
      "step": 36500
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.505050505050505e-05,
      "loss": 1.4935,
      "step": 37000
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.484848484848485e-05,
      "loss": 1.4598,
      "step": 37500
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.464646464646465e-05,
      "loss": 1.4724,
      "step": 38000
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.444444444444445e-05,
      "loss": 1.4927,
      "step": 38500
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.424242424242424e-05,
      "loss": 1.4694,
      "step": 39000
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.4040404040404045e-05,
      "loss": 1.479,
      "step": 39500
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.3838383838383844e-05,
      "loss": 1.5048,
      "step": 40000
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.3636363636363636e-05,
      "loss": 1.4738,
      "step": 40500
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.3434343434343435e-05,
      "loss": 1.4608,
      "step": 41000
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.3232323232323234e-05,
      "loss": 1.3329,
      "step": 41500
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.303030303030303e-05,
      "loss": 1.3101,
      "step": 42000
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.282828282828283e-05,
      "loss": 1.2492,
      "step": 42500
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.2626262626262624e-05,
      "loss": 1.2951,
      "step": 43000
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.2424242424242423e-05,
      "loss": 1.2733,
      "step": 43500
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.222222222222223e-05,
      "loss": 1.2937,
      "step": 44000
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.202020202020202e-05,
      "loss": 1.2735,
      "step": 44500
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.181818181818182e-05,
      "loss": 1.2602,
      "step": 45000
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.161616161616161e-05,
      "loss": 1.2623,
      "step": 45500
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.141414141414142e-05,
      "loss": 1.2759,
      "step": 46000
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.121212121212122e-05,
      "loss": 1.2684,
      "step": 46500
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.101010101010101e-05,
      "loss": 1.2501,
      "step": 47000
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.080808080808081e-05,
      "loss": 1.3255,
      "step": 47500
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.060606060606061e-05,
      "loss": 1.2538,
      "step": 48000
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.0404040404040406e-05,
      "loss": 1.2527,
      "step": 48500
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.0202020202020205e-05,
      "loss": 1.2417,
      "step": 49000
    },
    {
      "epoch": 1.2,
      "learning_rate": 3e-05,
      "loss": 1.2679,
      "step": 49500
    },
    {
      "epoch": 1.21,
      "learning_rate": 2.9797979797979796e-05,
      "loss": 1.2634,
      "step": 50000
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.95959595959596e-05,
      "loss": 1.2838,
      "step": 50500
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.9393939393939394e-05,
      "loss": 1.2911,
      "step": 51000
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.9191919191919193e-05,
      "loss": 1.2957,
      "step": 51500
    },
    {
      "epoch": 1.26,
      "learning_rate": 2.898989898989899e-05,
      "loss": 1.2989,
      "step": 52000
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.878787878787879e-05,
      "loss": 1.2837,
      "step": 52500
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.8585858585858587e-05,
      "loss": 1.2799,
      "step": 53000
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.8383838383838386e-05,
      "loss": 1.3081,
      "step": 53500
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.818181818181818e-05,
      "loss": 1.2611,
      "step": 54000
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.7979797979797984e-05,
      "loss": 1.2629,
      "step": 54500
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.777777777777778e-05,
      "loss": 1.2824,
      "step": 55000
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.7575757575757578e-05,
      "loss": 1.2779,
      "step": 55500
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.7373737373737374e-05,
      "loss": 1.2509,
      "step": 56000
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.717171717171717e-05,
      "loss": 1.2767,
      "step": 56500
    },
    {
      "epoch": 1.38,
      "learning_rate": 2.696969696969697e-05,
      "loss": 1.2694,
      "step": 57000
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.676767676767677e-05,
      "loss": 1.2863,
      "step": 57500
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.6565656565656566e-05,
      "loss": 1.2726,
      "step": 58000
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.636363636363636e-05,
      "loss": 1.2869,
      "step": 58500
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.6161616161616164e-05,
      "loss": 1.2701,
      "step": 59000
    },
    {
      "epoch": 1.44,
      "learning_rate": 2.5959595959595963e-05,
      "loss": 1.2943,
      "step": 59500
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.575757575757576e-05,
      "loss": 1.2579,
      "step": 60000
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.5555555555555554e-05,
      "loss": 1.2592,
      "step": 60500
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.5353535353535356e-05,
      "loss": 1.2633,
      "step": 61000
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.5151515151515155e-05,
      "loss": 1.2764,
      "step": 61500
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.494949494949495e-05,
      "loss": 1.2795,
      "step": 62000
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.474747474747475e-05,
      "loss": 1.2626,
      "step": 62500
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.4545454545454545e-05,
      "loss": 1.2653,
      "step": 63000
    },
    {
      "epoch": 1.54,
      "learning_rate": 2.4343434343434344e-05,
      "loss": 1.277,
      "step": 63500
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.4141414141414143e-05,
      "loss": 1.271,
      "step": 64000
    },
    {
      "epoch": 1.56,
      "learning_rate": 2.393939393939394e-05,
      "loss": 1.3027,
      "step": 64500
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.3737373737373738e-05,
      "loss": 1.2827,
      "step": 65000
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.3535353535353537e-05,
      "loss": 1.2849,
      "step": 65500
    },
    {
      "epoch": 1.6,
      "learning_rate": 2.3333333333333336e-05,
      "loss": 1.244,
      "step": 66000
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.313131313131313e-05,
      "loss": 1.2626,
      "step": 66500
    },
    {
      "epoch": 1.62,
      "learning_rate": 2.292929292929293e-05,
      "loss": 1.224,
      "step": 67000
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.272727272727273e-05,
      "loss": 1.2652,
      "step": 67500
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.2525252525252528e-05,
      "loss": 1.2503,
      "step": 68000
    },
    {
      "epoch": 1.66,
      "learning_rate": 2.2323232323232324e-05,
      "loss": 1.2417,
      "step": 68500
    },
    {
      "epoch": 1.67,
      "learning_rate": 2.2121212121212123e-05,
      "loss": 1.285,
      "step": 69000
    },
    {
      "epoch": 1.68,
      "learning_rate": 2.191919191919192e-05,
      "loss": 1.2628,
      "step": 69500
    },
    {
      "epoch": 1.7,
      "learning_rate": 2.171717171717172e-05,
      "loss": 1.2363,
      "step": 70000
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.1515151515151516e-05,
      "loss": 1.2178,
      "step": 70500
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.1313131313131315e-05,
      "loss": 1.2634,
      "step": 71000
    },
    {
      "epoch": 1.73,
      "learning_rate": 2.111111111111111e-05,
      "loss": 1.2865,
      "step": 71500
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.090909090909091e-05,
      "loss": 1.263,
      "step": 72000
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.070707070707071e-05,
      "loss": 1.2734,
      "step": 72500
    },
    {
      "epoch": 1.77,
      "learning_rate": 2.0505050505050504e-05,
      "loss": 1.2881,
      "step": 73000
    },
    {
      "epoch": 1.78,
      "learning_rate": 2.0303030303030303e-05,
      "loss": 1.2164,
      "step": 73500
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.0101010101010102e-05,
      "loss": 1.2381,
      "step": 74000
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.98989898989899e-05,
      "loss": 1.2552,
      "step": 74500
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.9696969696969697e-05,
      "loss": 1.2883,
      "step": 75000
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.9494949494949496e-05,
      "loss": 1.238,
      "step": 75500
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.9292929292929295e-05,
      "loss": 1.2566,
      "step": 76000
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.9090909090909094e-05,
      "loss": 1.2648,
      "step": 76500
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.888888888888889e-05,
      "loss": 1.2749,
      "step": 77000
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.8686868686868688e-05,
      "loss": 1.2534,
      "step": 77500
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.8484848484848487e-05,
      "loss": 1.2511,
      "step": 78000
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.8282828282828286e-05,
      "loss": 1.2808,
      "step": 78500
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.808080808080808e-05,
      "loss": 1.2687,
      "step": 79000
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.787878787878788e-05,
      "loss": 1.2495,
      "step": 79500
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.7676767676767676e-05,
      "loss": 1.2671,
      "step": 80000
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.7474747474747475e-05,
      "loss": 1.2517,
      "step": 80500
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.7272727272727274e-05,
      "loss": 1.2058,
      "step": 81000
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.707070707070707e-05,
      "loss": 1.2215,
      "step": 81500
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.686868686868687e-05,
      "loss": 1.2407,
      "step": 82000
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 1.2347,
      "step": 82500
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.6464646464646466e-05,
      "loss": 1.0606,
      "step": 83000
    },
    {
      "epoch": 2.02,
      "learning_rate": 1.6262626262626262e-05,
      "loss": 1.0993,
      "step": 83500
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.606060606060606e-05,
      "loss": 1.0871,
      "step": 84000
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.585858585858586e-05,
      "loss": 1.0975,
      "step": 84500
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.565656565656566e-05,
      "loss": 1.098,
      "step": 85000
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.5454545454545454e-05,
      "loss": 1.105,
      "step": 85500
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.5252525252525255e-05,
      "loss": 1.1159,
      "step": 86000
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.505050505050505e-05,
      "loss": 1.1012,
      "step": 86500
    },
    {
      "epoch": 2.11,
      "learning_rate": 1.484848484848485e-05,
      "loss": 1.0811,
      "step": 87000
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.4646464646464647e-05,
      "loss": 1.0882,
      "step": 87500
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.4444444444444444e-05,
      "loss": 1.0659,
      "step": 88000
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.4242424242424243e-05,
      "loss": 1.0932,
      "step": 88500
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.404040404040404e-05,
      "loss": 1.0818,
      "step": 89000
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.383838383838384e-05,
      "loss": 1.096,
      "step": 89500
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.3636363636363637e-05,
      "loss": 1.0844,
      "step": 90000
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.3434343434343436e-05,
      "loss": 1.0893,
      "step": 90500
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.3232323232323233e-05,
      "loss": 1.1083,
      "step": 91000
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.3030303030303032e-05,
      "loss": 1.0416,
      "step": 91500
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.2828282828282829e-05,
      "loss": 1.1267,
      "step": 92000
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.2626262626262628e-05,
      "loss": 1.0787,
      "step": 92500
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.2424242424242424e-05,
      "loss": 1.0863,
      "step": 93000
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.2222222222222222e-05,
      "loss": 1.0955,
      "step": 93500
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.202020202020202e-05,
      "loss": 1.0723,
      "step": 94000
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.1818181818181819e-05,
      "loss": 1.107,
      "step": 94500
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.1616161616161616e-05,
      "loss": 1.0941,
      "step": 95000
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.1414141414141415e-05,
      "loss": 1.0638,
      "step": 95500
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.1212121212121212e-05,
      "loss": 1.0862,
      "step": 96000
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.1010101010101011e-05,
      "loss": 1.0879,
      "step": 96500
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.0808080808080808e-05,
      "loss": 1.1124,
      "step": 97000
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.0606060606060607e-05,
      "loss": 1.0761,
      "step": 97500
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.0404040404040405e-05,
      "loss": 1.0931,
      "step": 98000
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.0202020202020204e-05,
      "loss": 1.0835,
      "step": 98500
    },
    {
      "epoch": 2.4,
      "learning_rate": 1e-05,
      "loss": 1.0746,
      "step": 99000
    },
    {
      "epoch": 2.41,
      "learning_rate": 9.7979797979798e-06,
      "loss": 1.099,
      "step": 99500
    },
    {
      "epoch": 2.42,
      "learning_rate": 9.595959595959595e-06,
      "loss": 1.1072,
      "step": 100000
    },
    {
      "epoch": 2.44,
      "learning_rate": 9.393939393939394e-06,
      "loss": 1.0603,
      "step": 100500
    },
    {
      "epoch": 2.45,
      "learning_rate": 9.191919191919192e-06,
      "loss": 1.0652,
      "step": 101000
    },
    {
      "epoch": 2.46,
      "learning_rate": 8.98989898989899e-06,
      "loss": 1.0734,
      "step": 101500
    },
    {
      "epoch": 2.47,
      "learning_rate": 8.787878787878788e-06,
      "loss": 1.0666,
      "step": 102000
    },
    {
      "epoch": 2.48,
      "learning_rate": 8.585858585858587e-06,
      "loss": 1.0672,
      "step": 102500
    },
    {
      "epoch": 2.5,
      "learning_rate": 8.383838383838384e-06,
      "loss": 1.0576,
      "step": 103000
    },
    {
      "epoch": 2.51,
      "learning_rate": 8.181818181818183e-06,
      "loss": 1.0902,
      "step": 103500
    },
    {
      "epoch": 2.52,
      "learning_rate": 7.97979797979798e-06,
      "loss": 1.0693,
      "step": 104000
    },
    {
      "epoch": 2.53,
      "learning_rate": 7.777777777777777e-06,
      "loss": 1.0336,
      "step": 104500
    },
    {
      "epoch": 2.55,
      "learning_rate": 7.5757575757575764e-06,
      "loss": 1.0752,
      "step": 105000
    },
    {
      "epoch": 2.56,
      "learning_rate": 7.3737373737373745e-06,
      "loss": 1.0896,
      "step": 105500
    },
    {
      "epoch": 2.57,
      "learning_rate": 7.171717171717173e-06,
      "loss": 1.0674,
      "step": 106000
    },
    {
      "epoch": 2.58,
      "learning_rate": 6.969696969696971e-06,
      "loss": 1.0682,
      "step": 106500
    },
    {
      "epoch": 2.59,
      "learning_rate": 6.767676767676769e-06,
      "loss": 1.0851,
      "step": 107000
    },
    {
      "epoch": 2.61,
      "learning_rate": 6.565656565656567e-06,
      "loss": 1.0671,
      "step": 107500
    },
    {
      "epoch": 2.62,
      "learning_rate": 6.363636363636363e-06,
      "loss": 1.1211,
      "step": 108000
    },
    {
      "epoch": 2.63,
      "learning_rate": 6.161616161616162e-06,
      "loss": 1.0873,
      "step": 108500
    },
    {
      "epoch": 2.64,
      "learning_rate": 5.9595959595959605e-06,
      "loss": 1.0913,
      "step": 109000
    },
    {
      "epoch": 2.65,
      "learning_rate": 5.7575757575757586e-06,
      "loss": 1.113,
      "step": 109500
    },
    {
      "epoch": 2.67,
      "learning_rate": 5.555555555555556e-06,
      "loss": 1.0346,
      "step": 110000
    },
    {
      "epoch": 2.68,
      "learning_rate": 5.353535353535354e-06,
      "loss": 1.1079,
      "step": 110500
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.151515151515152e-06,
      "loss": 1.0749,
      "step": 111000
    },
    {
      "epoch": 2.7,
      "learning_rate": 4.949494949494949e-06,
      "loss": 1.0824,
      "step": 111500
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.747474747474747e-06,
      "loss": 1.0841,
      "step": 112000
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.5454545454545455e-06,
      "loss": 1.0914,
      "step": 112500
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.343434343434344e-06,
      "loss": 1.0605,
      "step": 113000
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.141414141414142e-06,
      "loss": 1.0965,
      "step": 113500
    },
    {
      "epoch": 2.76,
      "learning_rate": 3.939393939393939e-06,
      "loss": 1.0902,
      "step": 114000
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.7373737373737375e-06,
      "loss": 1.0711,
      "step": 114500
    },
    {
      "epoch": 2.79,
      "learning_rate": 3.5353535353535352e-06,
      "loss": 1.1054,
      "step": 115000
    },
    {
      "epoch": 2.8,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.0563,
      "step": 115500
    },
    {
      "epoch": 2.81,
      "learning_rate": 3.1313131313131314e-06,
      "loss": 1.0524,
      "step": 116000
    },
    {
      "epoch": 2.82,
      "learning_rate": 2.9292929292929295e-06,
      "loss": 1.0564,
      "step": 116500
    },
    {
      "epoch": 2.84,
      "learning_rate": 2.7272727272727272e-06,
      "loss": 1.0935,
      "step": 117000
    },
    {
      "epoch": 2.85,
      "learning_rate": 2.5252525252525253e-06,
      "loss": 1.0868,
      "step": 117500
    },
    {
      "epoch": 2.86,
      "learning_rate": 2.3232323232323234e-06,
      "loss": 1.0526,
      "step": 118000
    },
    {
      "epoch": 2.87,
      "learning_rate": 2.1212121212121216e-06,
      "loss": 1.0539,
      "step": 118500
    },
    {
      "epoch": 2.88,
      "learning_rate": 1.9191919191919192e-06,
      "loss": 1.0605,
      "step": 119000
    },
    {
      "epoch": 2.9,
      "learning_rate": 1.7171717171717171e-06,
      "loss": 1.0406,
      "step": 119500
    },
    {
      "epoch": 2.91,
      "learning_rate": 1.5151515151515152e-06,
      "loss": 1.0582,
      "step": 120000
    },
    {
      "epoch": 2.92,
      "learning_rate": 1.3131313131313131e-06,
      "loss": 1.0763,
      "step": 120500
    },
    {
      "epoch": 2.93,
      "learning_rate": 1.1111111111111112e-06,
      "loss": 1.0838,
      "step": 121000
    },
    {
      "epoch": 2.95,
      "learning_rate": 9.09090909090909e-07,
      "loss": 1.0547,
      "step": 121500
    },
    {
      "epoch": 2.96,
      "learning_rate": 7.070707070707071e-07,
      "loss": 1.07,
      "step": 122000
    },
    {
      "epoch": 2.97,
      "learning_rate": 5.050505050505052e-07,
      "loss": 1.0643,
      "step": 122500
    },
    {
      "epoch": 2.98,
      "learning_rate": 3.0303030303030305e-07,
      "loss": 1.0826,
      "step": 123000
    },
    {
      "epoch": 2.99,
      "learning_rate": 1.0101010101010101e-07,
      "loss": 1.0529,
      "step": 123500
    },
    {
      "epoch": 3.0,
      "step": 123750,
      "total_flos": 14680805909643264,
      "train_runtime": 13647.3004,
      "train_samples_per_second": 9.068
    }
  ],
  "max_steps": 123750,
  "num_train_epochs": 3,
  "total_flos": 14680805909643264,
  "trial_name": null,
  "trial_params": null
}