{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 29.0,
  "global_step": 98252,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "learning_rate": 5e-09, "loss": 10.5588, "step": 1 },
    { "epoch": 0.15, "learning_rate": 2.5e-06, "loss": 9.4906, "step": 500 },
    { "epoch": 0.3, "learning_rate": 5e-06, "loss": 7.9572, "step": 1000 },
    { "epoch": 0.44, "learning_rate": 7.5e-06, "loss": 7.0139, "step": 1500 },
    { "epoch": 0.59, "learning_rate": 1e-05, "loss": 6.7454, "step": 2000 },
    { "epoch": 0.74, "learning_rate": 1.25e-05, "loss": 6.5877, "step": 2500 },
    { "epoch": 0.89, "learning_rate": 1.5e-05, "loss": 6.4748, "step": 3000 },
    { "epoch": 1.03, "learning_rate": 1.75e-05, "loss": 6.3744, "step": 3500 },
    { "epoch": 1.18, "learning_rate": 2e-05, "loss": 6.2984, "step": 4000 },
    { "epoch": 1.33, "learning_rate": 2.25e-05, "loss": 6.2341, "step": 4500 },
    { "epoch": 1.48, "learning_rate": 2.5e-05, "loss": 6.1767, "step": 5000 },
    { "epoch": 1.62, "learning_rate": 2.7500000000000004e-05, "loss": 6.1274, "step": 5500 },
    { "epoch": 1.77, "learning_rate": 3e-05, "loss": 6.0823, "step": 6000 },
    { "epoch": 1.92, "learning_rate": 3.2500000000000004e-05, "loss": 6.0421, "step": 6500 },
    { "epoch": 2.07, "learning_rate": 3.5e-05, "loss": 6.0017, "step": 7000 },
    { "epoch": 2.21, "learning_rate": 3.7500000000000003e-05, "loss": 5.9722, "step": 7500 },
    { "epoch": 2.36, "learning_rate": 4e-05, "loss": 5.9385, "step": 8000 },
    { "epoch": 2.51, "learning_rate": 4.2495e-05, "loss": 5.9107, "step": 8500 },
    { "epoch": 2.66, "learning_rate": 4.4995000000000005e-05, "loss": 5.8896, "step": 9000 },
    { "epoch": 2.8, "learning_rate": 4.7495e-05, "loss": 5.8665, "step": 9500 },
    { "epoch": 2.95, "learning_rate": 4.9995000000000005e-05, "loss": 5.8475, "step": 10000 },
    { "epoch": 3.1, "learning_rate": 4.998148974130241e-05, "loss": 5.8261, "step": 10500 },
    { "epoch": 3.25, "learning_rate": 4.9962905144216476e-05, "loss": 5.8092, "step": 11000 },
    { "epoch": 3.39, "learning_rate": 4.994432054713054e-05, "loss": 5.7955, "step": 11500 },
    { "epoch": 3.54, "learning_rate": 4.9925735950044606e-05, "loss": 5.7808, "step": 12000 },
    { "epoch": 3.69, "learning_rate": 4.990718852215284e-05, "loss": 5.7663, "step": 12500 },
    { "epoch": 3.84, "learning_rate": 4.9888603925066905e-05, "loss": 5.758, "step": 13000 },
    { "epoch": 3.98, "learning_rate": 4.987001932798097e-05, "loss": 5.7457, "step": 13500 },
    { "epoch": 4.13, "learning_rate": 4.9851434730895035e-05, "loss": 5.7345, "step": 14000 },
    { "epoch": 4.28, "learning_rate": 4.98328501338091e-05, "loss": 5.7287, "step": 14500 },
    { "epoch": 4.43, "learning_rate": 4.981430270591734e-05, "loss": 5.7177, "step": 15000 },
    { "epoch": 4.57, "learning_rate": 4.97957181088314e-05, "loss": 5.7086, "step": 15500 },
    { "epoch": 4.72, "learning_rate": 4.9777133511745463e-05, "loss": 5.7048, "step": 16000 },
    { "epoch": 4.87, "learning_rate": 4.975854891465953e-05, "loss": 5.6978, "step": 16500 },
    { "epoch": 5.02, "learning_rate": 4.974000148676777e-05, "loss": 5.6939, "step": 17000 },
    { "epoch": 5.17, "learning_rate": 4.9721416889681834e-05, "loss": 5.6829, "step": 17500 },
    { "epoch": 5.31, "learning_rate": 4.97028322925959e-05, "loss": 5.6763, "step": 18000 },
    { "epoch": 5.46, "learning_rate": 4.9684247695509964e-05, "loss": 5.6745, "step": 18500 },
    { "epoch": 5.61, "learning_rate": 4.96657002676182e-05, "loss": 5.6688, "step": 19000 },
    { "epoch": 5.76, "learning_rate": 4.964711567053226e-05, "loss": 5.6605, "step": 19500 },
    { "epoch": 5.9, "learning_rate": 4.96285682426405e-05, "loss": 5.6596, "step": 20000 },
    { "epoch": 6.05, "learning_rate": 4.960998364555456e-05, "loss": 5.6507, "step": 20500 },
    { "epoch": 6.2, "learning_rate": 4.959139904846863e-05, "loss": 5.6504, "step": 21000 },
    { "epoch": 6.35, "learning_rate": 4.95728144513827e-05, "loss": 5.6443, "step": 21500 },
    { "epoch": 6.49, "learning_rate": 4.955426702349094e-05, "loss": 5.641, "step": 22000 },
    { "epoch": 6.64, "learning_rate": 4.9535682426405e-05, "loss": 5.6369, "step": 22500 },
    { "epoch": 6.79, "learning_rate": 4.951709782931906e-05, "loss": 5.6307, "step": 23000 },
    { "epoch": 6.94, "learning_rate": 4.9498513232233127e-05, "loss": 5.6267, "step": 23500 },
    { "epoch": 7.08, "learning_rate": 4.947992863514719e-05, "loss": 5.6207, "step": 24000 },
    { "epoch": 7.23, "learning_rate": 4.946134403806126e-05, "loss": 5.6199, "step": 24500 },
    { "epoch": 7.38, "learning_rate": 4.94427966101695e-05, "loss": 5.6176, "step": 25000 },
    { "epoch": 7.53, "learning_rate": 4.942421201308356e-05, "loss": 5.6159, "step": 25500 },
    { "epoch": 7.67, "learning_rate": 4.940562741599763e-05, "loss": 5.6105, "step": 26000 },
    { "epoch": 7.82, "learning_rate": 4.9387042818911685e-05, "loss": 5.6074, "step": 26500 },
    { "epoch": 7.97, "learning_rate": 4.936845822182575e-05, "loss": 5.6082, "step": 27000 },
    { "epoch": 8.12, "learning_rate": 4.9349873624739815e-05, "loss": 5.6027, "step": 27500 },
    { "epoch": 8.26, "learning_rate": 4.9331326196848056e-05, "loss": 5.598, "step": 28000 },
    { "epoch": 8.41, "learning_rate": 4.931274159976212e-05, "loss": 5.5981, "step": 28500 },
    { "epoch": 8.56, "learning_rate": 4.9294157002676186e-05, "loss": 5.5927, "step": 29000 },
    { "epoch": 8.71, "learning_rate": 4.927557240559025e-05, "loss": 5.5912, "step": 29500 },
    { "epoch": 8.85, "learning_rate": 4.925698780850431e-05, "loss": 5.5881, "step": 30000 },
    { "epoch": 9.0, "learning_rate": 4.9238403211418374e-05, "loss": 5.5843, "step": 30500 },
    { "epoch": 9.15, "learning_rate": 4.921981861433244e-05, "loss": 5.584, "step": 31000 },
    { "epoch": 9.3, "learning_rate": 4.9201234017246504e-05, "loss": 5.5809, "step": 31500 },
    { "epoch": 9.45, "learning_rate": 4.918272375854891e-05, "loss": 5.5765, "step": 32000 },
    { "epoch": 9.59, "learning_rate": 4.916413916146298e-05, "loss": 5.575, "step": 32500 },
    { "epoch": 9.74, "learning_rate": 4.914555456437704e-05, "loss": 5.5707, "step": 33000 },
    { "epoch": 9.89, "learning_rate": 4.912696996729111e-05, "loss": 5.5738, "step": 33500 },
    { "epoch": 10.04, "learning_rate": 4.910838537020518e-05, "loss": 5.5686, "step": 34000 },
    { "epoch": 10.18, "learning_rate": 4.9089800773119245e-05, "loss": 5.5673, "step": 34500 },
    { "epoch": 10.33, "learning_rate": 4.907121617603331e-05, "loss": 5.5328, "step": 35000 },
    { "epoch": 10.48, "learning_rate": 4.9052631578947375e-05, "loss": 5.2919, "step": 35500 },
    { "epoch": 10.63, "learning_rate": 4.9034046981861434e-05, "loss": 5.1132, "step": 36000 },
    { "epoch": 10.77, "learning_rate": 4.9015499553969674e-05, "loss": 4.9545, "step": 36500 },
    { "epoch": 10.92, "learning_rate": 4.899691495688374e-05, "loss": 4.7923, "step": 37000 },
    { "epoch": 11.07, "learning_rate": 4.8978330359797804e-05, "loss": 4.6472, "step": 37500 },
    { "epoch": 11.22, "learning_rate": 4.895974576271187e-05, "loss": 4.508, "step": 38000 },
    { "epoch": 11.36, "learning_rate": 4.89411983348201e-05, "loss": 4.3801, "step": 38500 },
    { "epoch": 11.51, "learning_rate": 4.892261373773417e-05, "loss": 4.2399, "step": 39000 },
    { "epoch": 11.66, "learning_rate": 4.890402914064823e-05, "loss": 4.0865, "step": 39500 },
    { "epoch": 11.81, "learning_rate": 4.88854445435623e-05, "loss": 3.7505, "step": 40000 },
    { "epoch": 11.95, "learning_rate": 4.886689711567054e-05, "loss": 3.1766, "step": 40500 },
    { "epoch": 12.1, "learning_rate": 4.8848312518584596e-05, "loss": 2.7324, "step": 41000 },
    { "epoch": 12.25, "learning_rate": 4.8829765090692836e-05, "loss": 2.5016, "step": 41500 },
    { "epoch": 12.4, "learning_rate": 4.88111804936069e-05, "loss": 2.352, "step": 42000 },
    { "epoch": 12.54, "learning_rate": 4.8792595896520966e-05, "loss": 2.247, "step": 42500 },
    { "epoch": 12.69, "learning_rate": 4.877401129943503e-05, "loss": 2.1678, "step": 43000 },
    { "epoch": 12.84, "learning_rate": 4.87554267023491e-05, "loss": 2.095, "step": 43500 },
    { "epoch": 12.99, "learning_rate": 4.873687927445733e-05, "loss": 2.0451, "step": 44000 },
    { "epoch": 13.13, "learning_rate": 4.8718294677371395e-05, "loss": 1.992, "step": 44500 },
    { "epoch": 13.28, "learning_rate": 4.869971008028546e-05, "loss": 1.9523, "step": 45000 },
    { "epoch": 13.43, "learning_rate": 4.8681125483199525e-05, "loss": 1.9121, "step": 45500 },
    { "epoch": 13.58, "learning_rate": 4.866254088611359e-05, "loss": 1.8785, "step": 46000 },
    { "epoch": 13.72, "learning_rate": 4.8643993458221824e-05, "loss": 1.8475, "step": 46500 },
    { "epoch": 13.87, "learning_rate": 4.862540886113589e-05, "loss": 1.8158, "step": 47000 },
    { "epoch": 14.02, "learning_rate": 4.860682426404996e-05, "loss": 1.7896, "step": 47500 },
    { "epoch": 14.17, "learning_rate": 4.8588239666964026e-05, "loss": 1.7639, "step": 48000 },
    { "epoch": 14.32, "learning_rate": 4.856965506987809e-05, "loss": 1.7434, "step": 48500 },
    { "epoch": 14.46, "learning_rate": 4.8551107641986324e-05, "loss": 1.7184, "step": 49000 },
    { "epoch": 14.61, "learning_rate": 4.853252304490039e-05, "loss": 1.7003, "step": 49500 },
    { "epoch": 14.76, "learning_rate": 4.8513938447814454e-05, "loss": 1.6781, "step": 50000 },
    { "epoch": 14.91, "learning_rate": 4.849535385072852e-05, "loss": 1.6588, "step": 50500 },
    { "epoch": 15.05, "learning_rate": 4.847680642283676e-05, "loss": 1.6461, "step": 51000 },
    { "epoch": 15.2, "learning_rate": 4.845822182575082e-05, "loss": 1.627, "step": 51500 },
    { "epoch": 15.35, "learning_rate": 4.843963722866488e-05, "loss": 1.6137, "step": 52000 },
    { "epoch": 15.5, "learning_rate": 4.842105263157895e-05, "loss": 1.5991, "step": 52500 },
    { "epoch": 15.64, "learning_rate": 4.840246803449301e-05, "loss": 1.5868, "step": 53000 },
    { "epoch": 15.79, "learning_rate": 4.8383920606601253e-05, "loss": 1.5735, "step": 53500 },
    { "epoch": 15.94, "learning_rate": 4.836533600951532e-05, "loss": 1.5621, "step": 54000 },
    { "epoch": 16.09, "learning_rate": 4.8346751412429384e-05, "loss": 1.546, "step": 54500 },
    { "epoch": 16.23, "learning_rate": 4.832816681534345e-05, "loss": 1.5339, "step": 55000 },
    { "epoch": 16.38, "learning_rate": 4.830961938745168e-05, "loss": 1.5267, "step": 55500 },
    { "epoch": 16.53, "learning_rate": 4.829103479036575e-05, "loss": 1.5158, "step": 56000 },
    { "epoch": 16.68, "learning_rate": 4.827245019327981e-05, "loss": 1.5052, "step": 56500 },
    { "epoch": 16.82, "learning_rate": 4.8253902765388046e-05, "loss": 1.4964, "step": 57000 },
    { "epoch": 16.97, "learning_rate": 4.823531816830211e-05, "loss": 1.4859, "step": 57500 },
    { "epoch": 17.12, "learning_rate": 4.8216733571216176e-05, "loss": 1.4758, "step": 58000 },
    { "epoch": 17.27, "learning_rate": 4.819814897413024e-05, "loss": 1.4669, "step": 58500 },
    { "epoch": 17.41, "learning_rate": 4.8179564377044306e-05, "loss": 1.4597, "step": 59000 },
    { "epoch": 17.56, "learning_rate": 4.8161016949152546e-05, "loss": 1.4498, "step": 59500 },
    { "epoch": 17.71, "learning_rate": 4.8142432352066604e-05, "loss": 1.4422, "step": 60000 },
    { "epoch": 17.86, "learning_rate": 4.812384775498067e-05, "loss": 1.4347, "step": 60500 },
    { "epoch": 18.0, "learning_rate": 4.8105263157894735e-05, "loss": 1.4253, "step": 61000 },
    { "epoch": 18.15, "learning_rate": 4.8086678560808806e-05, "loss": 1.4161, "step": 61500 },
    { "epoch": 18.3, "learning_rate": 4.806813113291704e-05, "loss": 1.4107, "step": 62000 },
    { "epoch": 18.45, "learning_rate": 4.8049546535831105e-05, "loss": 1.4052, "step": 62500 },
    { "epoch": 18.6, "learning_rate": 4.803096193874517e-05, "loss": 1.3986, "step": 63000 },
    { "epoch": 18.74, "learning_rate": 4.8012377341659235e-05, "loss": 1.3891, "step": 63500 },
    { "epoch": 18.89, "learning_rate": 4.7993829913767475e-05, "loss": 1.3862, "step": 64000 },
    { "epoch": 19.04, "learning_rate": 4.797524531668154e-05, "loss": 1.3764, "step": 64500 },
    { "epoch": 19.19, "learning_rate": 4.7956660719595605e-05, "loss": 1.3687, "step": 65000 },
    { "epoch": 19.33, "learning_rate": 4.793807612250967e-05, "loss": 1.3658, "step": 65500 },
    { "epoch": 19.48, "learning_rate": 4.791949152542373e-05, "loss": 1.3577, "step": 66000 },
    { "epoch": 19.63, "learning_rate": 4.790094409753197e-05, "loss": 1.3526, "step": 66500 },
    { "epoch": 19.78, "learning_rate": 4.7882359500446034e-05, "loss": 1.3471, "step": 67000 },
    { "epoch": 19.92, "learning_rate": 4.78637749033601e-05, "loss": 1.3417, "step": 67500 },
    { "epoch": 20.07, "learning_rate": 4.7845190306274164e-05, "loss": 1.3349, "step": 68000 },
    { "epoch": 20.22, "learning_rate": 4.782668004757657e-05, "loss": 1.3294, "step": 68500 },
    { "epoch": 20.37, "learning_rate": 4.780809545049064e-05, "loss": 1.3268, "step": 69000 },
    { "epoch": 20.51, "learning_rate": 4.77895108534047e-05, "loss": 1.3201, "step": 69500 },
    { "epoch": 20.66, "learning_rate": 4.777092625631877e-05, "loss": 1.3142, "step": 70000 },
    { "epoch": 20.81, "learning_rate": 4.775234165923283e-05, "loss": 1.3085, "step": 70500 },
    { "epoch": 20.96, "learning_rate": 4.773375706214689e-05, "loss": 1.3043, "step": 71000 },
    { "epoch": 21.1, "learning_rate": 4.7715172465060956e-05, "loss": 1.2965, "step": 71500 },
    { "epoch": 21.25, "learning_rate": 4.76966250371692e-05, "loss": 1.2928, "step": 72000 },
    { "epoch": 21.4, "learning_rate": 4.767804044008326e-05, "loss": 1.2882, "step": 72500 },
    { "epoch": 21.55, "learning_rate": 4.765945584299733e-05, "loss": 1.2852, "step": 73000 },
    { "epoch": 21.69, "learning_rate": 4.764087124591139e-05, "loss": 1.2786, "step": 73500 },
    { "epoch": 21.84, "learning_rate": 4.762228664882546e-05, "loss": 1.2759, "step": 74000 },
    { "epoch": 21.99, "learning_rate": 4.7603702051739515e-05, "loss": 1.2722, "step": 74500 },
    { "epoch": 22.14, "learning_rate": 4.758511745465358e-05, "loss": 1.2661, "step": 75000 },
    { "epoch": 22.28, "learning_rate": 4.756657002676182e-05, "loss": 1.2615, "step": 75500 },
    { "epoch": 22.43, "learning_rate": 4.7547985429675886e-05, "loss": 1.2571, "step": 76000 },
    { "epoch": 22.58, "learning_rate": 4.752940083258996e-05, "loss": 1.2531, "step": 76500 },
    { "epoch": 22.73, "learning_rate": 4.7510816235504016e-05, "loss": 1.2517, "step": 77000 },
    { "epoch": 22.87, "learning_rate": 4.749223163841808e-05, "loss": 1.2477, "step": 77500 },
    { "epoch": 23.02, "learning_rate": 4.7473647041332146e-05, "loss": 1.2422, "step": 78000 },
    { "epoch": 23.17, "learning_rate": 4.745506244424621e-05, "loss": 1.2389, "step": 78500 },
    { "epoch": 23.32, "learning_rate": 4.7436477847160276e-05, "loss": 1.2372, "step": 79000 },
    { "epoch": 23.47, "learning_rate": 4.7417930419268516e-05, "loss": 1.2314, "step": 79500 },
    { "epoch": 23.61, "learning_rate": 4.739938299137675e-05, "loss": 1.2282, "step": 80000 },
    { "epoch": 23.76, "learning_rate": 4.7380798394290815e-05, "loss": 1.2249, "step": 80500 },
    { "epoch": 23.91, "learning_rate": 4.736221379720488e-05, "loss": 1.2211, "step": 81000 },
    { "epoch": 24.06, "learning_rate": 4.7343629200118945e-05, "loss": 1.22, "step": 81500 },
    { "epoch": 24.2, "learning_rate": 4.732508177222718e-05, "loss": 1.2131, "step": 82000 },
    { "epoch": 24.35, "learning_rate": 4.730649717514124e-05, "loss": 1.2093, "step": 82500 },
    { "epoch": 24.5, "learning_rate": 4.728791257805531e-05, "loss": 1.2078, "step": 83000 },
    { "epoch": 24.65, "learning_rate": 4.7269327980969373e-05, "loss": 1.2069, "step": 83500 },
    { "epoch": 24.79, "learning_rate": 4.725074338388344e-05, "loss": 1.2037, "step": 84000 },
    { "epoch": 24.94, "learning_rate": 4.7232158786797504e-05, "loss": 1.2007, "step": 84500 },
    { "epoch": 25.09, "learning_rate": 4.7213611358905744e-05, "loss": 1.1953, "step": 85000 },
    { "epoch": 25.24, "learning_rate": 4.71950267618198e-05, "loss": 1.193, "step": 85500 },
    { "epoch": 25.38, "learning_rate": 4.717644216473387e-05, "loss": 1.1923, "step": 86000 },
    { "epoch": 25.53, "learning_rate": 4.715785756764793e-05, "loss": 1.1889, "step": 86500 },
    { "epoch": 25.68, "learning_rate": 4.713931013975617e-05, "loss": 1.1844, "step": 87000 },
    { "epoch": 25.83, "learning_rate": 4.712072554267024e-05, "loss": 1.1819, "step": 87500 },
    { "epoch": 25.97, "learning_rate": 4.71021409455843e-05, "loss": 1.1791, "step": 88000 },
    { "epoch": 26.12, "learning_rate": 4.708355634849837e-05, "loss": 1.1767, "step": 88500 },
    { "epoch": 26.27, "learning_rate": 4.7064971751412426e-05, "loss": 1.1728, "step": 89000 },
    { "epoch": 26.42, "learning_rate": 4.70463871543265e-05, "loss": 1.1702, "step": 89500 },
    { "epoch": 26.56, "learning_rate": 4.702780255724056e-05, "loss": 1.1686, "step": 90000 },
    { "epoch": 26.71, "learning_rate": 4.70092551293488e-05, "loss": 1.1677, "step": 90500 },
    { "epoch": 26.86, "learning_rate": 4.699067053226287e-05, "loss": 1.1649, "step": 91000 },
    { "epoch": 27.01, "learning_rate": 4.6972085935176926e-05, "loss": 1.1625, "step": 91500 },
    { "epoch": 27.15, "learning_rate": 4.695350133809099e-05, "loss": 1.1587, "step": 92000 },
    { "epoch": 27.3, "learning_rate": 4.6934916741005057e-05, "loss": 1.1565, "step": 92500 },
    { "epoch": 27.45, "learning_rate": 4.691633214391912e-05, "loss": 1.156, "step": 93000 },
    { "epoch": 27.6, "learning_rate": 4.689774754683319e-05, "loss": 1.1515, "step": 93500 },
    { "epoch": 27.74, "learning_rate": 4.687920011894143e-05, "loss": 1.1487, "step": 94000 },
    { "epoch": 27.89, "learning_rate": 4.686061552185549e-05, "loss": 1.1495, "step": 94500 },
    { "epoch": 28.04, "learning_rate": 4.684203092476955e-05, "loss": 1.1444, "step": 95000 },
    { "epoch": 28.19, "learning_rate": 4.6823446327683615e-05, "loss": 1.1424, "step": 95500 },
    { "epoch": 28.34, "learning_rate": 4.680486173059768e-05, "loss": 1.1396, "step": 96000 },
    { "epoch": 28.48, "learning_rate": 4.6786277133511746e-05, "loss": 1.1387, "step": 96500 },
    { "epoch": 28.63, "learning_rate": 4.676769253642581e-05, "loss": 1.1351, "step": 97000 },
    { "epoch": 28.78, "learning_rate": 4.6749107939339876e-05, "loss": 1.134, "step": 97500 },
    { "epoch": 28.93, "learning_rate": 4.6730560511448116e-05, "loss": 1.1338, "step": 98000 }
  ],
  "max_steps": 1355200,
  "num_train_epochs": 400,
  "total_flos": 2.648101887798988e+19,
  "trial_name": null,
  "trial_params": null
}