{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 9.15017728468489,
  "global_step": 80000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.06, "learning_rate": 4.97140569598536e-05, "loss": 8.5722, "step": 500 },
    { "epoch": 0.11, "learning_rate": 4.94281139197072e-05, "loss": 7.9252, "step": 1000 },
    { "epoch": 0.17, "learning_rate": 4.91421708795608e-05, "loss": 7.6163, "step": 1500 },
    { "epoch": 0.23, "learning_rate": 4.885622783941439e-05, "loss": 7.3317, "step": 2000 },
    { "epoch": 0.29, "learning_rate": 4.857028479926799e-05, "loss": 6.9857, "step": 2500 },
    { "epoch": 0.34, "learning_rate": 4.8284341759121586e-05, "loss": 6.6297, "step": 3000 },
    { "epoch": 0.4, "learning_rate": 4.7998398718975185e-05, "loss": 6.3251, "step": 3500 },
    { "epoch": 0.46, "learning_rate": 4.7712455678828777e-05, "loss": 6.0786, "step": 4000 },
    { "epoch": 0.51, "learning_rate": 4.7426512638682375e-05, "loss": 5.8383, "step": 4500 },
    { "epoch": 0.57, "learning_rate": 4.7140569598535973e-05, "loss": 5.62, "step": 5000 },
    { "epoch": 0.63, "learning_rate": 4.685462655838957e-05, "loss": 5.4495, "step": 5500 },
    { "epoch": 0.69, "learning_rate": 4.6568683518243164e-05, "loss": 5.2876, "step": 6000 },
    { "epoch": 0.74, "learning_rate": 4.628274047809676e-05, "loss": 5.1378, "step": 6500 },
    { "epoch": 0.8, "learning_rate": 4.599679743795036e-05, "loss": 5.0281, "step": 7000 },
    { "epoch": 0.86, "learning_rate": 4.571085439780396e-05, "loss": 4.9177, "step": 7500 },
    { "epoch": 0.92, "learning_rate": 4.542491135765756e-05, "loss": 4.8254, "step": 8000 },
    { "epoch": 0.97, "learning_rate": 4.5138968317511156e-05, "loss": 4.7354, "step": 8500 },
    { "epoch": 1.03, "learning_rate": 4.4853025277364754e-05, "loss": 4.6366, "step": 9000 },
    { "epoch": 1.09, "learning_rate": 4.4567082237218346e-05, "loss": 4.5642, "step": 9500 },
    { "epoch": 1.14, "learning_rate": 4.4281139197071944e-05, "loss": 4.5011, "step": 10000 },
    { "epoch": 1.2, "learning_rate": 4.399519615692554e-05, "loss": 4.4285, "step": 10500 },
    { "epoch": 1.26, "learning_rate": 4.370925311677914e-05, "loss": 4.3703, "step": 11000 },
    { "epoch": 1.32, "learning_rate": 4.342331007663274e-05, "loss": 4.3203, "step": 11500 },
    { "epoch": 1.37, "learning_rate": 4.313736703648633e-05, "loss": 4.2678, "step": 12000 },
    { "epoch": 1.43, "learning_rate": 4.285142399633993e-05, "loss": 4.2177, "step": 12500 },
    { "epoch": 1.49, "learning_rate": 4.256548095619353e-05, "loss": 4.1607, "step": 13000 },
    { "epoch": 1.54, "learning_rate": 4.227953791604713e-05, "loss": 4.132, "step": 13500 },
    { "epoch": 1.6, "learning_rate": 4.199359487590072e-05, "loss": 4.0738, "step": 14000 },
    { "epoch": 1.66, "learning_rate": 4.170765183575432e-05, "loss": 4.0348, "step": 14500 },
    { "epoch": 1.72, "learning_rate": 4.1421708795607916e-05, "loss": 3.9979, "step": 15000 },
    { "epoch": 1.77, "learning_rate": 4.1135765755461514e-05, "loss": 3.9553, "step": 15500 },
    { "epoch": 1.83, "learning_rate": 4.084982271531511e-05, "loss": 3.917, "step": 16000 },
    { "epoch": 1.89, "learning_rate": 4.056387967516871e-05, "loss": 3.8866, "step": 16500 },
    { "epoch": 1.94, "learning_rate": 4.027793663502231e-05, "loss": 3.8605, "step": 17000 },
    { "epoch": 2.0, "learning_rate": 3.99919935948759e-05, "loss": 3.8163, "step": 17500 },
    { "epoch": 2.06, "learning_rate": 3.97060505547295e-05, "loss": 3.7707, "step": 18000 },
    { "epoch": 2.12, "learning_rate": 3.94201075145831e-05, "loss": 3.7489, "step": 18500 },
    { "epoch": 2.17, "learning_rate": 3.9134164474436696e-05, "loss": 3.7237, "step": 19000 },
    { "epoch": 2.23, "learning_rate": 3.8848221434290295e-05, "loss": 3.6926, "step": 19500 },
    { "epoch": 2.29, "learning_rate": 3.8562278394143887e-05, "loss": 3.6643, "step": 20000 },
    { "epoch": 2.34, "learning_rate": 3.8276335353997485e-05, "loss": 3.6353, "step": 20500 },
    { "epoch": 2.4, "learning_rate": 3.7990392313851084e-05, "loss": 3.6286, "step": 21000 },
    { "epoch": 2.46, "learning_rate": 3.7704449273704675e-05, "loss": 3.6052, "step": 21500 },
    { "epoch": 2.52, "learning_rate": 3.7418506233558274e-05, "loss": 3.5797, "step": 22000 },
    { "epoch": 2.57, "learning_rate": 3.713256319341187e-05, "loss": 3.5624, "step": 22500 },
    { "epoch": 2.63, "learning_rate": 3.684662015326547e-05, "loss": 3.5298, "step": 23000 },
    { "epoch": 2.69, "learning_rate": 3.656067711311907e-05, "loss": 3.5125, "step": 23500 },
    { "epoch": 2.75, "learning_rate": 3.627473407297267e-05, "loss": 3.4832, "step": 24000 },
    { "epoch": 2.8, "learning_rate": 3.5988791032826266e-05, "loss": 3.4755, "step": 24500 },
    { "epoch": 2.86, "learning_rate": 3.570284799267986e-05, "loss": 3.4514, "step": 25000 },
    { "epoch": 2.92, "learning_rate": 3.5416904952533456e-05, "loss": 3.441, "step": 25500 },
    { "epoch": 2.97, "learning_rate": 3.5130961912387055e-05, "loss": 3.4208, "step": 26000 },
    { "epoch": 3.03, "learning_rate": 3.484501887224065e-05, "loss": 3.3992, "step": 26500 },
    { "epoch": 3.09, "learning_rate": 3.455907583209425e-05, "loss": 3.3641, "step": 27000 },
    { "epoch": 3.15, "learning_rate": 3.427313279194784e-05, "loss": 3.3579, "step": 27500 },
    { "epoch": 3.2, "learning_rate": 3.398718975180144e-05, "loss": 3.3453, "step": 28000 },
    { "epoch": 3.26, "learning_rate": 3.370124671165504e-05, "loss": 3.3162, "step": 28500 },
    { "epoch": 3.32, "learning_rate": 3.341530367150864e-05, "loss": 3.2976, "step": 29000 },
    { "epoch": 3.37, "learning_rate": 3.312936063136223e-05, "loss": 3.2955, "step": 29500 },
    { "epoch": 3.43, "learning_rate": 3.284341759121583e-05, "loss": 3.2894, "step": 30000 },
    { "epoch": 3.49, "learning_rate": 3.255747455106943e-05, "loss": 3.2687, "step": 30500 },
    { "epoch": 3.55, "learning_rate": 3.2271531510923026e-05, "loss": 3.2548, "step": 31000 },
    { "epoch": 3.6, "learning_rate": 3.1985588470776624e-05, "loss": 3.2446, "step": 31500 },
    { "epoch": 3.66, "learning_rate": 3.169964543063022e-05, "loss": 3.2264, "step": 32000 },
    { "epoch": 3.72, "learning_rate": 3.141370239048382e-05, "loss": 3.2166, "step": 32500 },
    { "epoch": 3.77, "learning_rate": 3.112775935033741e-05, "loss": 3.2097, "step": 33000 },
    { "epoch": 3.83, "learning_rate": 3.084181631019101e-05, "loss": 3.1955, "step": 33500 },
    { "epoch": 3.89, "learning_rate": 3.055587327004461e-05, "loss": 3.181, "step": 34000 },
    { "epoch": 3.95, "learning_rate": 3.0269930229898208e-05, "loss": 3.1758, "step": 34500 },
    { "epoch": 4.0, "learning_rate": 2.9983987189751807e-05, "loss": 3.1688, "step": 35000 },
    { "epoch": 4.06, "learning_rate": 2.9698044149605398e-05, "loss": 3.1284, "step": 35500 },
    { "epoch": 4.12, "learning_rate": 2.9412101109458993e-05, "loss": 3.1256, "step": 36000 },
    { "epoch": 4.17, "learning_rate": 2.9126158069312592e-05, "loss": 3.1062, "step": 36500 },
    { "epoch": 4.23, "learning_rate": 2.884021502916619e-05, "loss": 3.1051, "step": 37000 },
    { "epoch": 4.29, "learning_rate": 2.855427198901979e-05, "loss": 3.108, "step": 37500 },
    { "epoch": 4.35, "learning_rate": 2.8268328948873384e-05, "loss": 3.0932, "step": 38000 },
    { "epoch": 4.4, "learning_rate": 2.7982385908726982e-05, "loss": 3.0766, "step": 38500 },
    { "epoch": 4.46, "learning_rate": 2.769644286858058e-05, "loss": 3.0501, "step": 39000 },
    { "epoch": 4.52, "learning_rate": 2.7410499828434176e-05, "loss": 3.0634, "step": 39500 },
    { "epoch": 4.58, "learning_rate": 2.7124556788287774e-05, "loss": 3.0524, "step": 40000 },
    { "epoch": 4.63, "learning_rate": 2.6838613748141373e-05, "loss": 3.0556, "step": 40500 },
    { "epoch": 4.69, "learning_rate": 2.655267070799497e-05, "loss": 3.0276, "step": 41000 },
    { "epoch": 4.75, "learning_rate": 2.6266727667848566e-05, "loss": 3.0364, "step": 41500 },
    { "epoch": 4.8, "learning_rate": 2.5980784627702165e-05, "loss": 3.0186, "step": 42000 },
    { "epoch": 4.86, "learning_rate": 2.5694841587555763e-05, "loss": 3.0071, "step": 42500 },
    { "epoch": 4.92, "learning_rate": 2.540889854740936e-05, "loss": 3.0144, "step": 43000 },
    { "epoch": 4.98, "learning_rate": 2.5122955507262953e-05, "loss": 3.0003, "step": 43500 },
    { "epoch": 5.03, "learning_rate": 2.4837012467116552e-05, "loss": 2.9854, "step": 44000 },
    { "epoch": 5.09, "learning_rate": 2.455106942697015e-05, "loss": 2.9696, "step": 44500 },
    { "epoch": 5.15, "learning_rate": 2.426512638682375e-05, "loss": 2.9651, "step": 45000 },
    { "epoch": 5.2, "learning_rate": 2.397918334667734e-05, "loss": 2.9463, "step": 45500 },
    { "epoch": 5.26, "learning_rate": 2.369324030653094e-05, "loss": 2.9379, "step": 46000 },
    { "epoch": 5.32, "learning_rate": 2.3407297266384537e-05, "loss": 2.9409, "step": 46500 },
    { "epoch": 5.38, "learning_rate": 2.3121354226238136e-05, "loss": 2.9315, "step": 47000 },
    { "epoch": 5.43, "learning_rate": 2.283541118609173e-05, "loss": 2.9407, "step": 47500 },
    { "epoch": 5.49, "learning_rate": 2.254946814594533e-05, "loss": 2.9327, "step": 48000 },
    { "epoch": 5.55, "learning_rate": 2.2263525105798928e-05, "loss": 2.9282, "step": 48500 },
    { "epoch": 5.6, "learning_rate": 2.1977582065652523e-05, "loss": 2.9177, "step": 49000 },
    { "epoch": 5.66, "learning_rate": 2.1691639025506118e-05, "loss": 2.9109, "step": 49500 },
    { "epoch": 5.72, "learning_rate": 2.1405695985359716e-05, "loss": 2.9009, "step": 50000 },
    { "epoch": 5.78, "learning_rate": 2.1119752945213315e-05, "loss": 2.8929, "step": 50500 },
    { "epoch": 5.83, "learning_rate": 2.0833809905066913e-05, "loss": 2.8863, "step": 51000 },
    { "epoch": 5.89, "learning_rate": 2.0547866864920508e-05, "loss": 2.8912, "step": 51500 },
    { "epoch": 5.95, "learning_rate": 2.0261923824774107e-05, "loss": 2.8757, "step": 52000 },
    { "epoch": 6.0, "learning_rate": 1.9975980784627705e-05, "loss": 2.874, "step": 52500 },
    { "epoch": 6.06, "learning_rate": 1.96900377444813e-05, "loss": 2.8468, "step": 53000 },
    { "epoch": 6.12, "learning_rate": 1.9404094704334895e-05, "loss": 2.866, "step": 53500 },
    { "epoch": 6.18, "learning_rate": 1.9118151664188494e-05, "loss": 2.8428, "step": 54000 },
    { "epoch": 6.23, "learning_rate": 1.8832208624042092e-05, "loss": 2.8474, "step": 54500 },
    { "epoch": 6.29, "learning_rate": 1.8546265583895687e-05, "loss": 2.8368, "step": 55000 },
    { "epoch": 6.35, "learning_rate": 1.8260322543749286e-05, "loss": 2.8377, "step": 55500 },
    { "epoch": 6.41, "learning_rate": 1.7974379503602884e-05, "loss": 2.838, "step": 56000 },
    { "epoch": 6.46, "learning_rate": 1.7688436463456483e-05, "loss": 2.83, "step": 56500 },
    { "epoch": 6.52, "learning_rate": 1.7402493423310078e-05, "loss": 2.8275, "step": 57000 },
    { "epoch": 6.58, "learning_rate": 1.7116550383163673e-05, "loss": 2.8106, "step": 57500 },
    { "epoch": 6.63, "learning_rate": 1.683060734301727e-05, "loss": 2.8158, "step": 58000 },
    { "epoch": 6.69, "learning_rate": 1.654466430287087e-05, "loss": 2.814, "step": 58500 },
    { "epoch": 6.75, "learning_rate": 1.6258721262724465e-05, "loss": 2.8101, "step": 59000 },
    { "epoch": 6.81, "learning_rate": 1.5972778222578063e-05, "loss": 2.8015, "step": 59500 },
    { "epoch": 6.86, "learning_rate": 1.5686835182431662e-05, "loss": 2.7999, "step": 60000 },
    { "epoch": 6.92, "learning_rate": 1.540089214228526e-05, "loss": 2.7919, "step": 60500 },
    { "epoch": 6.98, "learning_rate": 1.5114949102138854e-05, "loss": 2.7953, "step": 61000 },
    { "epoch": 7.03, "learning_rate": 1.482900606199245e-05, "loss": 2.7795, "step": 61500 },
    { "epoch": 7.09, "learning_rate": 1.4543063021846049e-05, "loss": 2.7648, "step": 62000 },
    { "epoch": 7.15, "learning_rate": 1.4257119981699646e-05, "loss": 2.773, "step": 62500 },
    { "epoch": 7.21, "learning_rate": 1.3971176941553244e-05, "loss": 2.7663, "step": 63000 },
    { "epoch": 7.26, "learning_rate": 1.368523390140684e-05, "loss": 2.7592, "step": 63500 },
    { "epoch": 7.32, "learning_rate": 1.3399290861260438e-05, "loss": 2.7543, "step": 64000 },
    { "epoch": 7.38, "learning_rate": 1.3113347821114036e-05, "loss": 2.7626, "step": 64500 },
    { "epoch": 7.43, "learning_rate": 1.2827404780967631e-05, "loss": 2.755, "step": 65000 },
    { "epoch": 7.49, "learning_rate": 1.2541461740821228e-05, "loss": 2.7538, "step": 65500 },
    { "epoch": 7.55, "learning_rate": 1.2255518700674826e-05, "loss": 2.7381, "step": 66000 },
    { "epoch": 7.61, "learning_rate": 1.1969575660528423e-05, "loss": 2.7491, "step": 66500 },
    { "epoch": 7.66, "learning_rate": 1.168363262038202e-05, "loss": 2.7441, "step": 67000 },
    { "epoch": 7.72, "learning_rate": 1.1397689580235618e-05, "loss": 2.7369, "step": 67500 },
    { "epoch": 7.78, "learning_rate": 1.1111746540089215e-05, "loss": 2.7466, "step": 68000 },
    { "epoch": 7.83, "learning_rate": 1.0825803499942812e-05, "loss": 2.7363, "step": 68500 },
    { "epoch": 7.89, "learning_rate": 1.0539860459796409e-05, "loss": 2.7294, "step": 69000 },
    { "epoch": 7.95, "learning_rate": 1.0253917419650007e-05, "loss": 2.7304, "step": 69500 },
    { "epoch": 8.01, "learning_rate": 9.967974379503602e-06, "loss": 2.7278, "step": 70000 },
    { "epoch": 8.06, "learning_rate": 9.6820313393572e-06, "loss": 2.7231, "step": 70500 },
    { "epoch": 8.12, "learning_rate": 9.396088299210797e-06, "loss": 2.7267, "step": 71000 },
    { "epoch": 8.18, "learning_rate": 9.110145259064396e-06, "loss": 2.7133, "step": 71500 },
    { "epoch": 8.24, "learning_rate": 8.824202218917991e-06, "loss": 2.7088, "step": 72000 },
    { "epoch": 8.29, "learning_rate": 8.53825917877159e-06, "loss": 2.7115, "step": 72500 },
    { "epoch": 8.35, "learning_rate": 8.252316138625186e-06, "loss": 2.7049, "step": 73000 },
    { "epoch": 8.41, "learning_rate": 7.966373098478785e-06, "loss": 2.7083, "step": 73500 },
    { "epoch": 8.46, "learning_rate": 7.68043005833238e-06, "loss": 2.6946, "step": 74000 },
    { "epoch": 8.52, "learning_rate": 7.394487018185977e-06, "loss": 2.691, "step": 74500 },
    { "epoch": 8.58, "learning_rate": 7.108543978039575e-06, "loss": 2.7066, "step": 75000 },
    { "epoch": 8.64, "learning_rate": 6.8226009378931726e-06, "loss": 2.6896, "step": 75500 },
    { "epoch": 8.69, "learning_rate": 6.5366578977467685e-06, "loss": 2.6976, "step": 76000 },
    { "epoch": 8.75, "learning_rate": 6.250714857600366e-06, "loss": 2.6989, "step": 76500 },
    { "epoch": 8.81, "learning_rate": 5.964771817453964e-06, "loss": 2.6869, "step": 77000 },
    { "epoch": 8.86, "learning_rate": 5.6788287773075605e-06, "loss": 2.6898, "step": 77500 },
    { "epoch": 8.92, "learning_rate": 5.392885737161157e-06, "loss": 2.6851, "step": 78000 },
    { "epoch": 8.98, "learning_rate": 5.106942697014755e-06, "loss": 2.6871, "step": 78500 },
    { "epoch": 9.04, "learning_rate": 4.820999656868352e-06, "loss": 2.6719, "step": 79000 },
    { "epoch": 9.09, "learning_rate": 4.535056616721949e-06, "loss": 2.6704, "step": 79500 },
    { "epoch": 9.15, "learning_rate": 4.249113576575546e-06, "loss": 2.6776, "step": 80000 }
  ],
  "max_steps": 87430,
  "num_train_epochs": 10,
  "total_flos": 9.648556042711978e+17,
  "trial_name": null,
  "trial_params": null
}