{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9983934941049403,
  "global_step": 110000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 4.9924364129234474e-05,
      "loss": 4.7552,
      "step": 500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.984872825846895e-05,
      "loss": 4.6634,
      "step": 1000
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9773092387703424e-05,
      "loss": 4.583,
      "step": 1500
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.96974565169379e-05,
      "loss": 4.5872,
      "step": 2000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.962182064617237e-05,
      "loss": 4.5253,
      "step": 2500
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.9546184775406845e-05,
      "loss": 4.4986,
      "step": 3000
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.947054890464132e-05,
      "loss": 4.5247,
      "step": 3500
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.9394913033875794e-05,
      "loss": 4.4397,
      "step": 4000
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.931927716311027e-05,
      "loss": 4.5077,
      "step": 4500
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.9243641292344744e-05,
      "loss": 4.4674,
      "step": 5000
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.9168005421579216e-05,
      "loss": 4.4637,
      "step": 5500
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.9092369550813694e-05,
      "loss": 4.4311,
      "step": 6000
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.9016733680048165e-05,
      "loss": 4.4189,
      "step": 6500
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.8941097809282644e-05,
      "loss": 4.3811,
      "step": 7000
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.8865461938517115e-05,
      "loss": 4.4015,
      "step": 7500
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.8789826067751587e-05,
      "loss": 4.3717,
      "step": 8000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.8714190196986065e-05,
      "loss": 4.3931,
      "step": 8500
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.8638554326220536e-05,
      "loss": 4.3651,
      "step": 9000
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.8562918455455014e-05,
      "loss": 4.3974,
      "step": 9500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.8487282584689486e-05,
      "loss": 4.3297,
      "step": 10000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.8411646713923964e-05,
      "loss": 4.3414,
      "step": 10500
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.8336010843158436e-05,
      "loss": 4.3682,
      "step": 11000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.826037497239291e-05,
      "loss": 4.3614,
      "step": 11500
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.8184739101627385e-05,
      "loss": 4.368,
      "step": 12000
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.810910323086186e-05,
      "loss": 4.3026,
      "step": 12500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.8033467360096335e-05,
      "loss": 4.3342,
      "step": 13000
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.7957831489330807e-05,
      "loss": 4.3359,
      "step": 13500
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.788219561856528e-05,
      "loss": 4.3332,
      "step": 14000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.7806559747799756e-05,
      "loss": 4.2581,
      "step": 14500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.773092387703423e-05,
      "loss": 4.3509,
      "step": 15000
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.7655288006268706e-05,
      "loss": 4.2819,
      "step": 15500
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.757965213550318e-05,
      "loss": 4.2966,
      "step": 16000
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.750401626473765e-05,
      "loss": 4.2967,
      "step": 16500
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.742838039397213e-05,
      "loss": 4.2915,
      "step": 17000
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.73527445232066e-05,
      "loss": 4.2473,
      "step": 17500
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.727710865244108e-05,
      "loss": 4.2357,
      "step": 18000
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.720147278167555e-05,
      "loss": 4.2754,
      "step": 18500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.712583691091002e-05,
      "loss": 4.2699,
      "step": 19000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.70502010401445e-05,
      "loss": 4.2866,
      "step": 19500
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.697456516937897e-05,
      "loss": 4.2778,
      "step": 20000
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.689892929861345e-05,
      "loss": 4.2414,
      "step": 20500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.682329342784792e-05,
      "loss": 4.2963,
      "step": 21000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.674765755708239e-05,
      "loss": 4.3053,
      "step": 21500
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.667202168631687e-05,
      "loss": 4.2914,
      "step": 22000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.659638581555134e-05,
      "loss": 4.2527,
      "step": 22500
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.652074994478582e-05,
      "loss": 4.251,
      "step": 23000
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.644511407402029e-05,
      "loss": 4.2694,
      "step": 23500
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.636947820325476e-05,
      "loss": 4.2306,
      "step": 24000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.629384233248924e-05,
      "loss": 4.2451,
      "step": 24500
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.621820646172371e-05,
      "loss": 4.232,
      "step": 25000
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.614257059095819e-05,
      "loss": 4.2506,
      "step": 25500
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.606693472019266e-05,
      "loss": 4.2425,
      "step": 26000
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.599129884942713e-05,
      "loss": 4.2495,
      "step": 26500
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.591566297866161e-05,
      "loss": 4.2475,
      "step": 27000
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.584002710789608e-05,
      "loss": 4.2516,
      "step": 27500
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.576439123713056e-05,
      "loss": 4.2478,
      "step": 28000
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.568875536636503e-05,
      "loss": 4.2214,
      "step": 28500
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.56131194955995e-05,
      "loss": 4.244,
      "step": 29000
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.553748362483398e-05,
      "loss": 4.2125,
      "step": 29500
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.546184775406845e-05,
      "loss": 4.251,
      "step": 30000
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.538621188330293e-05,
      "loss": 4.1668,
      "step": 30500
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.53105760125374e-05,
      "loss": 4.2469,
      "step": 31000
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.523494014177188e-05,
      "loss": 4.2189,
      "step": 31500
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.515930427100635e-05,
      "loss": 4.2727,
      "step": 32000
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.5083668400240824e-05,
      "loss": 4.2318,
      "step": 32500
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.50080325294753e-05,
      "loss": 4.2083,
      "step": 33000
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.4932396658709774e-05,
      "loss": 4.1744,
      "step": 33500
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.485676078794425e-05,
      "loss": 4.1938,
      "step": 34000
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.478112491717872e-05,
      "loss": 4.1745,
      "step": 34500
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.4705489046413195e-05,
      "loss": 4.1725,
      "step": 35000
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.462985317564767e-05,
      "loss": 4.2081,
      "step": 35500
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.4554217304882145e-05,
      "loss": 4.1804,
      "step": 36000
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.447858143411662e-05,
      "loss": 4.1977,
      "step": 36500
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.4402945563351094e-05,
      "loss": 4.1939,
      "step": 37000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.4327309692585566e-05,
      "loss": 4.2137,
      "step": 37500
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.4251673821820044e-05,
      "loss": 4.1899,
      "step": 38000
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.4176037951054515e-05,
      "loss": 4.1409,
      "step": 38500
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.4100402080288994e-05,
      "loss": 4.155,
      "step": 39000
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.4024766209523465e-05,
      "loss": 4.2214,
      "step": 39500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.3949130338757937e-05,
      "loss": 4.1553,
      "step": 40000
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.3873494467992415e-05,
      "loss": 4.1859,
      "step": 40500
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.3797858597226886e-05,
      "loss": 4.2127,
      "step": 41000
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.3722222726461365e-05,
      "loss": 4.1758,
      "step": 41500
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.3646586855695836e-05,
      "loss": 4.2035,
      "step": 42000
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.357095098493031e-05,
      "loss": 4.1486,
      "step": 42500
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.3495315114164786e-05,
      "loss": 4.2252,
      "step": 43000
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.341967924339926e-05,
      "loss": 4.1585,
      "step": 43500
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.3344043372633735e-05,
      "loss": 4.1804,
      "step": 44000
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.326840750186821e-05,
      "loss": 4.2005,
      "step": 44500
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.319277163110268e-05,
      "loss": 4.1257,
      "step": 45000
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.3117135760337157e-05,
      "loss": 4.1687,
      "step": 45500
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.304149988957163e-05,
      "loss": 4.1845,
      "step": 46000
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.2965864018806106e-05,
      "loss": 4.1383,
      "step": 46500
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.289022814804058e-05,
      "loss": 4.1346,
      "step": 47000
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.281459227727505e-05,
      "loss": 4.1611,
      "step": 47500
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.273895640650953e-05,
      "loss": 4.1542,
      "step": 48000
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.2663320535744e-05,
      "loss": 4.2022,
      "step": 48500
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.258768466497848e-05,
      "loss": 4.1973,
      "step": 49000
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.251204879421295e-05,
      "loss": 4.1386,
      "step": 49500
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.243641292344743e-05,
      "loss": 4.1542,
      "step": 50000
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.23607770526819e-05,
      "loss": 4.1301,
      "step": 50500
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.228514118191637e-05,
      "loss": 4.1239,
      "step": 51000
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.220950531115085e-05,
      "loss": 4.1453,
      "step": 51500
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.213386944038532e-05,
      "loss": 4.1403,
      "step": 52000
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.20582335696198e-05,
      "loss": 4.1665,
      "step": 52500
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.198259769885427e-05,
      "loss": 4.1306,
      "step": 53000
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.190696182808874e-05,
      "loss": 4.1622,
      "step": 53500
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.183132595732322e-05,
      "loss": 4.1524,
      "step": 54000
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.175569008655769e-05,
      "loss": 4.1656,
      "step": 54500
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.168005421579217e-05,
      "loss": 4.092,
      "step": 55000
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.160441834502664e-05,
      "loss": 4.1483,
      "step": 55500
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.152878247426111e-05,
      "loss": 4.1474,
      "step": 56000
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.145314660349559e-05,
      "loss": 4.1685,
      "step": 56500
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.137751073273006e-05,
      "loss": 4.1404,
      "step": 57000
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.130187486196454e-05,
      "loss": 4.0895,
      "step": 57500
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.122623899119901e-05,
      "loss": 4.1132,
      "step": 58000
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.115060312043348e-05,
      "loss": 4.1438,
      "step": 58500
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.107496724966796e-05,
      "loss": 4.1197,
      "step": 59000
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.099933137890243e-05,
      "loss": 4.1568,
      "step": 59500
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.092369550813691e-05,
      "loss": 4.1546,
      "step": 60000
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.084805963737138e-05,
      "loss": 4.1391,
      "step": 60500
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.077242376660585e-05,
      "loss": 4.1197,
      "step": 61000
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.069678789584033e-05,
      "loss": 4.1323,
      "step": 61500
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.06211520250748e-05,
      "loss": 4.1305,
      "step": 62000
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.054551615430928e-05,
      "loss": 4.0884,
      "step": 62500
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.046988028354375e-05,
      "loss": 4.1144,
      "step": 63000
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.0394244412778224e-05,
      "loss": 4.1014,
      "step": 63500
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.03186085420127e-05,
      "loss": 4.0915,
      "step": 64000
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.0242972671247174e-05,
      "loss": 4.158,
      "step": 64500
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.016733680048165e-05,
      "loss": 4.13,
      "step": 65000
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.0091700929716124e-05,
      "loss": 4.1329,
      "step": 65500
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.0016065058950595e-05,
      "loss": 4.0738,
      "step": 66000
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.994042918818507e-05,
      "loss": 4.1312,
      "step": 66500
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.9864793317419545e-05,
      "loss": 4.1058,
      "step": 67000
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.978915744665402e-05,
      "loss": 4.0855,
      "step": 67500
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.9713521575888495e-05,
      "loss": 4.1531,
      "step": 68000
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.9637885705122966e-05,
      "loss": 4.1676,
      "step": 68500
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.9562249834357444e-05,
      "loss": 4.0996,
      "step": 69000
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.9486613963591916e-05,
      "loss": 4.0781,
      "step": 69500
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.9410978092826394e-05,
      "loss": 4.0646,
      "step": 70000
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.9335342222060865e-05,
      "loss": 4.0895,
      "step": 70500
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.9259706351295344e-05,
      "loss": 4.1257,
      "step": 71000
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.9184070480529815e-05,
      "loss": 4.0724,
      "step": 71500
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.910843460976429e-05,
      "loss": 4.082,
      "step": 72000
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.9032798738998765e-05,
      "loss": 4.1544,
      "step": 72500
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.8957162868233236e-05,
      "loss": 4.124,
      "step": 73000
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.8881526997467715e-05,
      "loss": 4.0763,
      "step": 73500
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.8805891126702186e-05,
      "loss": 4.1284,
      "step": 74000
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.873025525593666e-05,
      "loss": 4.0564,
      "step": 74500
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.8654619385171136e-05,
      "loss": 4.1,
      "step": 75000
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.857898351440561e-05,
      "loss": 4.1089,
      "step": 75500
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.8503347643640085e-05,
      "loss": 4.092,
      "step": 76000
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.842771177287456e-05,
      "loss": 4.127,
      "step": 76500
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.835207590210903e-05,
      "loss": 4.1307,
      "step": 77000
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.827644003134351e-05,
      "loss": 4.0732,
      "step": 77500
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.820080416057798e-05,
      "loss": 4.0956,
      "step": 78000
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.8125168289812456e-05,
      "loss": 4.0683,
      "step": 78500
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.804953241904693e-05,
      "loss": 4.0822,
      "step": 79000
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.79738965482814e-05,
      "loss": 4.1471,
      "step": 79500
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.789826067751588e-05,
      "loss": 4.127,
      "step": 80000
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.782262480675035e-05,
      "loss": 4.1113,
      "step": 80500
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.774698893598483e-05,
      "loss": 4.0932,
      "step": 81000
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.76713530652193e-05,
      "loss": 4.1176,
      "step": 81500
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.759571719445377e-05,
      "loss": 4.1078,
      "step": 82000
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.752008132368825e-05,
      "loss": 4.0988,
      "step": 82500
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.744444545292272e-05,
      "loss": 4.1075,
      "step": 83000
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.73688095821572e-05,
      "loss": 4.0456,
      "step": 83500
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.729317371139167e-05,
      "loss": 4.1451,
      "step": 84000
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.721753784062614e-05,
      "loss": 4.116,
      "step": 84500
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.714190196986062e-05,
      "loss": 4.0827,
      "step": 85000
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.706626609909509e-05,
      "loss": 4.1093,
      "step": 85500
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.699063022832957e-05,
      "loss": 4.0878,
      "step": 86000
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.691499435756404e-05,
      "loss": 4.1075,
      "step": 86500
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.683935848679851e-05,
      "loss": 4.0942,
      "step": 87000
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.676372261603299e-05,
      "loss": 4.1082,
      "step": 87500
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.668808674526746e-05,
      "loss": 4.0797,
      "step": 88000
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.661245087450194e-05,
      "loss": 4.0613,
      "step": 88500
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.653681500373641e-05,
      "loss": 4.1392,
      "step": 89000
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.646117913297089e-05,
      "loss": 4.064,
      "step": 89500
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.638554326220536e-05,
      "loss": 4.0528,
      "step": 90000
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.630990739143983e-05,
      "loss": 4.0703,
      "step": 90500
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.623427152067431e-05,
      "loss": 4.0334,
      "step": 91000
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.615863564990878e-05,
      "loss": 4.0341,
      "step": 91500
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.608299977914326e-05,
      "loss": 4.079,
      "step": 92000
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.600736390837773e-05,
      "loss": 4.0658,
      "step": 92500
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.5931728037612203e-05,
      "loss": 4.0723,
      "step": 93000
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.585609216684668e-05,
      "loss": 4.0875,
      "step": 93500
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.578045629608115e-05,
      "loss": 4.0256,
      "step": 94000
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.570482042531563e-05,
      "loss": 4.1114,
      "step": 94500
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.56291845545501e-05,
      "loss": 4.0955,
      "step": 95000
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.5553548683784574e-05,
      "loss": 4.1109,
      "step": 95500
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.547791281301905e-05,
      "loss": 4.0938,
      "step": 96000
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.5402276942253524e-05,
      "loss": 4.0839,
      "step": 96500
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.5326641071488e-05,
      "loss": 4.0671,
      "step": 97000
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.5251005200722474e-05,
      "loss": 4.0446,
      "step": 97500
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.5175369329956945e-05,
      "loss": 4.0323,
      "step": 98000
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.5099733459191423e-05,
      "loss": 4.0442,
      "step": 98500
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.5024097588425895e-05,
      "loss": 4.0989,
      "step": 99000
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.494846171766037e-05,
      "loss": 4.0851,
      "step": 99500
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.4872825846894845e-05,
      "loss": 4.0733,
      "step": 100000
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.4797189976129316e-05,
      "loss": 4.0693,
      "step": 100500
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.4721554105363794e-05,
      "loss": 4.0285,
      "step": 101000
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.4645918234598266e-05,
      "loss": 4.1077,
      "step": 101500
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.4570282363832744e-05,
      "loss": 4.0844,
      "step": 102000
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.4494646493067215e-05,
      "loss": 4.0826,
      "step": 102500
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.441901062230169e-05,
      "loss": 4.0967,
      "step": 103000
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.4343374751536165e-05,
      "loss": 4.0536,
      "step": 103500
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.426773888077064e-05,
      "loss": 4.1287,
      "step": 104000
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.4192103010005115e-05,
      "loss": 4.0883,
      "step": 104500
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.4116467139239586e-05,
      "loss": 4.0862,
      "step": 105000
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.404083126847406e-05,
      "loss": 4.0683,
      "step": 105500
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.3965195397708536e-05,
      "loss": 4.0465,
      "step": 106000
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.388955952694301e-05,
      "loss": 4.0712,
      "step": 106500
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.3813923656177486e-05,
      "loss": 4.0737,
      "step": 107000
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.373828778541196e-05,
      "loss": 4.0399,
      "step": 107500
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.366265191464643e-05,
      "loss": 4.0716,
      "step": 108000
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.358701604388091e-05,
      "loss": 4.0129,
      "step": 108500
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.351138017311538e-05,
      "loss": 4.0711,
      "step": 109000
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.343574430234986e-05,
      "loss": 4.0583,
      "step": 109500
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.336010843158433e-05,
      "loss": 4.0821,
      "step": 110000
    }
  ],
  "max_steps": 330531,
  "num_train_epochs": 3,
  "total_flos": 7185530880000000.0,
  "trial_name": null,
  "trial_params": null
}