{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9789902790843525,
  "global_step": 38000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04,
      "learning_rate": 4.934671265809554e-05,
      "loss": 2.5519,
      "step": 500
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.8693425316191076e-05,
      "loss": 2.5343,
      "step": 1000
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.804013797428661e-05,
      "loss": 2.4991,
      "step": 1500
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.738685063238215e-05,
      "loss": 2.475,
      "step": 2000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.6733563290477686e-05,
      "loss": 2.4791,
      "step": 2500
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.608027594857322e-05,
      "loss": 2.4725,
      "step": 3000
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.542698860666876e-05,
      "loss": 2.4255,
      "step": 3500
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.4773701264764296e-05,
      "loss": 2.4452,
      "step": 4000
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.412041392285983e-05,
      "loss": 2.4272,
      "step": 4500
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.346712658095537e-05,
      "loss": 2.435,
      "step": 5000
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.2813839239050906e-05,
      "loss": 2.4164,
      "step": 5500
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.216055189714644e-05,
      "loss": 2.3954,
      "step": 6000
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.150726455524198e-05,
      "loss": 2.3677,
      "step": 6500
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.0853977213337515e-05,
      "loss": 2.3643,
      "step": 7000
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.020068987143305e-05,
      "loss": 2.3596,
      "step": 7500
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.954740252952859e-05,
      "loss": 2.3601,
      "step": 8000
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.8894115187624125e-05,
      "loss": 2.3391,
      "step": 8500
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.824082784571966e-05,
      "loss": 2.3418,
      "step": 9000
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.75875405038152e-05,
      "loss": 2.3256,
      "step": 9500
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.6934253161910735e-05,
      "loss": 2.3386,
      "step": 10000
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.628096582000627e-05,
      "loss": 2.3078,
      "step": 10500
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.562767847810181e-05,
      "loss": 2.3059,
      "step": 11000
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.497439113619735e-05,
      "loss": 2.3098,
      "step": 11500
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.432110379429289e-05,
      "loss": 2.3021,
      "step": 12000
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.366781645238842e-05,
      "loss": 2.2801,
      "step": 12500
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.3014529110483955e-05,
      "loss": 2.0588,
      "step": 13000
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.236124176857949e-05,
      "loss": 1.8245,
      "step": 13500
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.170795442667503e-05,
      "loss": 1.8411,
      "step": 14000
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.1054667084770565e-05,
      "loss": 1.8267,
      "step": 14500
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.0401379742866105e-05,
      "loss": 1.8376,
      "step": 15000
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.9748092400961642e-05,
      "loss": 1.863,
      "step": 15500
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.909480505905718e-05,
      "loss": 1.8302,
      "step": 16000
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.8441517717152715e-05,
      "loss": 1.8444,
      "step": 16500
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.7788230375248252e-05,
      "loss": 1.8417,
      "step": 17000
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.7134943033343785e-05,
      "loss": 1.8576,
      "step": 17500
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.648165569143932e-05,
      "loss": 1.8421,
      "step": 18000
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.5828368349534858e-05,
      "loss": 1.8459,
      "step": 18500
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.51750810076304e-05,
      "loss": 1.8438,
      "step": 19000
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.4521793665725935e-05,
      "loss": 1.8263,
      "step": 19500
    },
    {
      "epoch": 1.57,
      "learning_rate": 2.386850632382147e-05,
      "loss": 1.8451,
      "step": 20000
    },
    {
      "epoch": 1.61,
      "learning_rate": 2.3215218981917008e-05,
      "loss": 1.8439,
      "step": 20500
    },
    {
      "epoch": 1.65,
      "learning_rate": 2.256193164001254e-05,
      "loss": 1.8529,
      "step": 21000
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.190864429810808e-05,
      "loss": 1.8184,
      "step": 21500
    },
    {
      "epoch": 1.72,
      "learning_rate": 2.1255356956203618e-05,
      "loss": 1.8364,
      "step": 22000
    },
    {
      "epoch": 1.76,
      "learning_rate": 2.0602069614299155e-05,
      "loss": 1.831,
      "step": 22500
    },
    {
      "epoch": 1.8,
      "learning_rate": 1.994878227239469e-05,
      "loss": 1.8103,
      "step": 23000
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.9295494930490228e-05,
      "loss": 1.8078,
      "step": 23500
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.8642207588585765e-05,
      "loss": 1.8047,
      "step": 24000
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.79889202466813e-05,
      "loss": 1.8155,
      "step": 24500
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.7335632904776838e-05,
      "loss": 1.8136,
      "step": 25000
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.6682345562872375e-05,
      "loss": 1.8019,
      "step": 25500
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.602905822096791e-05,
      "loss": 1.3885,
      "step": 26000
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.5375770879063448e-05,
      "loss": 1.3774,
      "step": 26500
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.4722483537158985e-05,
      "loss": 1.3693,
      "step": 27000
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.4069196195254521e-05,
      "loss": 1.3827,
      "step": 27500
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.341590885335006e-05,
      "loss": 1.3755,
      "step": 28000
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.2762621511445595e-05,
      "loss": 1.3739,
      "step": 28500
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.2109334169541131e-05,
      "loss": 1.3798,
      "step": 29000
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.1456046827636668e-05,
      "loss": 1.3705,
      "step": 29500
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.0802759485732206e-05,
      "loss": 1.3847,
      "step": 30000
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.0149472143827741e-05,
      "loss": 1.371,
      "step": 30500
    },
    {
      "epoch": 2.43,
      "learning_rate": 9.49618480192328e-06,
      "loss": 1.3661,
      "step": 31000
    },
    {
      "epoch": 2.47,
      "learning_rate": 8.842897460018814e-06,
      "loss": 1.3863,
      "step": 31500
    },
    {
      "epoch": 2.51,
      "learning_rate": 8.189610118114353e-06,
      "loss": 1.3819,
      "step": 32000
    },
    {
      "epoch": 2.55,
      "learning_rate": 7.536322776209888e-06,
      "loss": 1.3762,
      "step": 32500
    },
    {
      "epoch": 2.59,
      "learning_rate": 6.883035434305425e-06,
      "loss": 1.3733,
      "step": 33000
    },
    {
      "epoch": 2.63,
      "learning_rate": 6.229748092400962e-06,
      "loss": 1.3591,
      "step": 33500
    },
    {
      "epoch": 2.67,
      "learning_rate": 5.576460750496499e-06,
      "loss": 1.3756,
      "step": 34000
    },
    {
      "epoch": 2.7,
      "learning_rate": 4.923173408592035e-06,
      "loss": 1.3447,
      "step": 34500
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.269886066687572e-06,
      "loss": 1.3656,
      "step": 35000
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.616598724783109e-06,
      "loss": 1.3549,
      "step": 35500
    },
    {
      "epoch": 2.82,
      "learning_rate": 2.963311382878645e-06,
      "loss": 1.3691,
      "step": 36000
    },
    {
      "epoch": 2.86,
      "learning_rate": 2.310024040974182e-06,
      "loss": 1.3534,
      "step": 36500
    },
    {
      "epoch": 2.9,
      "learning_rate": 1.6567366990697188e-06,
      "loss": 1.3391,
      "step": 37000
    },
    {
      "epoch": 2.94,
      "learning_rate": 1.0034493571652555e-06,
      "loss": 1.3589,
      "step": 37500
    },
    {
      "epoch": 2.98,
      "learning_rate": 3.5016201526079235e-07,
      "loss": 1.3393,
      "step": 38000
    }
  ],
  "max_steps": 38268,
  "num_train_epochs": 3,
  "total_flos": 1.948577863019397e+17,
  "trial_name": null,
  "trial_params": null
}