{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "global_step": 30790,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.16,
      "learning_rate": 4.91880480675544e-05,
      "loss": 3.1126,
      "step": 500
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.8376096135108804e-05,
      "loss": 2.5083,
      "step": 1000
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.7564144202663205e-05,
      "loss": 2.3491,
      "step": 1500
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.6752192270217606e-05,
      "loss": 2.2759,
      "step": 2000
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.5940240337772006e-05,
      "loss": 2.2089,
      "step": 2500
    },
    {
      "epoch": 0.97,
      "learning_rate": 4.512828840532641e-05,
      "loss": 2.1629,
      "step": 3000
    },
    {
      "epoch": 1.14,
      "learning_rate": 4.431633647288081e-05,
      "loss": 2.0658,
      "step": 3500
    },
    {
      "epoch": 1.3,
      "learning_rate": 4.350438454043521e-05,
      "loss": 2.0382,
      "step": 4000
    },
    {
      "epoch": 1.46,
      "learning_rate": 4.269243260798961e-05,
      "loss": 1.9921,
      "step": 4500
    },
    {
      "epoch": 1.62,
      "learning_rate": 4.188048067554401e-05,
      "loss": 2.0076,
      "step": 5000
    },
    {
      "epoch": 1.79,
      "learning_rate": 4.106852874309841e-05,
      "loss": 1.9684,
      "step": 5500
    },
    {
      "epoch": 1.95,
      "learning_rate": 4.025657681065281e-05,
      "loss": 1.9399,
      "step": 6000
    },
    {
      "epoch": 2.11,
      "learning_rate": 3.944462487820721e-05,
      "loss": 1.8806,
      "step": 6500
    },
    {
      "epoch": 2.27,
      "learning_rate": 3.8632672945761614e-05,
      "loss": 1.8606,
      "step": 7000
    },
    {
      "epoch": 2.44,
      "learning_rate": 3.7820721013316015e-05,
      "loss": 1.8487,
      "step": 7500
    },
    {
      "epoch": 2.6,
      "learning_rate": 3.7008769080870415e-05,
      "loss": 1.8272,
      "step": 8000
    },
    {
      "epoch": 2.76,
      "learning_rate": 3.6196817148424816e-05,
      "loss": 1.8094,
      "step": 8500
    },
    {
      "epoch": 2.92,
      "learning_rate": 3.538486521597922e-05,
      "loss": 1.8095,
      "step": 9000
    },
    {
      "epoch": 3.09,
      "learning_rate": 3.457291328353362e-05,
      "loss": 1.7765,
      "step": 9500
    },
    {
      "epoch": 3.25,
      "learning_rate": 3.376096135108802e-05,
      "loss": 1.7497,
      "step": 10000
    },
    {
      "epoch": 3.41,
      "learning_rate": 3.294900941864242e-05,
      "loss": 1.7282,
      "step": 10500
    },
    {
      "epoch": 3.57,
      "learning_rate": 3.213705748619682e-05,
      "loss": 1.7146,
      "step": 11000
    },
    {
      "epoch": 3.73,
      "learning_rate": 3.132510555375122e-05,
      "loss": 1.7382,
      "step": 11500
    },
    {
      "epoch": 3.9,
      "learning_rate": 3.051315362130562e-05,
      "loss": 1.7151,
      "step": 12000
    },
    {
      "epoch": 4.06,
      "learning_rate": 2.970120168886002e-05,
      "loss": 1.6829,
      "step": 12500
    },
    {
      "epoch": 4.22,
      "learning_rate": 2.888924975641442e-05,
      "loss": 1.6476,
      "step": 13000
    },
    {
      "epoch": 4.38,
      "learning_rate": 2.8077297823968824e-05,
      "loss": 1.6511,
      "step": 13500
    },
    {
      "epoch": 4.55,
      "learning_rate": 2.7265345891523225e-05,
      "loss": 1.6491,
      "step": 14000
    },
    {
      "epoch": 4.71,
      "learning_rate": 2.6453393959077623e-05,
      "loss": 1.6518,
      "step": 14500
    },
    {
      "epoch": 4.87,
      "learning_rate": 2.5641442026632023e-05,
      "loss": 1.6616,
      "step": 15000
    },
    {
      "epoch": 5.03,
      "learning_rate": 2.4829490094186424e-05,
      "loss": 1.6279,
      "step": 15500
    },
    {
      "epoch": 5.2,
      "learning_rate": 2.401753816174083e-05,
      "loss": 1.6113,
      "step": 16000
    },
    {
      "epoch": 5.36,
      "learning_rate": 2.3205586229295226e-05,
      "loss": 1.5877,
      "step": 16500
    },
    {
      "epoch": 5.52,
      "learning_rate": 2.2393634296849627e-05,
      "loss": 1.593,
      "step": 17000
    },
    {
      "epoch": 5.68,
      "learning_rate": 2.158168236440403e-05,
      "loss": 1.5803,
      "step": 17500
    },
    {
      "epoch": 5.85,
      "learning_rate": 2.0769730431958428e-05,
      "loss": 1.587,
      "step": 18000
    },
    {
      "epoch": 6.01,
      "learning_rate": 1.995777849951283e-05,
      "loss": 1.6017,
      "step": 18500
    },
    {
      "epoch": 6.17,
      "learning_rate": 1.914582656706723e-05,
      "loss": 1.5571,
      "step": 19000
    },
    {
      "epoch": 6.33,
      "learning_rate": 1.833387463462163e-05,
      "loss": 1.5448,
      "step": 19500
    },
    {
      "epoch": 6.5,
      "learning_rate": 1.752192270217603e-05,
      "loss": 1.5461,
      "step": 20000
    },
    {
      "epoch": 6.66,
      "learning_rate": 1.6709970769730432e-05,
      "loss": 1.5556,
      "step": 20500
    },
    {
      "epoch": 6.82,
      "learning_rate": 1.5898018837284833e-05,
      "loss": 1.5499,
      "step": 21000
    },
    {
      "epoch": 6.98,
      "learning_rate": 1.5086066904839236e-05,
      "loss": 1.5442,
      "step": 21500
    },
    {
      "epoch": 7.15,
      "learning_rate": 1.4274114972393635e-05,
      "loss": 1.519,
      "step": 22000
    },
    {
      "epoch": 7.31,
      "learning_rate": 1.3462163039948036e-05,
      "loss": 1.5078,
      "step": 22500
    },
    {
      "epoch": 7.47,
      "learning_rate": 1.2650211107502435e-05,
      "loss": 1.5119,
      "step": 23000
    },
    {
      "epoch": 7.63,
      "learning_rate": 1.1838259175056837e-05,
      "loss": 1.517,
      "step": 23500
    },
    {
      "epoch": 7.79,
      "learning_rate": 1.1026307242611238e-05,
      "loss": 1.5205,
      "step": 24000
    },
    {
      "epoch": 7.96,
      "learning_rate": 1.0214355310165639e-05,
      "loss": 1.5194,
      "step": 24500
    },
    {
      "epoch": 8.12,
      "learning_rate": 9.40240337772004e-06,
      "loss": 1.507,
      "step": 25000
    },
    {
      "epoch": 8.28,
      "learning_rate": 8.59045144527444e-06,
      "loss": 1.4918,
      "step": 25500
    },
    {
      "epoch": 8.44,
      "learning_rate": 7.778499512828841e-06,
      "loss": 1.5041,
      "step": 26000
    },
    {
      "epoch": 8.61,
      "learning_rate": 6.966547580383241e-06,
      "loss": 1.5041,
      "step": 26500
    },
    {
      "epoch": 8.77,
      "learning_rate": 6.154595647937642e-06,
      "loss": 1.4848,
      "step": 27000
    },
    {
      "epoch": 8.93,
      "learning_rate": 5.342643715492044e-06,
      "loss": 1.4847,
      "step": 27500
    },
    {
      "epoch": 9.09,
      "learning_rate": 4.530691783046444e-06,
      "loss": 1.4702,
      "step": 28000
    },
    {
      "epoch": 9.26,
      "learning_rate": 3.718739850600845e-06,
      "loss": 1.4893,
      "step": 28500
    },
    {
      "epoch": 9.42,
      "learning_rate": 2.9067879181552453e-06,
      "loss": 1.4766,
      "step": 29000
    },
    {
      "epoch": 9.58,
      "learning_rate": 2.094835985709646e-06,
      "loss": 1.4724,
      "step": 29500
    },
    {
      "epoch": 9.74,
      "learning_rate": 1.2828840532640467e-06,
      "loss": 1.4887,
      "step": 30000
    },
    {
      "epoch": 9.91,
      "learning_rate": 4.7093212081844755e-07,
      "loss": 1.4828,
      "step": 30500
    },
    {
      "epoch": 10.0,
      "step": 30790,
      "total_flos": 1.1015677873363968e+17,
      "train_loss": 1.726544579967116,
      "train_runtime": 9217.6926,
      "train_samples_per_second": 33.394,
      "train_steps_per_second": 3.34
    }
  ],
  "max_steps": 30790,
  "num_train_epochs": 10,
  "total_flos": 1.1015677873363968e+17,
  "trial_name": null,
  "trial_params": null
}