{
  "best_metric": 0.7333333333333333,
  "best_model_checkpoint": "beit-base-patch16-224-pt22k-ft22k/checkpoint-8",
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 24,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.0,
      "eval_accuracy": 0.7333333333333333,
      "eval_loss": 3.1590805053710938,
      "eval_runtime": 3.4474,
      "eval_samples_per_second": 69.617,
      "eval_steps_per_second": 2.321,
      "step": 8
    },
    {
      "epoch": 1.25,
      "learning_rate": 0.03333333333333333,
      "loss": 5.12,
      "step": 10
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.26666666666666666,
      "eval_loss": 2.633744716644287,
      "eval_runtime": 3.2534,
      "eval_samples_per_second": 73.768,
      "eval_steps_per_second": 2.459,
      "step": 16
    },
    {
      "epoch": 2.5,
      "learning_rate": 0.009523809523809525,
      "loss": 1.8305,
      "step": 20
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.7333333333333333,
      "eval_loss": 0.6371921300888062,
      "eval_runtime": 3.6339,
      "eval_samples_per_second": 66.045,
      "eval_steps_per_second": 2.202,
      "step": 24
    },
    {
      "epoch": 3.0,
      "step": 24,
      "total_flos": 2.32376605913088e+17,
      "train_loss": 3.054683963457743,
      "train_runtime": 175.6001,
      "train_samples_per_second": 17.084,
      "train_steps_per_second": 0.137
    }
  ],
  "logging_steps": 10,
  "max_steps": 24,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 2.32376605913088e+17,
  "trial_name": null,
  "trial_params": null
}