{
  "best_metric": 1.79765784740448,
  "best_model_checkpoint": "flower_groups_image_detection/checkpoint-1551",
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1551,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.3223726627981947,
      "grad_norm": 2.987184762954712,
      "learning_rate": 1.4003997335109924e-07,
      "loss": 1.714,
      "step": 500
    },
    {
      "epoch": 0.6447453255963894,
      "grad_norm": 5.513508319854736,
      "learning_rate": 7.341772151898734e-08,
      "loss": 1.7232,
      "step": 1000
    },
    {
      "epoch": 0.9671179883945842,
      "grad_norm": 4.085524082183838,
      "learning_rate": 6.795469686875415e-09,
      "loss": 1.7356,
      "step": 1500
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.7784833091436865,
      "eval_loss": 1.79765784740448,
      "eval_runtime": 332.6377,
      "eval_samples_per_second": 99.423,
      "eval_steps_per_second": 12.428,
      "step": 1551
    }
  ],
  "logging_steps": 500,
  "max_steps": 1551,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "total_flos": 3.848615454250377e+18,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}