{
  "best_metric": 1.810072422027588,
  "best_model_checkpoint": "flower_groups_image_detection/checkpoint-3102",
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 3102,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.3223726627981947,
      "grad_norm": 3.30501389503479,
      "learning_rate": 1.7051114023591087e-07,
      "loss": 1.7502,
      "step": 500
    },
    {
      "epoch": 0.6447453255963894,
      "grad_norm": 3.4669442176818848,
      "learning_rate": 1.3774574049803407e-07,
      "loss": 1.7329,
      "step": 1000
    },
    {
      "epoch": 0.9671179883945842,
      "grad_norm": 5.2170257568359375,
      "learning_rate": 1.0498034076015727e-07,
      "loss": 1.7245,
      "step": 1500
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.7796323173681664,
      "eval_loss": 1.8147186040878296,
      "eval_runtime": 321.6906,
      "eval_samples_per_second": 102.807,
      "eval_steps_per_second": 12.851,
      "step": 1551
    },
    {
      "epoch": 1.2894906511927788,
      "grad_norm": 2.947956085205078,
      "learning_rate": 7.221494102228047e-08,
      "loss": 1.7307,
      "step": 2000
    },
    {
      "epoch": 1.6118633139909737,
      "grad_norm": 3.766296863555908,
      "learning_rate": 3.944954128440367e-08,
      "loss": 1.7229,
      "step": 2500
    },
    {
      "epoch": 1.9342359767891684,
      "grad_norm": 3.5768988132476807,
      "learning_rate": 6.684141546526867e-09,
      "loss": 1.7354,
      "step": 3000
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.7807813255926463,
      "eval_loss": 1.810072422027588,
      "eval_runtime": 325.6105,
      "eval_samples_per_second": 101.569,
      "eval_steps_per_second": 12.696,
      "step": 3102
    }
  ],
  "logging_steps": 500,
  "max_steps": 3102,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 500,
  "total_flos": 7.697230908500754e+18,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}