deberta-large_lemon_5k_3_p3 / trainer_state.json
{
  "best_metric": 0.9414405348897014,
  "best_model_checkpoint": "model_saves/deberta-large_lemon_5k_3_p3/checkpoint-268",
  "epoch": 4.0,
  "global_step": 1072,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9414405348897014,
      "eval_loss": 0.4162137806415558,
      "eval_runtime": 4.632,
      "eval_samples_per_second": 946.027,
      "eval_steps_per_second": 7.556,
      "step": 268
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9412448708058005,
      "eval_loss": 0.4352792203426361,
      "eval_runtime": 4.6349,
      "eval_samples_per_second": 945.428,
      "eval_steps_per_second": 7.551,
      "step": 536
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9402106463623252,
      "eval_loss": 0.47978702187538147,
      "eval_runtime": 4.645,
      "eval_samples_per_second": 943.388,
      "eval_steps_per_second": 7.535,
      "step": 804
    },
    {
      "epoch": 3.73,
      "learning_rate": 1e-05,
      "loss": 0.2573,
      "step": 1000
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.9398137277921265,
      "eval_loss": 0.5360333323478699,
      "eval_runtime": 4.6334,
      "eval_samples_per_second": 945.747,
      "eval_steps_per_second": 7.554,
      "step": 1072
    },
    {
      "epoch": 4.0,
      "step": 1072,
      "total_flos": 1.6078384287186944e+16,
      "train_loss": 0.2507487820155585,
      "train_runtime": 457.1298,
      "train_samples_per_second": 1125.632,
      "train_steps_per_second": 8.794
    }
  ],
  "max_steps": 4020,
  "num_train_epochs": 15,
  "total_flos": 1.6078384287186944e+16,
  "trial_name": null,
  "trial_params": null
}
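
The state above records per-epoch evaluation results plus the running totals the `Trainer` keeps between checkpoints; `best_model_checkpoint` points at checkpoint-268 because eval accuracy peaks at epoch 1 and declines afterwards. Below is a minimal sketch of how such a file can be inspected offline, assuming it has been saved locally as `trainer_state.json` (a hypothetical path, not part of this repo); it uses only the field names that appear in the JSON above.

```python
import json

# Load the Trainer state file (assumed local path for this sketch).
with open("trainer_state.json") as f:
    state = json.load(f)

# Best checkpoint and the metric it was selected on.
print(f"best checkpoint: {state['best_model_checkpoint']}")
print(f"best metric:     {state['best_metric']:.4f}")

# log_history mixes training and evaluation entries; only the
# evaluation ones carry eval_accuracy / eval_loss.
for entry in state["log_history"]:
    if "eval_accuracy" in entry:
        print(f"epoch {entry['epoch']:.0f} (step {entry['step']}): "
              f"accuracy={entry['eval_accuracy']:.4f}, "
              f"loss={entry['eval_loss']:.4f}")
```

Running this against the file above would print one line per evaluation epoch (steps 268, 536, 804, 1072), showing the accuracy drifting down from 0.9414 to 0.9398 while eval loss rises.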