{
"best_metric": 0.2616243362426758,
"best_model_checkpoint": "./checkpoints/checkpoint-1964",
"epoch": 4.0,
"global_step": 1964,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 1.0,
"eval_accuracy": 0.8884644766997708,
"eval_f1": 0.8884644766997708,
"eval_loss": 0.4412623643875122,
"eval_precision": 0.8884644766997708,
"eval_recall": 0.8884644766997708,
"eval_runtime": 43.6404,
"eval_samples_per_second": 59.99,
"eval_steps_per_second": 1.879,
"step": 491
},
{
"epoch": 1.02,
"learning_rate": 2e-05,
"loss": 1.3111,
"step": 500
},
{
"epoch": 2.0,
"eval_accuracy": 0.9037433155080213,
"eval_f1": 0.9037433155080212,
"eval_loss": 0.31344300508499146,
"eval_precision": 0.9037433155080213,
"eval_recall": 0.9037433155080213,
"eval_runtime": 43.7135,
"eval_samples_per_second": 59.89,
"eval_steps_per_second": 1.876,
"step": 982
},
{
"epoch": 2.04,
"learning_rate": 1.591169255928046e-05,
"loss": 0.3547,
"step": 1000
},
{
"epoch": 3.0,
"eval_accuracy": 0.9113827349121467,
"eval_f1": 0.9113827349121467,
"eval_loss": 0.2822723388671875,
"eval_precision": 0.9113827349121467,
"eval_recall": 0.9113827349121467,
"eval_runtime": 43.7763,
"eval_samples_per_second": 59.804,
"eval_steps_per_second": 1.873,
"step": 1473
},
{
"epoch": 3.05,
"learning_rate": 1.1823385118560918e-05,
"loss": 0.2375,
"step": 1500
},
{
"epoch": 4.0,
"eval_accuracy": 0.9182582123758595,
"eval_f1": 0.9182582123758595,
"eval_loss": 0.2616243362426758,
"eval_precision": 0.9182582123758595,
"eval_recall": 0.9182582123758595,
"eval_runtime": 43.7951,
"eval_samples_per_second": 59.778,
"eval_steps_per_second": 1.872,
"step": 1964
}
],
"max_steps": 2946,
"num_train_epochs": 6,
"total_flos": 1100711158866000.0,
"trial_name": null,
"trial_params": null
}