{
"best_metric": 0.7276084501581802,
"best_model_checkpoint": "InLegalBERT/checkpoint-1286",
"epoch": 2.0,
"eval_steps": 500,
"global_step": 1286,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.78,
"grad_norm": 6.225058078765869,
"learning_rate": 4.742871954380509e-05,
"loss": 1.065,
"step": 500
},
{
"epoch": 1.0,
"eval_accuracy": 0.7993803253292022,
"eval_f1_macro": 0.6028764374506596,
"eval_f1_micro": 0.7993803253292022,
"eval_f1_weighted": 0.780434475079005,
"eval_loss": 0.6395189166069031,
"eval_macro_fpr": 0.01853296480715729,
"eval_macro_sensitivity": 0.6307714529979154,
"eval_macro_specificity": 0.9847175193097543,
"eval_precision": 0.7817573111259606,
"eval_precision_macro": 0.6194286814679265,
"eval_recall": 0.7993803253292022,
"eval_recall_macro": 0.6307714529979154,
"eval_runtime": 29.4696,
"eval_samples_per_second": 43.808,
"eval_steps_per_second": 5.497,
"eval_weighted_fpr": 0.017610661589719183,
"eval_weighted_sensitivity": 0.7993803253292022,
"eval_weighted_specificity": 0.9713824643171124,
"step": 643
},
{
"epoch": 1.56,
"grad_norm": 8.756747245788574,
"learning_rate": 4.483670295489891e-05,
"loss": 0.5866,
"step": 1000
},
{
"epoch": 2.0,
"eval_accuracy": 0.8187451587916343,
"eval_f1_macro": 0.7276084501581802,
"eval_f1_micro": 0.8187451587916342,
"eval_f1_weighted": 0.8152450677099363,
"eval_loss": 0.6906521916389465,
"eval_macro_fpr": 0.01610571025604924,
"eval_macro_sensitivity": 0.7365748487318147,
"eval_macro_specificity": 0.9863504064994671,
"eval_precision": 0.8198905458065566,
"eval_precision_macro": 0.7284980957351811,
"eval_recall": 0.8187451587916343,
"eval_recall_macro": 0.7365748487318147,
"eval_runtime": 57.1075,
"eval_samples_per_second": 22.606,
"eval_steps_per_second": 2.837,
"eval_weighted_fpr": 0.015566790846194785,
"eval_weighted_sensitivity": 0.8187451587916343,
"eval_weighted_specificity": 0.976510938700373,
"step": 1286
}
],
"logging_steps": 500,
"max_steps": 9645,
"num_input_tokens_seen": 0,
"num_train_epochs": 15,
"save_steps": 500,
"total_flos": 2704044788895744.0,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}