{
  "best_metric": 0.4818756771742453,
  "best_model_checkpoint": "xlnet-base-cased/checkpoint-643",
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 643,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.78,
      "learning_rate": 4.871695178849145e-05,
      "loss": 1.2613,
      "step": 500
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.7676219984508134,
      "eval_f1_macro": 0.4818756771742453,
      "eval_f1_micro": 0.7676219984508135,
      "eval_f1_weighted": 0.7524385212708887,
      "eval_loss": 0.775787353515625,
      "eval_macro_fpr": 0.022019799606126652,
      "eval_macro_sensitivity": 0.5128622254580788,
      "eval_macro_specificity": 0.9823717840367622,
      "eval_precision": 0.7672637518290021,
      "eval_precision_macro": 0.5269420024270713,
      "eval_recall": 0.7676219984508134,
      "eval_recall_macro": 0.5128622254580788,
      "eval_runtime": 65.0712,
      "eval_samples_per_second": 19.84,
      "eval_steps_per_second": 2.49,
      "eval_weighted_fpr": 0.021165514321998025,
      "eval_weighted_sensitivity": 0.7676219984508134,
      "eval_weighted_specificity": 0.9679547621006213,
      "step": 643
    }
  ],
  "logging_steps": 500,
  "max_steps": 19290,
  "num_train_epochs": 30,
  "save_steps": 500,
  "total_flos": 1463872668346368.0,
  "trial_name": null,
  "trial_params": null
}