GPT2-NL2SQL-Full / trainer_state.json
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.4258641493363617,
  "eval_steps": 500,
  "global_step": 6000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.10646603733409042,
      "grad_norm": 0.3511381149291992,
      "learning_rate": 4.81935110918419e-05,
      "loss": 0.365,
      "step": 1500
    },
    {
      "epoch": 0.21293207466818084,
      "grad_norm": 0.31686341762542725,
      "learning_rate": 4.548377772960474e-05,
      "loss": 0.1136,
      "step": 3000
    },
    {
      "epoch": 0.31939811200227125,
      "grad_norm": 0.3509831726551056,
      "learning_rate": 4.277404436736759e-05,
      "loss": 0.1087,
      "step": 4500
    },
    {
      "epoch": 0.4258641493363617,
      "grad_norm": 0.3466353118419647,
      "learning_rate": 4.006431100513043e-05,
      "loss": 0.1044,
      "step": 6000
    }
  ],
  "logging_steps": 1500,
  "max_steps": 28178,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 1500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.2288816668672e+16,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}
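
This file is the `TrainerState` snapshot saved at checkpoint step 6000 of 28178 (about 0.43 of the 2 configured epochs). The logged learning rates are consistent with a linear schedule with warmup (roughly a 5e-5 peak decaying toward 0 at `max_steps`), though the exact scheduler arguments are not recorded here. A minimal sketch below reads such a state file back and prints the logged loss and learning-rate trajectory; the checkpoint path is an assumption and should be adjusted to your own run.

```python
import json

# Path is an assumption -- point this at the checkpoint directory of your run.
with open("checkpoint-6000/trainer_state.json") as f:
    state = json.load(f)

print(f"global_step: {state['global_step']} / {state['max_steps']}")
print(f"epoch:       {state['epoch']:.4f} of {state['num_train_epochs']}")

# Each log_history entry in this file carries step, epoch, loss,
# learning_rate, and grad_norm logged every `logging_steps` steps.
for entry in state["log_history"]:
    print(
        f"step {entry['step']:>6}  "
        f"loss {entry['loss']:.4f}  "
        f"lr {entry['learning_rate']:.3e}  "
        f"grad_norm {entry['grad_norm']:.3f}"
    )
```

As a usage note, the same `log_history` list is what tools like TensorBoard exporters or simple matplotlib plots typically consume when visualizing the loss curve of a Trainer run.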