{
"checkpoint_path": "/data_2to/devel_data/nn_pruning/output/squad_test_final_fine_tune/fine_tuned_hp_od-output__squad_test3_es-steps_nte20_ls250_est5000_rn-output__squad_test3_dpm-sigmoied_threshold:1d_alt_apme-sigmoied_threshold_aowd0_bm1_abr32_abc32_it0_fw10_r-l1_rfl20_dl0.25_dtnop-csarron__bert-base-uncased-squad-v1/checkpoint-22132",
"config": {
"_name_or_path": "/tmp/tmp61klxvbz",
"architectures": ["BertForQuestionAnswering"],
"attention_probs_dropout_prob": 0.1,
"gradient_checkpointing": false,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"initializer_range": 0.02,
"intermediate_size": 3072,
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"model_type": "bert",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 0,
"position_embedding_type": "absolute",
"pruned_heads": {
"0": [0, 2, 4, 5, 6, 7, 9, 11],
"1": [0, 2, 3, 5, 6, 7, 8, 9],
"10": [1, 4, 5, 6, 7, 8, 9],
"11": [0, 2, 3, 5, 6, 7, 8, 10, 11],
"2": [1, 2, 4, 7, 8, 11],
"3": [2, 4, 6, 7, 10],
"4": [0, 1, 2, 6, 11],
"5": [0, 1, 2, 5, 6, 7, 11],
"6": [0, 2, 3, 4, 7, 10],
"7": [1, 3, 6, 7, 11],
"8": [0, 1, 2, 3, 4, 6, 8],
"9": [1, 3, 4, 5, 7, 9, 10]
},
"type_vocab_size": 2,
"vocab_size": 30522
},
"eval_metrics": {
"exact_match": 80.02838221381268,
"f1": 87.70940223967354,
"main_metric": 87.70940223967354
},
"model_args": {
"cache_dir": null,
"config_name": null,
"model_name_or_path": "/home/lagunas/devel/hf/nn_pruning/nn_pruning/analysis/tmp_finetune",
"tokenizer_name": null
},
"source_checkpoint": "/data_2to/devel_data/nn_pruning/output/squad_test3/hp_od-output__squad_test3_es-steps_nte20_ls250_est5000_rn-output__squad_test3_dpm-sigmoied_threshold:1d_alt_apme-sigmoied_threshold_aowd0_bm1_abr32_abc32_it0_fw10_r-l1_rfl20_dl0.25_dtnop-csarron__bert-base-uncased-squad-v1/checkpoint-90000",
"sparse_args": {
"ampere_pruning_method": "disabled",
"attention_block_cols": 1,
"attention_block_rows": 1,
"attention_lambda": 1.0,
"attention_output_with_dense": 0,
"attention_pruning_method": "topK",
"bias_mask": true,
"dense_block_cols": 1,
"dense_block_rows": 1,
"dense_lambda": 1.0,
"dense_pruning_method": "topK",
"distil_alpha_ce": 0.1,
"distil_alpha_teacher": 0.9,
"distil_teacher_name_or_path": "csarron/bert-base-uncased-squad-v1",
"distil_temperature": 2.0,
"final_ampere_temperature": 20.0,
"final_finetune": 1,
"final_threshold": 0.5,
"final_warmup": 0,
"initial_ampere_temperature": 0.0,
"initial_threshold": 1.0,
"initial_warmup": 0,
"mask_init": "constant",
"mask_scale": 0.0,
"mask_scores_learning_rate": 0.01,
"regularization": "",
"regularization_final_lambda": 0
},
"speed": {
"cuda_eval_elapsed_time": 15.838374267578125,
"eval_elapsed_time": 22.999519595876336
},
"speedup": 2.436764806371294,
"stats": {
"layers": {
"0": {
"linear_attention_nnz": 786432,
"linear_attention_total": 2359296,
"linear_dense_nnz": 847872,
"linear_dense_total": 4718592,
"linear_nnz": 1634304,
"linear_total": 7077888,
"nnz": 1640232,
"total": 7086336
},
"1": {
"linear_attention_nnz": 786432,
"linear_attention_total": 2359296,
"linear_dense_nnz": 1101312,
"linear_dense_total": 4718592,
"linear_nnz": 1887744,
"linear_total": 7077888,
"nnz": 1893837,
"total": 7086336
},
"10": {
"linear_attention_nnz": 983040,
"linear_attention_total": 2359296,
"linear_dense_nnz": 147456,
"linear_dense_total": 4718592,
"linear_nnz": 1130496,
"linear_total": 7077888,
"nnz": 1136160,
"total": 7086528
},
"11": {
"linear_attention_nnz": 589824,
"linear_attention_total": 2359296,
"linear_dense_nnz": 365568,
"linear_dense_total": 4718592,
"linear_nnz": 955392,
"linear_total": 7077888,
"nnz": 960814,
"total": 7086144
},
"2": {
"linear_attention_nnz": 1179648,
"linear_attention_total": 2359296,
"linear_dense_nnz": 1221120,
"linear_dense_total": 4718592,
"linear_nnz": 2400768,
"linear_total": 7077888,
"nnz": 2407323,
"total": 7086720
},
"3": {
"linear_attention_nnz": 1376256,
"linear_attention_total": 2359296,
"linear_dense_nnz": 1211904,
"linear_dense_total": 4718592,
"linear_nnz": 2588160,
"linear_total": 7077888,
"nnz": 2594901,
"total": 7086912
},
"4": {
"linear_attention_nnz": 1376256,
"linear_attention_total": 2359296,
"linear_dense_nnz": 1279488,
"linear_dense_total": 4718592,
"linear_nnz": 2655744,
"linear_total": 7077888,
"nnz": 2662529,
"total": 7086912
},
"5": {
"linear_attention_nnz": 983040,
"linear_attention_total": 2359296,
"linear_dense_nnz": 1216512,
"linear_dense_total": 4718592,
"linear_nnz": 2199552,
"linear_total": 7077888,
"nnz": 2205912,
"total": 7086528
},
"6": {
"linear_attention_nnz": 1179648,
"linear_attention_total": 2359296,
"linear_dense_nnz": 952320,
"linear_dense_total": 4718592,
"linear_nnz": 2131968,
"linear_total": 7077888,
"nnz": 2138348,
"total": 7086720
},
"7": {
"linear_attention_nnz": 1376256,
"linear_attention_total": 2359296,
"linear_dense_nnz": 715776,
"linear_dense_total": 4718592,
"linear_nnz": 2092032,
"linear_total": 7077888,
"nnz": 2098450,
"total": 7086912
},
"8": {
"linear_attention_nnz": 983040,
"linear_attention_total": 2359296,
"linear_dense_nnz": 434688,
"linear_dense_total": 4718592,
"linear_nnz": 1417728,
"linear_total": 7077888,
"nnz": 1423579,
"total": 7086528
},
"9": {
"linear_attention_nnz": 983040,
"linear_attention_total": 2359296,
"linear_dense_nnz": 172032,
"linear_dense_total": 4718592,
"linear_nnz": 1155072,
"linear_total": 7077888,
"nnz": 1160752,
"total": 7086528
}
},
"linear_nnz": 22248960,
"linear_sparsity": 73.80461516203704,
"linear_total": 84934656,
"nnz": 46161559,
"pruned_heads": {
"0": [0, 2, 4, 5, 6, 7, 9, 11],
"1": [0, 2, 3, 5, 6, 7, 8, 9],
"10": [1, 4, 5, 6, 7, 8, 9],
"11": [0, 2, 3, 5, 6, 7, 8, 10, 11],
"2": [1, 2, 4, 7, 8, 11],
"3": [2, 4, 6, 7, 10],
"4": [0, 1, 2, 6, 11],
"5": [0, 1, 2, 5, 6, 7, 11],
"6": [0, 2, 3, 4, 7, 10],
"7": [1, 3, 6, 7, 11],
"8": [0, 1, 2, 3, 4, 6, 8],
"9": [1, 3, 4, 5, 7, 9, 10]
},
"total": 108877826,
"total_sparsity": 57.602424023418685
},
"training_args": {
"_n_gpu": -1,
"adafactor": false,
"adam_beta1": 0.9,
"adam_beta2": 0.999,
"adam_epsilon": 1e-08,
"dataloader_drop_last": false,
"dataloader_num_workers": 0,
"dataloader_pin_memory": true,
"ddp_find_unused_parameters": null,
"debug": false,
"deepspeed": null,
"disable_tqdm": false,
"do_eval": 1,
"do_predict": false,
"do_train": 1,
"eval_accumulation_steps": null,
"eval_steps": 2500,
"evaluation_strategy": "steps",
"fp16": false,
"fp16_backend": "auto",
"fp16_full_eval": false,
"fp16_opt_level": "O1",
"gradient_accumulation_steps": 1,
"greater_is_better": null,
"group_by_length": false,
"ignore_data_skip": false,
"label_names": null,
"label_smoothing_factor": 0.0,
"learning_rate": 3e-05,
"length_column_name": "length",
"load_best_model_at_end": false,
"local_rank": -1,
"logging_dir": "/data_2to/devel_data/nn_pruning/output/squad_test_final_fine_tune/fine_tuned_hp_od-output__squad_test3_es-steps_nte20_ls250_est5000_rn-output__squad_test3_dpm-sigmoied_threshold:1d_alt_apme-sigmoied_threshold_aowd0_bm1_abr32_abc32_it0_fw10_r-l1_rfl20_dl0.25_dtnop-csarron__bert-base-uncased-squad-v1",
"logging_first_step": false,
"logging_steps": 250,
"logging_strategy": "steps",
"lr_scheduler_type": "linear",
"max_grad_norm": 1.0,
"max_steps": -1,
"metric_for_best_model": null,
"mp_parameters": "",
"no_cuda": false,
"num_train_epochs": 4,
"optimize_model_before_eval": "disabled",
"output_dir": "/data_2to/devel_data/nn_pruning/output/squad_test_final_fine_tune/fine_tuned_hp_od-output__squad_test3_es-steps_nte20_ls250_est5000_rn-output__squad_test3_dpm-sigmoied_threshold:1d_alt_apme-sigmoied_threshold_aowd0_bm1_abr32_abc32_it0_fw10_r-l1_rfl20_dl0.25_dtnop-csarron__bert-base-uncased-squad-v1",
"overwrite_output_dir": 1,
"past_index": -1,
"per_device_eval_batch_size": 8,
"per_device_train_batch_size": 16,
"per_gpu_eval_batch_size": null,
"per_gpu_train_batch_size": null,
"prediction_loss_only": false,
"remove_unused_columns": true,
"report_to": null,
"run_name": "/data_2to/devel_data/nn_pruning/output/squad_test_final_fine_tune/fine_tuned_hp_od-output__squad_test3_es-steps_nte20_ls250_est5000_rn-output__squad_test3_dpm-sigmoied_threshold:1d_alt_apme-sigmoied_threshold_aowd0_bm1_abr32_abc32_it0_fw10_r-l1_rfl20_dl0.25_dtnop-csarron__bert-base-uncased-squad-v1",
"save_steps": 2500,
"save_strategy": "steps",
"save_total_limit": 50,
"seed": 17,
"sharded_ddp": "",
"skip_memory_metrics": false,
"tpu_metrics_debug": false,
"tpu_num_cores": null,
"warmup_ratio": 0.0,
"warmup_steps": 10,
"weight_decay": 0.0
}
}