{
  "config": {
    "adapter_residual_before_ln": false,
    "cross_adapter": false,
    "dropout": 0.0,
    "factorized_phm_W": true,
    "factorized_phm_rule": false,
    "hypercomplex_nonlinearity": "glorot-uniform",
    "init_weights": "bert",
    "inv_adapter": null,
    "inv_adapter_reduction_factor": null,
    "is_parallel": false,
    "learn_phm": true,
    "leave_out": [],
    "ln_after": false,
    "ln_before": false,
    "mh_adapter": false,
    "non_linearity": "relu",
    "original_ln_after": true,
    "original_ln_before": true,
    "output_adapter": true,
    "phm_bias": true,
    "phm_c_init": "normal",
    "phm_dim": 4,
    "phm_init_range": 0.0001,
    "phm_layer": false,
    "phm_rank": 1,
    "reduction_factor": 16,
    "residual_before_ln": true,
    "scaling": 1.0,
    "shared_W_phm": false,
    "shared_phm_rule": true,
    "use_gating": false
  },
  "hidden_size": 768,
  "model_class": "RobertaAdapterModel",
  "model_name": "roberta-base",
  "model_type": "roberta",
  "name": "hellaswag",
  "version": "0.2.0"
}
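
This config describes a Pfeiffer-style bottleneck adapter (`mh_adapter: false`, `output_adapter: true`) for `roberta-base`, where `reduction_factor: 16` gives a bottleneck of 768 / 16 = 48 hidden units. A minimal loading sketch, assuming the adapter-transformers fork of Hugging Face Transformers (which provides `RobertaAdapterModel`); the local path `./hellaswag` is a hypothetical placeholder for the directory containing this `adapter_config.json` and the trained adapter weights:

```python
from transformers import RobertaAdapterModel  # provided by the adapter-transformers fork

# Base model named in the config above
model = RobertaAdapterModel.from_pretrained("roberta-base")

# "./hellaswag" is a hypothetical path to the directory holding this
# adapter_config.json plus its saved adapter weights
adapter_name = model.load_adapter("./hellaswag")

# Activate the bottleneck adapter for forward passes
model.set_active_adapters(adapter_name)
```

With `use_gating: false` and `scaling: 1.0`, the adapter output is added to the residual stream unscaled; only the adapter's bottleneck parameters are task-specific, while the `roberta-base` weights stay frozen during adapter training.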