bert-base-uncased-pf-cola / head_config.json
calpt · Fix label2id mapping. · cc6a3a0
{
  "config": {
    "activation_function": "tanh",
    "bias": true,
    "head_type": "classification",
    "label2id": {
      "unacceptable": 0,
      "acceptable": 1
    },
    "layers": 2,
    "num_labels": 2,
    "use_pooler": false
  },
  "hidden_size": 768,
  "model_class": "BertModelWithHeads",
  "model_name": "bert-base-uncased",
  "model_type": "bert",
  "name": "glue_cola"
}
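
This file describes the classification head shipped with the CoLA adapter: a two-layer head with tanh activation and bias, two labels (0 = unacceptable, 1 = acceptable), no pooler, on top of bert-base-uncased (hidden size 768). Below is a minimal usage sketch, assuming the adapter-transformers package (which provides the BertModelWithHeads class named in model_class) and that this head belongs to the AdapterHub/bert-base-uncased-pf-cola adapter repo; that repo id is inferred from the file path, not stated in the config.

# Minimal sketch, assuming the adapter-transformers package and the
# AdapterHub/bert-base-uncased-pf-cola adapter repo (assumed id from the path).
import torch
from transformers import AutoTokenizer, BertModelWithHeads

tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
model = BertModelWithHeads.from_pretrained("bert-base-uncased")

# load_adapter fetches the adapter weights together with this classification head
adapter_name = model.load_adapter("AdapterHub/bert-base-uncased-pf-cola", source="hf")
model.set_active_adapters(adapter_name)

inputs = tokenizer("The boy the girl saw left.", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits

# per the label2id mapping above: 0 = unacceptable, 1 = acceptable
pred = logits.argmax(dim=-1).item()
print("acceptable" if pred == 1 else "unacceptable")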