{
  "_num_labels": 17,
  "architectures": [
    "BertForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "classifier_dropout": null,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "01000000",
    "1": "10000000",
    "2": "11000000",
    "3": "12000000",
    "4": "13000000",
    "5": "14000000",
    "6": "15000000",
    "7": "16000000",
    "8": "17000000",
    "9": "02000000",
    "10": "03000000",
    "11": "04000000",
    "12": "05000000",
    "13": "06000000",
    "14": "07000000",
    "15": "08000000",
    "16": "09000000"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "01000000": 0,
    "10000000": 1,
    "11000000": 2,
    "12000000": 3,
    "13000000": 4,
    "14000000": 5,
    "15000000": 6,
    "16000000": 7,
    "17000000": 8,
    "02000000": 9,
    "03000000": 10,
    "04000000": 11,
    "05000000": 12,
    "06000000": 13,
    "07000000": 14,
    "08000000": 15,
    "09000000": 16
  },
  "layer_norm_eps": 1e-12,
  "max_length": 64,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 0,
  "padding": "max_length",
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.22.1",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 31102,
  "organization": "NLP Odyssey"
}
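
The file above is a standard Hugging Face transformers configuration for a BERT sequence classifier with 17 output labels. As a minimal sketch of how such a config can be consumed, the snippet below loads the JSON locally and instantiates the architecture named in "architectures". It assumes the file has been saved as config.json in the working directory; building a model from a config alone yields randomly initialized weights, so the trained weights would still have to come from the hosting repository via from_pretrained().

from transformers import BertConfig, BertForSequenceClassification

# Load the configuration shown above (assumes ./config.json exists locally).
config = BertConfig.from_json_file("config.json")

# PretrainedConfig normalizes id2label keys to integers on load.
print(config.num_labels)   # 17
print(config.id2label[0])  # "01000000"

# Instantiating the class named in "architectures" from the config alone
# creates a *randomly initialized* model; the trained weights require
# from_pretrained() on the repository that hosts this file.
model = BertForSequenceClassification(config)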