{
  "architectures": [
    "XLMRobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "finetuning_task": null,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 1024,
  "id2label": {
    "0": "B-LOC",
    "1": "B-MISC",
    "2": "B-ORG",
    "3": "B-PER",
    "4": "I-LOC",
    "5": "I-MISC",
    "6": "I-ORG",
    "7": "I-PER",
    "8": "O"
  },
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "is_decoder": false,
  "label2id": {
    "B-LOC": 0,
    "B-MISC": 1,
    "B-ORG": 2,
    "B-PER": 3,
    "I-LOC": 4,
    "I-MISC": 5,
    "I-ORG": 6,
    "I-PER": 7,
    "O": 8
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "num_labels": 9,
  "output_attentions": false,
  "output_hidden_states": false,
  "output_past": true,
  "pruned_heads": {},
  "torchscript": false,
  "type_vocab_size": 1,
  "use_bfloat16": false,
  "vocab_size": 250002
}