Commit 31c3149: Training in progress, epoch 1
{
"_name_or_path": "distilbert-base-multilingual-cased",
"activation": "gelu",
"architectures": [
"DistilBertForTokenClassification"
],
"attention_dropout": 0.1,
"dim": 768,
"dropout": 0.1,
"hidden_dim": 3072,
"id2label": {
"0": "O",
"1": "B-BUILDING",
"2": "I-BUILDING",
"3": "B-CITY",
"4": "I-CITY",
"5": "B-COUNTRY",
"6": "I-COUNTRY",
"7": "B-PLACE",
"8": "I-PLACE",
"9": "B-TERRITORY",
"10": "I-TERRITORY",
"11": "I-UNIT",
"12": "B-UNIT",
"13": "B-VALUE",
"14": "I-VALUE",
"15": "B-YEAR",
"16": "I-YEAR",
"17": "B-STANDARD ABBREVIATION",
"18": "I-STANDARD ABBREVIATION",
"19": "B-MONTH",
"20": "I-MONTH",
"21": "B-DAY",
"22": "I-DAY",
"23": "B-AGE",
"24": "I-AGE",
"25": "B-ETHNIC CATEGORY",
"26": "I-ETHNIC CATEGORY",
"27": "B-FAMILY NAME",
"28": "I-FAMILY NAME",
"29": "B-INITIAL NAME",
"30": "I-INITIAL NAME",
"31": "B-MARITAL STATUS",
"32": "I-MARITAL STATUS",
"33": "B-PROFESSION",
"34": "I-PROFESSION",
"35": "B-ROLE",
"36": "I-ROLE",
"37": "B-NATIONALITY",
"38": "I-NATIONALITY",
"39": "B-TITLE",
"40": "I-TITLE",
"41": "B-URL",
"42": "I-URL",
"43": "B-TYPE",
"44": "I-TYPE"
},
"initializer_range": 0.02,
"label2id": {
"B-AGE": 23,
"B-BUILDING": 1,
"B-CITY": 3,
"B-COUNTRY": 5,
"B-DAY": 21,
"B-ETHNIC CATEGORY": 25,
"B-FAMILY NAME": 27,
"B-INITIAL NAME": 29,
"B-MARITAL STATUS": 31,
"B-MONTH": 19,
"B-NATIONALITY": 37,
"B-PLACE": 7,
"B-PROFESSION": 33,
"B-ROLE": 35,
"B-STANDARD ABBREVIATION": 17,
"B-TERRITORY": 9,
"B-TITLE": 39,
"B-TYPE": 43,
"B-UNIT": 12,
"B-URL": 41,
"B-VALUE": 13,
"B-YEAR": 15,
"I-AGE": 24,
"I-BUILDING": 2,
"I-CITY": 4,
"I-COUNTRY": 6,
"I-DAY": 22,
"I-ETHNIC CATEGORY": 26,
"I-FAMILY NAME": 28,
"I-INITIAL NAME": 30,
"I-MARITAL STATUS": 32,
"I-MONTH": 20,
"I-NATIONALITY": 38,
"I-PLACE": 8,
"I-PROFESSION": 34,
"I-ROLE": 36,
"I-STANDARD ABBREVIATION": 18,
"I-TERRITORY": 10,
"I-TITLE": 40,
"I-TYPE": 44,
"I-UNIT": 11,
"I-URL": 42,
"I-VALUE": 14,
"I-YEAR": 16,
"O": 0
},
"max_position_embeddings": 512,
"model_type": "distilbert",
"n_heads": 12,
"n_layers": 6,
"output_past": true,
"pad_token_id": 0,
"qa_dropout": 0.1,
"seq_classif_dropout": 0.2,
"sinusoidal_pos_embds": false,
"tie_weights_": true,
"torch_dtype": "float32",
"transformers_version": "4.26.0",
"vocab_size": 119547
}
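
The config above describes a DistilBERT (distilbert-base-multilingual-cased) checkpoint fine-tuned for token classification, with the id2label / label2id maps defining a BIO tag set (B-CITY, I-PROFESSION, and so on). Below is a minimal usage sketch with the Hugging Face transformers library; "dmargutierrez/<model-repo>" is a placeholder, not the confirmed repository id of this model.

# Minimal inference sketch, assuming the checkpoint is published on the Hub.
# "dmargutierrez/<model-repo>" is a placeholder repo id; replace it with the
# actual repository name of this model.
from transformers import AutoTokenizer, AutoModelForTokenClassification, pipeline

model_id = "dmargutierrez/<model-repo>"  # placeholder, not the real repo id

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForTokenClassification.from_pretrained(model_id)

# The pipeline converts predicted class indices back to the BIO tags
# declared in id2label above and can merge B-/I- pieces into entity spans.
ner = pipeline(
    "token-classification",
    model=model,
    tokenizer=tokenizer,
    aggregation_strategy="simple",  # group subword/B-/I- predictions into spans
)

print(ner("Dr. Smith moved to Lisbon in 1998."))

Because the label maps are stored in the config, no extra label file is needed at inference time; the pipeline reads them directly from id2label.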