{
  "_name_or_path": "PlanTL-GOB-ES/roberta-large-bne",
  "architectures": [
    "RobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.0,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.0,
  "hidden_size": 1024,
  "id2label": {
    "0": "dp",
    "1": "px",
    "2": "nc",
    "3": "vsn",
    "4": "pn",
    "5": "Fia",
    "6": "vag",
    "7": "dt",
    "8": "Zm",
    "9": "p0",
    "10": "aq",
    "11": "vmn",
    "12": "da",
    "13": "Fz",
    "14": "rn",
    "15": "Fs",
    "16": "Fp",
    "17": "Fh",
    "18": "vsi",
    "19": "pe",
    "20": "vas",
    "21": "vap",
    "22": "pi",
    "23": "Y",
    "24": "Fe",
    "25": "de",
    "26": "pd",
    "27": "pp",
    "28": "i",
    "29": "X",
    "30": "cc",
    "31": "dn",
    "32": "Fit",
    "33": "I",
    "34": "ao",
    "35": "vai",
    "36": "Fx",
    "37": "vms",
    "38": "sp",
    "39": "Fpa",
    "40": "Fpt",
    "41": "vmg",
    "42": "di",
    "43": "vsp",
    "44": "pt",
    "45": "vsg",
    "46": "dd",
    "47": "Fat",
    "48": "rg",
    "49": "vss",
    "50": "Faa",
    "51": "vsm",
    "52": "np",
    "53": "W",
    "54": "vmi",
    "55": "vmp",
    "56": "van",
    "57": "Fg",
    "58": "vam",
    "59": "pr",
    "60": "Fd",
    "61": "Z",
    "62": "vmm",
    "63": "Fc",
    "64": "Zp",
    "65": "cs"
  },
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "label2id": {
    "Faa": 50,
    "Fat": 47,
    "Fc": 63,
    "Fd": 60,
    "Fe": 24,
    "Fg": 57,
    "Fh": 17,
    "Fia": 5,
    "Fit": 32,
    "Fp": 16,
    "Fpa": 39,
    "Fpt": 40,
    "Fs": 15,
    "Fx": 36,
    "Fz": 13,
    "I": 33,
    "W": 53,
    "X": 29,
    "Y": 23,
    "Z": 61,
    "Zm": 8,
    "Zp": 64,
    "ao": 34,
    "aq": 10,
    "cc": 30,
    "cs": 65,
    "da": 12,
    "dd": 46,
    "de": 25,
    "di": 42,
    "dn": 31,
    "dp": 0,
    "dt": 7,
    "i": 28,
    "nc": 2,
    "np": 52,
    "p0": 9,
    "pd": 26,
    "pe": 19,
    "pi": 22,
    "pn": 4,
    "pp": 27,
    "pr": 59,
    "pt": 44,
    "px": 1,
    "rg": 48,
    "rn": 14,
    "sp": 38,
    "vag": 6,
    "vai": 35,
    "vam": 58,
    "van": 56,
    "vap": 21,
    "vas": 20,
    "vmg": 41,
    "vmi": 54,
    "vmm": 62,
    "vmn": 11,
    "vmp": 55,
    "vms": 37,
    "vsg": 45,
    "vsi": 18,
    "vsm": 51,
    "vsn": 3,
    "vsp": 43,
    "vss": 49
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.44.2",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 50262
}