{
  "_name_or_path": "PlanTL-GOB-ES/roberta-large-bne",
  "architectures": [
    "RobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.0,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.0,
  "hidden_size": 1024,
  "id2label": {
    "0": "nc",
    "1": "vai",
    "2": "Fpt",
    "3": "sp",
    "4": "dd",
    "5": "vss",
    "6": "X",
    "7": "rn",
    "8": "Y",
    "9": "pe",
    "10": "Fpa",
    "11": "vsp",
    "12": "vap",
    "13": "pt",
    "14": "Fe",
    "15": "Faa",
    "16": "Fz",
    "17": "Fat",
    "18": "vsm",
    "19": "cc",
    "20": "pd",
    "21": "vas",
    "22": "vmi",
    "23": "Zm",
    "24": "vms",
    "25": "Fc",
    "26": "vsg",
    "27": "Fia",
    "28": "Fs",
    "29": "de",
    "30": "di",
    "31": "vsn",
    "32": "i",
    "33": "vam",
    "34": "pr",
    "35": "px",
    "36": "vmn",
    "37": "Fp",
    "38": "ao",
    "39": "Fh",
    "40": "Fd",
    "41": "pi",
    "42": "Z",
    "43": "vmp",
    "44": "pn",
    "45": "rg",
    "46": "Zp",
    "47": "W",
    "48": "vmg",
    "49": "Fit",
    "50": "dn",
    "51": "pp",
    "52": "vmm",
    "53": "vag",
    "54": "vsi",
    "55": "I",
    "56": "aq",
    "57": "Fg",
    "58": "van",
    "59": "dt",
    "60": "dp",
    "61": "p0",
    "62": "da",
    "63": "Fx",
    "64": "np",
    "65": "cs"
  },
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "label2id": {
    "Faa": 15,
    "Fat": 17,
    "Fc": 25,
    "Fd": 40,
    "Fe": 14,
    "Fg": 57,
    "Fh": 39,
    "Fia": 27,
    "Fit": 49,
    "Fp": 37,
    "Fpa": 10,
    "Fpt": 2,
    "Fs": 28,
    "Fx": 63,
    "Fz": 16,
    "I": 55,
    "W": 47,
    "X": 6,
    "Y": 8,
    "Z": 42,
    "Zm": 23,
    "Zp": 46,
    "ao": 38,
    "aq": 56,
    "cc": 19,
    "cs": 65,
    "da": 62,
    "dd": 4,
    "de": 29,
    "di": 30,
    "dn": 50,
    "dp": 60,
    "dt": 59,
    "i": 32,
    "nc": 0,
    "np": 64,
    "p0": 61,
    "pd": 20,
    "pe": 9,
    "pi": 41,
    "pn": 44,
    "pp": 51,
    "pr": 34,
    "pt": 13,
    "px": 35,
    "rg": 45,
    "rn": 7,
    "sp": 3,
    "vag": 53,
    "vai": 1,
    "vam": 33,
    "van": 58,
    "vap": 12,
    "vas": 21,
    "vmg": 48,
    "vmi": 22,
    "vmm": 52,
    "vmn": 36,
    "vmp": 43,
    "vms": 24,
    "vsg": 26,
    "vsi": 54,
    "vsm": 18,
    "vsn": 31,
    "vsp": 11,
    "vss": 5
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.44.2",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 50262
}