{
  "_name_or_path": "PlanTL-GOB-ES/roberta-large-bne",
  "architectures": [
    "RobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.0,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.0,
  "hidden_size": 1024,
  "id2label": {
    "0": "Z",
    "1": "vsp",
    "2": "vms",
    "3": "vsg",
    "4": "vss",
    "5": "Fc",
    "6": "vmn",
    "7": "Fpt",
    "8": "rn",
    "9": "dp",
    "10": "vas",
    "11": "sp",
    "12": "vsm",
    "13": "vam",
    "14": "Fs",
    "15": "vai",
    "16": "vag",
    "17": "np",
    "18": "Fd",
    "19": "pi",
    "20": "Fp",
    "21": "pr",
    "22": "Zm",
    "23": "vap",
    "24": "dt",
    "25": "I",
    "26": "ao",
    "27": "Fh",
    "28": "i",
    "29": "de",
    "30": "dn",
    "31": "pe",
    "32": "van",
    "33": "aq",
    "34": "nc",
    "35": "dd",
    "36": "da",
    "37": "vmm",
    "38": "vsn",
    "39": "px",
    "40": "vmi",
    "41": "cc",
    "42": "Fat",
    "43": "pd",
    "44": "pn",
    "45": "Fe",
    "46": "vmp",
    "47": "Fz",
    "48": "pp",
    "49": "Faa",
    "50": "Fpa",
    "51": "W",
    "52": "Fit",
    "53": "Fx",
    "54": "cs",
    "55": "di",
    "56": "Fg",
    "57": "vmg",
    "58": "Y",
    "59": "rg",
    "60": "p0",
    "61": "Zp",
    "62": "X",
    "63": "Fia",
    "64": "vsi",
    "65": "pt"
  },
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "label2id": {
    "Faa": 49,
    "Fat": 42,
    "Fc": 5,
    "Fd": 18,
    "Fe": 45,
    "Fg": 56,
    "Fh": 27,
    "Fia": 63,
    "Fit": 52,
    "Fp": 20,
    "Fpa": 50,
    "Fpt": 7,
    "Fs": 14,
    "Fx": 53,
    "Fz": 47,
    "I": 25,
    "W": 51,
    "X": 62,
    "Y": 58,
    "Z": 0,
    "Zm": 22,
    "Zp": 61,
    "ao": 26,
    "aq": 33,
    "cc": 41,
    "cs": 54,
    "da": 36,
    "dd": 35,
    "de": 29,
    "di": 55,
    "dn": 30,
    "dp": 9,
    "dt": 24,
    "i": 28,
    "nc": 34,
    "np": 17,
    "p0": 60,
    "pd": 43,
    "pe": 31,
    "pi": 19,
    "pn": 44,
    "pp": 48,
    "pr": 21,
    "pt": 65,
    "px": 39,
    "rg": 59,
    "rn": 8,
    "sp": 11,
    "vag": 16,
    "vai": 15,
    "vam": 13,
    "van": 32,
    "vap": 23,
    "vas": 10,
    "vmg": 57,
    "vmi": 40,
    "vmm": 37,
    "vmn": 6,
    "vmp": 46,
    "vms": 2,
    "vsg": 3,
    "vsi": 64,
    "vsm": 12,
    "vsn": 38,
    "vsp": 1,
    "vss": 4
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.44.2",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 50262
}