{
  "_name_or_path": "PlanTL-GOB-ES/roberta-large-bne",
  "architectures": [
    "RobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.0,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.0,
  "hidden_size": 1024,
  "id2label": {
    "0": "rn",
    "1": "di",
    "2": "Fd",
    "3": "Fh",
    "4": "pn",
    "5": "Fp",
    "6": "ao",
    "7": "Fc",
    "8": "Fia",
    "9": "Fg",
    "10": "vss",
    "11": "Faa",
    "12": "I",
    "13": "Fat",
    "14": "pp",
    "15": "nc",
    "16": "vmp",
    "17": "Z",
    "18": "aq",
    "19": "px",
    "20": "dp",
    "21": "Zp",
    "22": "vap",
    "23": "vsn",
    "24": "vms",
    "25": "da",
    "26": "Fpa",
    "27": "vsi",
    "28": "vam",
    "29": "Y",
    "30": "vsg",
    "31": "dd",
    "32": "Fz",
    "33": "rg",
    "34": "vas",
    "35": "vsp",
    "36": "W",
    "37": "sp",
    "38": "vmn",
    "39": "vmm",
    "40": "p0",
    "41": "pt",
    "42": "Zm",
    "43": "dn",
    "44": "vai",
    "45": "vmi",
    "46": "Fit",
    "47": "i",
    "48": "cs",
    "49": "vsm",
    "50": "Fs",
    "51": "de",
    "52": "X",
    "53": "pr",
    "54": "cc",
    "55": "vmg",
    "56": "pi",
    "57": "Fe",
    "58": "Fpt",
    "59": "van",
    "60": "pe",
    "61": "vag",
    "62": "pd",
    "63": "np",
    "64": "Fx",
    "65": "dt"
  },
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "label2id": {
    "Faa": 11,
    "Fat": 13,
    "Fc": 7,
    "Fd": 2,
    "Fe": 57,
    "Fg": 9,
    "Fh": 3,
    "Fia": 8,
    "Fit": 46,
    "Fp": 5,
    "Fpa": 26,
    "Fpt": 58,
    "Fs": 50,
    "Fx": 64,
    "Fz": 32,
    "I": 12,
    "W": 36,
    "X": 52,
    "Y": 29,
    "Z": 17,
    "Zm": 42,
    "Zp": 21,
    "ao": 6,
    "aq": 18,
    "cc": 54,
    "cs": 48,
    "da": 25,
    "dd": 31,
    "de": 51,
    "di": 1,
    "dn": 43,
    "dp": 20,
    "dt": 65,
    "i": 47,
    "nc": 15,
    "np": 63,
    "p0": 40,
    "pd": 62,
    "pe": 60,
    "pi": 56,
    "pn": 4,
    "pp": 14,
    "pr": 53,
    "pt": 41,
    "px": 19,
    "rg": 33,
    "rn": 0,
    "sp": 37,
    "vag": 61,
    "vai": 44,
    "vam": 28,
    "van": 59,
    "vap": 22,
    "vas": 34,
    "vmg": 55,
    "vmi": 45,
    "vmm": 39,
    "vmn": 38,
    "vmp": 16,
    "vms": 24,
    "vsg": 30,
    "vsi": 27,
    "vsm": 49,
    "vsn": 23,
    "vsp": 35,
    "vss": 10
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.44.2",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 50262
}