{
"_name_or_path": "Team-PIXEL/pixel-base",
"architectures": [
"PIXELForBiaffineParsing"
],
"attention_probs_dropout_prob": 0.1,
"decoder_hidden_size": 512,
"decoder_intermediate_size": 2048,
"decoder_num_attention_heads": 16,
"decoder_num_hidden_layers": 8,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"id2label": {
"0": "_",
"1": "acl",
"2": "advcl",
"3": "advmod",
"4": "amod",
"5": "appos",
"6": "aux",
"7": "case",
"8": "cc",
"9": "ccomp",
"10": "clf",
"11": "compound",
"12": "conj",
"13": "cop",
"14": "csubj",
"15": "dep",
"16": "det",
"17": "discourse",
"18": "dislocated",
"19": "expl",
"20": "fixed",
"21": "flat",
"22": "goeswith",
"23": "iobj",
"24": "list",
"25": "mark",
"26": "nmod",
"27": "nsubj",
"28": "nummod",
"29": "obj",
"30": "obl",
"31": "orphan",
"32": "parataxis",
"33": "punct",
"34": "reparandum",
"35": "root",
"36": "vocative",
"37": "xcomp"
},
"image_size": [
16,
4096
],
"initializer_range": 0.02,
"intermediate_size": 3072,
"interpolate_pos_encoding": false,
"label2id": {
"_": 0,
"acl": 1,
"advcl": 2,
"advmod": 3,
"amod": 4,
"appos": 5,
"aux": 6,
"case": 7,
"cc": 8,
"ccomp": 9,
"clf": 10,
"compound": 11,
"conj": 12,
"cop": 13,
"csubj": 14,
"dep": 15,
"det": 16,
"discourse": 17,
"dislocated": 18,
"expl": 19,
"fixed": 20,
"flat": 21,
"goeswith": 22,
"iobj": 23,
"list": 24,
"mark": 25,
"nmod": 26,
"nsubj": 27,
"nummod": 28,
"obj": 29,
"obl": 30,
"orphan": 31,
"parataxis": 32,
"punct": 33,
"reparandum": 34,
"root": 35,
"vocative": 36,
"xcomp": 37
},
"layer_norm_eps": 1e-12,
"mask_ratio": 0.25,
"model_type": "pixel",
"norm_pix_loss": true,
"num_attention_heads": 12,
"num_channels": 3,
"num_hidden_layers": 12,
"pad_token_id": -100,
"patch_size": 16,
"qkv_bias": true,
"torch_dtype": "float32",
"transformers_version": "4.17.0"
}