asahi417 committed on
Commit
97b0ea3
1 Parent(s): 301b650
Files changed (2)
  1. config.json +239 -0
  2. pytorch_model.bin +3 -0
config.json ADDED
@@ -0,0 +1,239 @@
+ {
+ "_name_or_path": "tner_ckpt/btc_deberta_v3_large/best_model",
+ "architectures": [
+ "DebertaV2ForTokenClassification"
+ ],
+ "attention_probs_dropout_prob": 0.1,
+ "crf_state_dict": {
+ "_constraint_mask": [
+ [
+ 1.0,
+ 1.0,
+ 1.0,
+ 1.0,
+ 0.0,
+ 0.0,
+ 1.0,
+ 0.0,
+ 1.0
+ ],
+ [
+ 1.0,
+ 1.0,
+ 1.0,
+ 0.0,
+ 1.0,
+ 0.0,
+ 1.0,
+ 0.0,
+ 1.0
+ ],
+ [
+ 1.0,
+ 1.0,
+ 1.0,
+ 0.0,
+ 0.0,
+ 1.0,
+ 1.0,
+ 0.0,
+ 1.0
+ ],
+ [
+ 1.0,
+ 1.0,
+ 1.0,
+ 1.0,
+ 0.0,
+ 0.0,
+ 1.0,
+ 0.0,
+ 1.0
+ ],
+ [
+ 1.0,
+ 1.0,
+ 1.0,
+ 0.0,
+ 1.0,
+ 0.0,
+ 1.0,
+ 0.0,
+ 1.0
+ ],
+ [
+ 1.0,
+ 1.0,
+ 1.0,
+ 0.0,
+ 0.0,
+ 1.0,
+ 1.0,
+ 0.0,
+ 1.0
+ ],
+ [
+ 1.0,
+ 1.0,
+ 1.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 1.0,
+ 0.0,
+ 1.0
+ ],
+ [
+ 1.0,
+ 1.0,
+ 1.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 1.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ]
+ ],
+ "end_transitions": [
+ -1.0148895978927612,
+ 1.0671864748001099,
+ -1.6164472103118896,
+ 0.466050386428833,
+ -0.012239390052855015,
+ -1.1088250875473022,
+ 0.40975475311279297
+ ],
+ "start_transitions": [
+ 0.47314950823783875,
+ 0.201548233628273,
+ 1.5476996898651123,
+ -1.4331055879592896,
+ -1.549608826637268,
+ 0.6565876603126526,
+ 0.8080530166625977
+ ],
+ "transitions": [
+ [
+ -0.41058361530303955,
+ 0.1702241599559784,
+ -0.39300161600112915,
+ -0.2474871426820755,
+ 0.8343972563743591,
+ 0.3032238185405731,
+ 0.2561141848564148
+ ],
+ [
+ -0.11347199231386185,
+ -0.24199432134628296,
+ 0.031227607280015945,
+ 0.3765334486961365,
+ 0.019392093643546104,
+ 0.7591969966888428,
+ 0.5199909210205078
+ ],
+ [
+ 0.3379369378089905,
+ -0.743718147277832,
+ -0.1221601814031601,
+ -0.31503763794898987,
+ 0.7697550058364868,
+ 0.4699808657169342,
+ -0.07055890560150146
+ ],
+ [
+ 0.46931782364845276,
+ 0.39986705780029297,
+ 0.5056431889533997,
+ 0.2596401870250702,
+ -0.05581130459904671,
+ -0.18991011381149292,
+ 0.4412626326084137
+ ],
+ [
+ -0.41374099254608154,
+ 0.19209060072898865,
+ -0.4061834514141083,
+ -0.0016257184324786067,
+ -0.28092607855796814,
+ -0.21716837584972382,
+ 0.23422983288764954
+ ],
+ [
+ 0.16249980032444,
+ -0.8426260352134705,
+ 0.12414605170488358,
+ 0.7118894457817078,
+ 0.1571020483970642,
+ 0.21087680757045746,
+ 0.5860539674758911
+ ],
+ [
+ -0.42638248205184937,
+ -0.16581180691719055,
+ 0.51979660987854,
+ 0.4090186059474945,
+ 0.026761069893836975,
+ 0.04246059060096741,
+ 0.6112445592880249
+ ]
+ ]
+ },
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.1,
+ "hidden_size": 1024,
+ "id2label": {
+ "0": "B-LOC",
+ "1": "B-ORG",
+ "2": "B-PER",
+ "3": "I-LOC",
+ "4": "I-ORG",
+ "5": "I-PER",
+ "6": "O"
+ },
+ "initializer_range": 0.02,
+ "intermediate_size": 4096,
+ "label2id": {
+ "B-LOC": 0,
+ "B-ORG": 1,
+ "B-PER": 2,
+ "I-LOC": 3,
+ "I-ORG": 4,
+ "I-PER": 5,
+ "O": 6
+ },
+ "layer_norm_eps": 1e-07,
+ "max_position_embeddings": 512,
+ "max_relative_positions": -1,
+ "model_type": "deberta-v2",
+ "norm_rel_ebd": "layer_norm",
+ "num_attention_heads": 16,
+ "num_hidden_layers": 24,
+ "pad_token_id": 0,
+ "pooler_dropout": 0,
+ "pooler_hidden_act": "gelu",
+ "pooler_hidden_size": 1024,
+ "pos_att_type": [
+ "p2c",
+ "c2p"
+ ],
+ "position_biased_input": false,
+ "position_buckets": 256,
+ "relative_attention": true,
+ "share_att_key": true,
+ "torch_dtype": "float32",
+ "transformers_version": "4.20.1",
+ "type_vocab_size": 0,
+ "vocab_size": 128100
+ }
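
Note: the configuration above describes a 24-layer DeBERTa-v3-large token classifier with seven BIO tags (B-/I- for LOC, ORG, PER, plus O). The extra "crf_state_dict" field stores CRF transition scores, presumably consumed by the tner training/inference code; the plain transformers model does not use it. Below is a minimal loading and inference sketch under the assumption that the checkpoint is published under a Hub id such as "tner/deberta-v3-large-btc" (an assumption; this commit does not state the repository id). It uses per-token argmax decoding rather than the CRF.

import torch
from transformers import AutoModelForTokenClassification, AutoTokenizer

model_id = "tner/deberta-v3-large-btc"  # assumed Hub id; substitute the actual repository
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForTokenClassification.from_pretrained(model_id)

text = "Jack lives in London and works for Google."
inputs = tokenizer(text, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits          # shape: (1, seq_len, 7)
pred_ids = logits.argmax(dim=-1)[0].tolist()
tokens = tokenizer.convert_ids_to_tokens(inputs["input_ids"][0])
print([(tok, model.config.id2label[i]) for tok, i in zip(tokens, pred_ids)])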
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aa770c2d72e8ce121994409b7d7089b221e807ca730f279b7f0e441471fe3ac7
+ size 1736214831