KoichiYasuoka committed
Commit 6ed8143
1 Parent(s): 0210e41

model improved for transformers 4.42

config.json CHANGED
@@ -2,25 +2,13 @@
   "architectures": [
     "MistralForTokenClassification"
   ],
-  "attention_bias": false,
   "attention_dropout": 0.0,
-  "auto_map": {
-    "AutoModelForTokenClassification": "upos.MistralForTokenClassification"
-  },
   "bos_token_id": 1,
   "custom_pipelines": {
     "upos": {
       "impl": "upos.BellmanFordTokenClassificationPipeline",
       "pt": "AutoModelForTokenClassification"
-    },
-    "token-classification": {
-      "impl": "upos.RawTokenClassificationPipeline",
-      "pt": "AutoModelForTokenClassification"
-    },
-    "ner": {
-      "impl": "upos.RawTokenClassificationPipeline",
-      "pt": "AutoModelForTokenClassification"
     }
   },
   "eos_token_id": 2,
   "hidden_act": "silu",
@@ -156,15 +144,13 @@
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "num_key_value_heads": 8,
-  "pretraining_tp": 1,
   "rms_norm_eps": 1e-05,
-  "rope_scaling": null,
   "rope_theta": 10000.0,
   "sliding_window": 4096,
   "tie_word_embeddings": false,
   "tokenizer_class": "LlamaTokenizerFast",
   "torch_dtype": "float32",
-  "transformers_version": "4.41.2",
+  "transformers_version": "4.42.4",
   "use_cache": true,
   "vocab_size": 43317
 }
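With the auto_map entry gone, the model class now resolves to the MistralForTokenClassification that ships with transformers 4.42 itself; only the "upos" pipeline is still loaded from remote code, and the raw "token-classification"/"ner" aliases are dropped. A minimal loading sketch under that assumption (mine, not part of the commit):

# assumes transformers>=4.42 and remote-code trust for the custom "upos" pipeline
from transformers import pipeline
nlp=pipeline("upos","KoichiYasuoka/Swallow-MS-7b-char-upos",trust_remote_code=True,aggregation_strategy="simple")
print(nlp("国境の長いトンネルを抜けると雪国であった。"))  # per-character UPOS tags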
maker.sh CHANGED
@@ -7,43 +7,7 @@ cat << 'EOF' > $TMP
 #! /usr/bin/env deepspeed
 src="KoichiYasuoka/Swallow-MS-7b-upos"
 tgt="KoichiYasuoka/Swallow-MS-7b-char-upos"
-from transformers import LlamaTokenizerFast,MistralModel,MistralPreTrainedModel,AutoConfig,DataCollatorForTokenClassification,TrainingArguments,Trainer
-from transformers.modeling_outputs import TokenClassifierOutput
-
-class MistralForTokenClassification(MistralPreTrainedModel):
-  def __init__(self,config):
-    from torch import nn
-    super().__init__(config)
-    self.num_labels=config.num_labels
-    self.model=MistralModel(config)
-    if hasattr(config,"classifier_dropout") and config.classifier_dropout is not None:
-      classifier_dropout=config.classifier_dropout
-    elif hasattr(config,"hidden_dropout") and config.hidden_dropout is not None:
-      classifier_dropout=config.hidden_dropout
-    else:
-      classifier_dropout=0.1
-    self.dropout=nn.Dropout(classifier_dropout)
-    self.classifier=nn.Linear(config.hidden_size,config.num_labels)
-    self.post_init()
-  def get_input_embeddings(self):
-    return self.model.embed_tokens
-  def set_input_embeddings(self,value):
-    self.model.embed_tokens=value
-  def forward(self,input_ids=None,past_key_values=None,attention_mask=None,position_ids=None,inputs_embeds=None,labels=None,use_cache=None,output_attentions=None,output_hidden_states=None,return_dict=None):
-    return_dict=return_dict if return_dict is not None else self.config.use_return_dict
-    transformer_outputs=self.model(input_ids,past_key_values=past_key_values,attention_mask=attention_mask,position_ids=position_ids,inputs_embeds=inputs_embeds,use_cache=use_cache,output_attentions=output_attentions,output_hidden_states=output_hidden_states,return_dict=return_dict)
-    hidden_states=transformer_outputs[0]
-    hidden_states=self.dropout(hidden_states)
-    logits=self.classifier(hidden_states)
-    loss=None
-    if labels is not None:
-      from torch import nn
-      loss_fct=nn.CrossEntropyLoss()
-      loss=loss_fct(logits.view(-1,self.num_labels),labels.view(-1))
-    if not return_dict:
-      output=(logits,)+transformer_outputs[1:]
-      return ((loss,)+output) if loss is not None else output
-    return TokenClassifierOutput(loss=loss,logits=logits,hidden_states=transformer_outputs.hidden_states,attentions=transformer_outputs.attentions)
+from transformers import LlamaTokenizerFast,MistralForTokenClassification,AutoConfig,DataCollatorForTokenClassification,TrainingArguments,Trainer
 
 class UPOSFileDataset(object):
   def __init__(self,conllu,tokenizer):
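The rewritten import line only resolves on transformers 4.42 or newer, where MistralForTokenClassification entered the public API; on 4.41 it raises ImportError. A hedged version guard one could place at the top of the embedded script (my sketch, not in the commit):

import transformers
from packaging import version
# MistralForTokenClassification is importable from transformers only as of 4.42
assert version.parse(transformers.__version__)>=version.parse("4.42")
from transformers import MistralForTokenClassification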
pytorch_model-00001-of-00006.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5cb0ba6633ea74b22049cea79e435b5a3ad19649a4ff9f48743567b64c902c13
+oid sha256:920de61157bca575606e5d7771021f9e26a2f59e960ea73331d5a851d2be5d9d
 size 4837046848
pytorch_model-00002-of-00006.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:20f447a34503f5ec4003bc9cfbe6c1abafffd48d0a4ce49548563372be559d79
+oid sha256:84004b4e1d2ece78cf6e75316708894bd7d3869eb1b4178052186c6f3adf57e7
 size 4999825256
pytorch_model-00003-of-00006.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:57e46f1de15876e55c70f5cd7f3d1f01bd5bb4aeaeb34bee192bf652605dedb9
+oid sha256:78be053c49eb825ca2218ef2f78b46e3dde12b2d47fed937f327c79471a39301
 size 4999825316
pytorch_model-00004-of-00006.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3c18cd5ab8290dcc966550409a6c550d4ce1df15192cbaf3be94c881ec0dcaa6
+oid sha256:362d2eade25db5473957ab7dd4e6109d4e235c42d4bf8f5836d7e4c24b4454f9
 size 4832018324
pytorch_model-00005-of-00006.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5ae2b56e85c3a9751dce1784b6f4af3d5bdfac7a45cf357e77709a2145a33a88
+oid sha256:30c7e3a2aeb9dfdd442fef34f057e579c594d408493b501e4ccba2d03f326585
 size 4999825320
pytorch_model-00006-of-00006.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1596ed873cf4ca299b38404224d58504ad40b0e3936e8ed9196a8670709e9ef9
+oid sha256:9df0e13a1c799bacf4f739077b2baf63e9db033474b761a2ebda4d30db9b0616
 size 3960601264
pytorch_model.bin.index.json CHANGED
@@ -3,8 +3,6 @@
     "total_size": 28629041392
   },
   "weight_map": {
-    "classifier.bias": "pytorch_model-00006-of-00006.bin",
-    "classifier.weight": "pytorch_model-00006-of-00006.bin",
     "model.embed_tokens.weight": "pytorch_model-00001-of-00006.bin",
     "model.layers.0.input_layernorm.weight": "pytorch_model-00001-of-00006.bin",
     "model.layers.0.mlp.down_proj.weight": "pytorch_model-00001-of-00006.bin",
@@ -294,6 +292,8 @@
     "model.layers.9.self_attn.o_proj.weight": "pytorch_model-00002-of-00006.bin",
     "model.layers.9.self_attn.q_proj.weight": "pytorch_model-00002-of-00006.bin",
     "model.layers.9.self_attn.v_proj.weight": "pytorch_model-00002-of-00006.bin",
-    "model.norm.weight": "pytorch_model-00006-of-00006.bin"
+    "model.norm.weight": "pytorch_model-00006-of-00006.bin",
+    "score.bias": "pytorch_model-00006-of-00006.bin",
+    "score.weight": "pytorch_model-00006-of-00006.bin"
   }
 }
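The weight-map change mirrors the switch of model classes: the hand-rolled head was saved as classifier.weight/classifier.bias, while the upstream MistralForTokenClassification names its head score. A hypothetical one-off rename of the last shard consistent with this index (the commit's actual conversion step is not shown):

import torch
# load the shard that held the classification head, rename its two tensors, save it back
sd=torch.load("pytorch_model-00006-of-00006.bin",map_location="cpu")
sd["score.weight"]=sd.pop("classifier.weight")
sd["score.bias"]=sd.pop("classifier.bias")
torch.save(sd,"pytorch_model-00006-of-00006.bin")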
tokenizer.json CHANGED
@@ -42927,523 +42927,523 @@
   "勅": 42797,
   "婿": 42798,
   "魯": 42799,
[... the character-to-ID assignments of the 517 single-character vocabulary entries (IDs 42800-43316) change in this commit; most of the glyphs were lost in extraction and cannot be reproduced here. Recoverable moves: "𠮟" 42932→42806, "麿" 43219→43023, "忿" 42984→43137, "滿" 43248→43253; "錮" keeps ID 43236 ...]
   },
   "merges": [
     "▁ t",
tokenizer_config.json CHANGED
@@ -1,6 +1,7 @@
 {
   "add_bos_token": true,
   "add_eos_token": false,
+  "add_prefix_space": null,
   "added_tokens_decoder": {
     "0": {
       "content": "<unk>",
@@ -36,7 +37,7 @@
   "model_max_length": 4096,
   "pad_token": "</s>",
   "sep_token": "<s>",
-  "tokenizer_class": "LlamaTokenizer",
+  "tokenizer_class": "LlamaTokenizerFast",
   "unk_token": "<unk>",
   "use_default_system_prompt": false
 }
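Switching tokenizer_class from LlamaTokenizer to LlamaTokenizerFast makes AutoTokenizer return the fast, tokenizers-backed implementation, matching the tokenizer_class already declared in config.json. A quick sanity check using two IDs visible in the tokenizer.json diff above (my sketch):

from transformers import AutoTokenizer
tkz=AutoTokenizer.from_pretrained("KoichiYasuoka/Swallow-MS-7b-char-upos")
print(type(tkz).__name__)  # LlamaTokenizerFast
print(tkz.convert_ids_to_tokens([42797,43236]))  # ['勅', '錮']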
upos.py CHANGED
@@ -1,5 +1,4 @@
-from transformers import TokenClassificationPipeline,MistralModel,MistralPreTrainedModel
-from transformers.modeling_outputs import TokenClassifierOutput
+from transformers import TokenClassificationPipeline
 
 class BellmanFordTokenClassificationPipeline(TokenClassificationPipeline):
   def __init__(self,**kwargs):
@@ -40,41 +39,3 @@ class BellmanFordTokenClassificationPipeline(TokenClassificationPipeline):
       t["text"]=model_outputs["sentence"][t["start"]:t["end"]]
     return w
 
-class RawTokenClassificationPipeline(TokenClassificationPipeline):
-  def check_model_type(self,supported_models):
-    pass
-
-class MistralForTokenClassification(MistralPreTrainedModel):
-  def __init__(self,config):
-    from torch import nn
-    super().__init__(config)
-    self.num_labels=config.num_labels
-    self.model=MistralModel(config)
-    if hasattr(config,"classifier_dropout") and config.classifier_dropout is not None:
-      classifier_dropout=config.classifier_dropout
-    elif hasattr(config,"hidden_dropout") and config.hidden_dropout is not None:
-      classifier_dropout=config.hidden_dropout
-    else:
-      classifier_dropout=0.1
-    self.dropout=nn.Dropout(classifier_dropout)
-    self.classifier=nn.Linear(config.hidden_size,config.num_labels)
-    self.post_init()
-  def get_input_embeddings(self):
-    return self.model.embed_tokens
-  def set_input_embeddings(self,value):
-    self.model.embed_tokens=value
-  def forward(self,input_ids=None,past_key_values=None,attention_mask=None,position_ids=None,inputs_embeds=None,labels=None,use_cache=None,output_attentions=None,output_hidden_states=None,return_dict=None):
-    return_dict=return_dict if return_dict is not None else self.config.use_return_dict
-    transformer_outputs=self.model(input_ids,past_key_values=past_key_values,attention_mask=attention_mask,position_ids=position_ids,inputs_embeds=inputs_embeds,use_cache=use_cache,output_attentions=output_attentions,output_hidden_states=output_hidden_states,return_dict=return_dict)
-    hidden_states=transformer_outputs[0]
-    hidden_states=self.dropout(hidden_states)
-    logits=self.classifier(hidden_states)
-    loss=None
-    if labels is not None:
-      from torch import nn
-      loss_fct=nn.CrossEntropyLoss()
-      loss=loss_fct(logits.view(-1,self.num_labels),labels.view(-1))
-    if not return_dict:
-      output=(logits,)+transformer_outputs[2:]
-      return ((loss,)+output) if loss is not None else output
-    return TokenClassifierOutput(loss=loss,logits=logits,hidden_states=transformer_outputs.hidden_states,attentions=transformer_outputs.attentions)