{
"_name_or_path": "cner_output/model/baseline_2021/t_roberta_base_2019_concat/best_model",
"architectures": [
"RobertaForTokenClassification"
],
"attention_probs_dropout_prob": 0.1,
"bos_token_id": 0,
"classifier_dropout": null,
"crf_state_dict": {
"_constraint_mask": [
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0
]
],
"end_transitions": [
0.40813568234443665,
-0.9656944870948792,
-0.23096604645252228,
-0.7424056529998779,
0.06271817535161972,
-0.08851249516010284,
0.427242636680603,
0.8107076287269592,
0.75925213098526,
0.046232301741838455,
-0.9167554378509521,
0.290357381105423,
1.037453055381775,
0.4241470694541931,
-0.20250964164733887
],
"start_transitions": [
-0.19599823653697968,
-0.09508302807807922,
-0.2742212116718292,
1.655328631401062,
0.2364160120487213,
-0.18163415789604187,
-0.5102927088737488,
2.0175106525421143,
0.8966974020004272,
-1.3882125616073608,
-0.12662316858768463,
1.786523699760437,
-0.6999648213386536,
-0.7579379677772522,
-1.5308568477630615
],
"transitions": [
[
-0.355110764503479,
-0.2056400328874588,
0.15599896013736725,
-0.18394044041633606,
-0.0003968794480897486,
0.10536038875579834,
0.463561475276947,
-0.0618780180811882,
0.013926028273999691,
-0.3195348381996155,
0.09985961019992828,
-0.02738342434167862,
0.33145588636398315,
0.27501580119132996,
-0.19393549859523773
],
[
0.024281613528728485,
-0.1727910190820694,
0.012486646883189678,
0.12692007422447205,
0.3267900049686432,
0.17165137827396393,
0.43007922172546387,
-0.04601696506142616,
-0.08021749556064606,
-0.11490590125322342,
-0.3332799971103668,
0.03123163990676403,
-0.1380016803741455,
0.30161723494529724,
-0.1864267885684967
],
[
-0.06675279885530472,
-0.1385592520236969,
0.2339944690465927,
-0.010947855189442635,
-0.26166200637817383,
-0.014474852941930294,
-0.43208983540534973,
0.3126372694969177,
0.06294334679841995,
-0.6153910756111145,
-0.018000507727265358,
-0.17603769898414612,
-0.3979818522930145,
0.1724555790424347,
0.09848079085350037
],
[
0.08595029264688492,
-0.09957896918058395,
-0.21321150660514832,
-0.10036883503198624,
-0.2448240965604782,
0.11451995372772217,
0.005236147437244654,
-0.3689935803413391,
0.05055497586727142,
0.07847447693347931,
-0.036021389067173004,
-0.19114325940608978,
0.26740312576293945,
0.09406544268131256,
-0.1619744449853897
],
[
-0.34500449895858765,
0.17636552453041077,
0.3426302969455719,
0.3570649325847626,
-0.033118460327386856,
0.25352025032043457,
0.2820196747779846,
-0.2842826247215271,
-0.0757998451590538,
-0.20751041173934937,
0.08382812887430191,
-0.05385832116007805,
-0.6202051639556885,
0.26277902722358704,
-0.19061028957366943
],
[
-0.15309004485607147,
-0.03818637877702713,
-0.3205626308917999,
-0.007152817212045193,
-0.23848870396614075,
-0.4665132462978363,
0.29147228598594666,
0.016402531415224075,
-0.24547792971134186,
-0.011487615294754505,
-0.052587736397981644,
0.019970647990703583,
-0.2511771619319916,
0.04204389080405235,
0.25347983837127686
],
[
-0.18473607301712036,
0.49087584018707275,
-0.1454090178012848,
0.060093726962804794,
-0.11222531646490097,
-0.009585360065102577,
0.14939837157726288,
0.04477058723568916,
-0.22784698009490967,
-0.23066850006580353,
0.035520922392606735,
0.3430928885936737,
0.013369472697377205,
-0.3202819526195526,
-0.2682534158229828
],
[
-0.08595593273639679,
0.16982443630695343,
-0.24643993377685547,
0.21543455123901367,
0.2005544751882553,
-0.07169633358716965,
0.26921454071998596,
0.03557285666465759,
-0.3000183403491974,
-0.12360482662916183,
-0.2618837058544159,
-0.05183147266507149,
0.164887472987175,
-0.3959388732910156,
-0.4992673993110657
],
[
0.5432504415512085,
0.27023521065711975,
0.23970918357372284,
-0.07022026181221008,
0.7626280784606934,
-0.18624334037303925,
-0.551729142665863,
0.15499942004680634,
-0.059867601841688156,
0.19733677804470062,
0.13143454492092133,
0.20088809728622437,
0.23395177721977234,
0.10379340499639511,
-0.07547645270824432
],
[
0.3556808829307556,
0.17868858575820923,
-0.0783262699842453,
0.3939509987831116,
-0.24936209619045258,
-0.0723569318652153,
0.34536612033843994,
0.07597124576568604,
-0.46351075172424316,
0.30960139632225037,
0.16907134652137756,
0.18229074776172638,
0.0846860334277153,
-0.09902326762676239,
-0.01117414515465498
],
[
0.11290524154901505,
-0.3695250451564789,
0.17961908876895905,
0.2828933298587799,
0.01695784367620945,
-0.053896352648735046,
0.3965355455875397,
-0.182089701294899,
-0.015261529013514519,
-0.4007752239704132,
0.02188030816614628,
0.15352727472782135,
-0.08320505172014236,
0.29708775877952576,
0.26168277859687805
],
[
-0.15201331675052643,
-0.02467992715537548,
0.0368831492960453,
-0.29199400544166565,
0.07536514103412628,
0.1266368180513382,
-0.043436527252197266,
0.24213790893554688,
-0.20409177243709564,
-0.4188157916069031,
-0.22468942403793335,
-0.7173008322715759,
0.04286476597189903,
0.018884999677538872,
0.0638054609298706
],
[
-0.3171590268611908,
-0.03850363567471504,
0.32614806294441223,
0.14585517346858978,
0.25313910841941833,
0.09943120926618576,
-0.08848121762275696,
-0.14669257402420044,
-0.17915785312652588,
-0.2008930891752243,
0.2775852382183075,
0.02613728679716587,
-0.04968656972050667,
0.5244881510734558,
-0.010464445687830448
],
[
-0.2951357364654541,
0.14690770208835602,
-0.528136670589447,
-0.16182759404182434,
-0.18725460767745972,
0.04632202535867691,
0.504802942276001,
0.1270989626646042,
0.027206480503082275,
0.029595883563160896,
0.4417790174484253,
-0.10742947459220886,
0.06234097108244896,
0.48277929425239563,
-0.12036901712417603
],
[
0.2361898571252823,
-0.008896274492144585,
-0.7292954921722412,
0.1873794049024582,
-0.07346121966838837,
0.22039586305618286,
0.10992086678743362,
0.04667862504720688,
-0.3448407053947449,
-0.2950391471385956,
0.0212259478867054,
0.16021989285945892,
-0.3255569636821747,
0.004677820950746536,
-0.1370781809091568
]
]
},
"eos_token_id": 2,
"gradient_checkpointing": false,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"id2label": {
"0": "B-corporation",
"1": "B-creative_work",
"2": "B-event",
"3": "B-group",
"4": "B-location",
"5": "B-person",
"6": "B-product",
"7": "I-corporation",
"8": "I-creative_work",
"9": "I-event",
"10": "I-group",
"11": "I-location",
"12": "I-person",
"13": "I-product",
"14": "O"
},
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"B-corporation": 0,
"B-creative_work": 1,
"B-event": 2,
"B-group": 3,
"B-location": 4,
"B-person": 5,
"B-product": 6,
"I-corporation": 7,
"I-creative_work": 8,
"I-event": 9,
"I-group": 10,
"I-location": 11,
"I-person": 12,
"I-product": 13,
"O": 14
},
"layer_norm_eps": 1e-05,
"max_position_embeddings": 514,
"model_type": "roberta",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 1,
"position_embedding_type": "absolute",
"torch_dtype": "float32",
"transformers_version": "4.17.0",
"type_vocab_size": 1,
"use_cache": true,
"vocab_size": 50265
}