{
"_name_or_path": "roberta-large-tweebank-ner",
"adapters": {
"adapters": {},
"config_map": {},
"fusion_config_map": {},
"fusions": {}
},
"architectures": [
"RobertaForTokenClassification"
],
"attention_probs_dropout_prob": 0.1,
"bos_token_id": 0,
"classifier_dropout": null,
"crf_state_dict": {
"_constraint_mask": [
[
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
0.0,
1.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
1.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
0.0,
1.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
1.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0
]
],
"end_transitions": [
1.6543885469436646,
1.4006357192993164,
0.4444950819015503,
-0.8417924046516418,
-1.5104246139526367,
3.0435879230499268,
2.1962571144104004,
-0.8038378357887268,
-1.0722748041152954
],
"start_transitions": [
1.2061455249786377,
0.6586089134216309,
0.4692096412181854,
0.5858622789382935,
-0.630755603313446,
-0.32020196318626404,
-0.5469271540641785,
-0.3013189435005188,
-0.14686475694179535
],
"transitions": [
[
0.13537588715553284,
-0.13092884421348572,
0.4625251293182373,
0.23847511410713196,
0.34067708253860474,
0.12975099682807922,
0.5387805700302124,
0.09070442616939545,
0.3408451974391937
],
[
-0.14204159379005432,
0.37123164534568787,
-0.21415242552757263,
0.011411471292376518,
-0.4560433328151703,
-0.34037908911705017,
-0.2033482789993286,
-0.906518816947937,
-0.47004014253616333
],
[
-0.17460417747497559,
-0.1531568169593811,
-0.006479573901742697,
-0.4537687301635742,
-0.050648175179958344,
0.38808903098106384,
-0.16089072823524475,
-0.04831279069185257,
0.05039094015955925
],
[
-0.11159854382276535,
-0.41543281078338623,
0.555843710899353,
0.014951681718230247,
0.08906713128089905,
-0.13042980432510376,
-0.09327378123998642,
0.5290833711624146,
0.10889127105474472
],
[
0.33919236063957214,
-0.4331152141094208,
-0.38547754287719727,
0.32877588272094727,
-0.5369617342948914,
0.07536065578460693,
-0.31710729002952576,
0.037764519453048706,
-0.3209592401981354
],
[
0.027896448969841003,
0.2133970558643341,
0.5522003173828125,
-0.2629242539405823,
-0.01897781528532505,
-0.07080112397670746,
-0.8012456297874451,
-0.007121096830815077,
0.5570559501647949
],
[
-0.130000039935112,
-0.26081007719039917,
-0.159550741314888,
-0.4839116334915161,
0.49926015734672546,
0.2511492371559143,
0.014935985207557678,
-0.04589146375656128,
0.4296587407588959
],
[
0.17752425372600555,
-0.0608215294778347,
0.03037858009338379,
0.09913276135921478,
0.2518308758735657,
-0.5100915431976318,
0.1673133820295334,
-0.06604345887899399,
0.18997260928153992
],
[
-0.2999376654624939,
0.03706861287355423,
-0.3692086338996887,
0.14070719480514526,
-0.05091197043657303,
0.14978167414665222,
0.547923743724823,
0.015628628432750702,
0.17064540088176727
]
]
},
"eos_token_id": 2,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 1024,
"id2label": {
"0": "B-LOC",
"1": "B-MISC",
"2": "B-ORG",
"3": "B-PER",
"4": "I-LOC",
"5": "I-MISC",
"6": "I-ORG",
"7": "I-PER",
"8": "O"
},
"initializer_range": 0.02,
"intermediate_size": 4096,
"label2id": {
"B-LOC": 0,
"B-MISC": 1,
"B-ORG": 2,
"B-PER": 3,
"I-LOC": 4,
"I-MISC": 5,
"I-ORG": 6,
"I-PER": 7,
"O": 8
},
"layer_norm_eps": 1e-05,
"max_position_embeddings": 514,
"model_type": "roberta",
"num_attention_heads": 16,
"num_hidden_layers": 24,
"pad_token_id": 1,
"position_embedding_type": "absolute",
"torch_dtype": "float32",
"transformers_version": "4.11.3",
"type_vocab_size": 1,
"use_cache": true,
"vocab_size": 50265
}