{
  "_name_or_path": "./models/best_model",
  "activation": "gelu",
  "architectures": [
    "DistilBertForTokenClassification"
  ],
  "attention_dropout": 0.1,
  "dim": 768,
  "dropout": 0.1,
  "hidden_dim": 3072,
  "id2label": {
    "0": "O",
    "1": "B-ACCOUNTNUM",
    "2": "I-ACCOUNTNUM",
    "3": "B-BUILDINGNUM",
    "4": "I-BUILDINGNUM",
    "5": "B-CITY",
    "6": "I-CITY",
    "7": "B-CREDITCARDNUMBER",
    "8": "I-CREDITCARDNUMBER",
    "9": "B-DATEOFBIRTH",
    "10": "I-DATEOFBIRTH",
    "11": "B-DRIVERLICENSENUM",
    "12": "I-DRIVERLICENSENUM",
    "13": "B-EMAIL",
    "14": "I-EMAIL",
    "15": "B-GIVENNAME",
    "16": "I-GIVENNAME",
    "17": "B-IDCARDNUM",
    "18": "I-IDCARDNUM",
    "19": "B-PASSWORD",
    "20": "I-PASSWORD",
    "21": "B-SOCIALNUM",
    "22": "I-SOCIALNUM",
    "23": "B-STREET",
    "24": "I-STREET",
    "25": "B-SURNAME",
    "26": "I-SURNAME",
    "27": "B-TAXNUM",
    "28": "I-TAXNUM",
    "29": "B-TELEPHONENUM",
    "30": "I-TELEPHONENUM",
    "31": "B-USERNAME",
    "32": "I-USERNAME",
    "33": "B-ZIPCODE",
    "34": "I-ZIPCODE"
  },
  "initializer_range": 0.02,
  "label2id": {
    "B-ACCOUNTNUM": 1,
    "B-BUILDINGNUM": 3,
    "B-CITY": 5,
    "B-CREDITCARDNUMBER": 7,
    "B-DATEOFBIRTH": 9,
    "B-DRIVERLICENSENUM": 11,
    "B-EMAIL": 13,
    "B-GIVENNAME": 15,
    "B-IDCARDNUM": 17,
    "B-PASSWORD": 19,
    "B-SOCIALNUM": 21,
    "B-STREET": 23,
    "B-SURNAME": 25,
    "B-TAXNUM": 27,
    "B-TELEPHONENUM": 29,
    "B-USERNAME": 31,
    "B-ZIPCODE": 33,
    "I-ACCOUNTNUM": 2,
    "I-BUILDINGNUM": 4,
    "I-CITY": 6,
    "I-CREDITCARDNUMBER": 8,
    "I-DATEOFBIRTH": 10,
    "I-DRIVERLICENSENUM": 12,
    "I-EMAIL": 14,
    "I-GIVENNAME": 16,
    "I-IDCARDNUM": 18,
    "I-PASSWORD": 20,
    "I-SOCIALNUM": 22,
    "I-STREET": 24,
    "I-SURNAME": 26,
    "I-TAXNUM": 28,
    "I-TELEPHONENUM": 30,
    "I-USERNAME": 32,
    "I-ZIPCODE": 34,
    "O": 0
  },
  "max_position_embeddings": 512,
  "model_type": "distilbert",
  "n_heads": 12,
  "n_layers": 6,
  "output_past": true,
  "pad_token_id": 0,
  "qa_dropout": 0.1,
  "seq_classif_dropout": 0.2,
  "sinusoidal_pos_embds": false,
  "tie_weights_": true,
  "torch_dtype": "float32",
  "transformers_version": "4.46.0",
  "vocab_size": 119547
}