{
  "_name_or_path": "AutoTrain",
  "_num_labels": 17,
  "architectures": [
    "LongformerForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "attention_window": [512, 512, 512, 512, 512, 512, 512, 512, 512, 512, 512, 512],
  "bos_token_id": 0,
  "classifier_dropout": null,
  "directionality": "bidi",
  "eos_token_id": 2,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "dt_zh_00",
    "1": "dt_zh_01",
    "2": "dt_zh_02",
    "3": "dt_zh_03",
    "4": "dt_zh_04",
    "5": "dt_zh_05",
    "6": "dt_zh_06",
    "7": "dt_zh_07",
    "8": "dt_zh_08",
    "9": "dt_zh_09",
    "10": "dt_zh_10",
    "11": "dt_zh_11",
    "12": "dt_zh_12",
    "13": "dt_zh_13",
    "14": "dt_zh_14",
    "15": "dt_zh_15",
    "16": "dt_zh_16"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "dt_zh_00": 0,
    "dt_zh_01": 1,
    "dt_zh_02": 2,
    "dt_zh_03": 3,
    "dt_zh_04": 4,
    "dt_zh_05": 5,
    "dt_zh_06": 6,
    "dt_zh_07": 7,
    "dt_zh_08": 8,
    "dt_zh_09": 9,
    "dt_zh_10": 10,
    "dt_zh_11": 11,
    "dt_zh_12": 12,
    "dt_zh_13": 13,
    "dt_zh_14": 14,
    "dt_zh_15": 15,
    "dt_zh_16": 16
  },
  "layer_norm_eps": 1e-12,
  "max_length": 192,
  "max_position_embeddings": 4098,
  "model_type": "longformer",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "output_past": true,
  "pad_token_id": 1,
  "padding": "max_length",
  "pooler_fc_size": 768,
  "pooler_num_attention_heads": 12,
  "pooler_num_fc_layers": 3,
  "pooler_size_per_head": 128,
  "pooler_type": "first_token_transform",
  "position_embedding_type": "absolute",
  "problem_type": "single_label_classification",
  "sep_token_id": 2,
  "torch_dtype": "float32",
  "transformers_version": "4.20.0",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 21128
}
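The config above describes a 12-layer LongformerForSequenceClassification head with 17 single-label classes (dt_zh_00 through dt_zh_16) over a 21,128-token Chinese vocabulary. A minimal inference sketch follows, assuming the config ships inside a standard AutoTrain checkpoint directory with its tokenizer files; the directory path is a placeholder, since the config itself does not name a published model ID. Everything else (padding to max_length 192, argmax over 17 logits, the id2label lookup) follows directly from the fields above.

```python
# Minimal inference sketch for the config above; not the author's exact pipeline.
# "path/to/checkpoint" is a hypothetical placeholder for the AutoTrain output dir.
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

model_dir = "path/to/checkpoint"  # placeholder; substitute the real checkpoint

tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = AutoModelForSequenceClassification.from_pretrained(model_dir)
model.eval()

# The config pads/truncates to max_length=192, even though the Longformer
# position table (max_position_embeddings: 4098) would allow far longer inputs.
inputs = tokenizer(
    "这是一个测试句子。",  # "This is a test sentence." — any Chinese input text
    truncation=True,
    padding="max_length",
    max_length=192,
    return_tensors="pt",
)

with torch.no_grad():
    logits = model(**inputs).logits  # shape: (1, 17)

# problem_type is single_label_classification, so a plain argmax picks the class.
pred_id = int(logits.argmax(dim=-1))
print(model.config.id2label[pred_id])  # one of "dt_zh_00" ... "dt_zh_16"
```

Note that transformers converts the string keys of `id2label` to integers at load time, which is why the integer index lookup in the last line works as written.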
|
|