beki committed
Commit 8e571e0
1 Parent(s): 957745b

add config

Files changed (1)
  1. config.json +3 -5
config.json CHANGED
@@ -1,9 +1,6 @@
 {
   "_name_or_path": "distilbert-base-cased",
   "activation": "gelu",
-  "architectures": [
-    "DistilBertForMaskedLM"
-  ],
   "attention_dropout": 0.1,
   "dim": 768,
   "dropout": 0.1,
@@ -13,11 +10,12 @@
   "model_type": "distilbert",
   "n_heads": 12,
   "n_layers": 6,
+  "output_past": true,
   "pad_token_id": 0,
   "qa_dropout": 0.1,
   "seq_classif_dropout": 0.2,
   "sinusoidal_pos_embds": false,
   "tie_weights_": true,
-  "transformers_version": "4.27.2",
-  "vocab_size": 30522
+  "transformers_version": "4.21.2",
+  "vocab_size": 28996
 }
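
The substantive change is the vocab_size: 28996 is the cased WordPiece vocabulary size used by distilbert-base-cased, whereas the previous value, 30522, is the uncased vocabulary size. As a sanity check, here is a minimal sketch in Python of loading the updated config and inspecting the fields this commit touches. Loading from a local "config.json" path is an assumption, since the full Hub repo id is not shown on this page.

from transformers import DistilBertConfig

# Minimal sketch: assumes the post-commit config.json has been saved to the
# current directory (the full Hub repo id is not visible here, so we load a
# local file rather than pulling from the Hub).
config = DistilBertConfig.from_json_file("config.json")

print(config.vocab_size)   # 28996 -- cased vocabulary size set by this commit
print(config.output_past)  # True  -- key added by this commit

Unknown keys such as "output_past" are kept by transformers as plain attributes on the config object, so they survive a round-trip through from_json_file / to_json_file.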