sachiniyer committed on
Commit 7bb0925
1 Parent(s): 0ae62c7

Upload 5 files

config.json CHANGED
@@ -1,33 +1,24 @@
 {
- "architectures": [
-   "DistilBertForSequenceClassification"
- ],
- "model_type": "distilbert",
- "vocab_size": 30522,
- "hidden_size": 768,
- "num_hidden_layers": 6,
- "num_attention_heads": 12,
- "hidden_dropout_prob": 0.1,
- "attention_probs_dropout_prob": 0.1,
- "max_position_embeddings": 512,
- "initializer_range": 0.02,
- "layer_norm_eps": 1e-12,
- "output_past": true,
- "pad_token_id": 0,
- "gradient_checkpointing": false,
- "transformers_version": "4.8.2",
- "distilbert": {
-   "vocab_size": 30522,
-   "dim": 768,
-   "n_layers": 6,
-   "n_heads": 12,
-   "dropout": 0.1,
-   "attention_dropout": 0.1,
-   "activation": "gelu",
-   "initializer_range": 0.02,
-   "output_attentions": false,
-   "output_hidden_states": false,
-   "return_dict": true
- },
- "num_labels": 6
+ "_name_or_path": "distilbert-base-uncased",
+ "activation": "gelu",
+ "architectures": [
+   "DistilBertModel"
+ ],
+ "attention_dropout": 0.1,
+ "dim": 768,
+ "dropout": 0.1,
+ "hidden_dim": 3072,
+ "initializer_range": 0.02,
+ "max_position_embeddings": 512,
+ "model_type": "distilbert",
+ "n_heads": 12,
+ "n_layers": 6,
+ "pad_token_id": 0,
+ "qa_dropout": 0.1,
+ "seq_classif_dropout": 0.2,
+ "sinusoidal_pos_embds": false,
+ "tie_weights_": true,
+ "torch_dtype": "float32",
+ "transformers_version": "4.28.1",
+ "vocab_size": 30522
 }
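The new config.json is the stock export written by transformers 4.28.1: the hand-rolled nested "distilbert" block and the classification fields ("num_labels": 6, DistilBertForSequenceClassification) are gone, and "architectures" now names the bare DistilBertModel encoder. A minimal sketch for inspecting it, assuming "./" points at a local checkout of this repo:

```python
# Inspect the rewritten config (the "./" path is an assumption: a local checkout of the repo).
from transformers import AutoConfig

config = AutoConfig.from_pretrained("./")
print(config.model_type)      # "distilbert"
print(config.architectures)   # ["DistilBertModel"] -- bare encoder, no classifier head
print(config.dim, config.n_layers, config.n_heads)  # 768, 6, 12
```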
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fb6324bd570c81b64fce81ab244416f81f79ef5ebe85d611481802ca57af94d4
+ size 265484701
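pytorch_model.bin is tracked with Git LFS, so the diff shows only the pointer: the spec version, the blob's sha256 oid, and its size (265,484,701 bytes, roughly what float32 DistilBERT-base weights come to). A hedged sketch for checking a downloaded blob against that pointer; the local filename is an assumption:

```python
# Verify a downloaded pytorch_model.bin against the LFS pointer above.
import hashlib
import os

EXPECTED_OID = "fb6324bd570c81b64fce81ab244416f81f79ef5ebe85d611481802ca57af94d4"
EXPECTED_SIZE = 265484701  # bytes, from the pointer file

path = "pytorch_model.bin"  # assumed local path to the downloaded blob
sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha.update(chunk)

assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"
assert sha.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("pytorch_model.bin matches the LFS pointer")
```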
special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "cls_token": "[CLS]",
+   "mask_token": "[MASK]",
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "unk_token": "[UNK]"
+ }
tokenizer_config.json ADDED
@@ -0,0 +1,16 @@
+ {
+   "clean_up_tokenization_spaces": true,
+   "cls_token": "[CLS]",
+   "do_basic_tokenize": true,
+   "do_lower_case": true,
+   "mask_token": "[MASK]",
+   "model_max_length": 512,
+   "never_split": null,
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "strip_accents": null,
+   "tokenize_chinese_chars": true,
+   "tokenizer_class": "DistilBertTokenizer",
+   "truncation": true,
+   "unk_token": "[UNK]"
+ }
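Together with special_tokens_map.json and vocab.txt, this config reconstructs the standard uncased WordPiece tokenizer: lowercasing on, model_max_length of 512, and DistilBertTokenizer as the declared class. A minimal sketch, again assuming "./" is a local checkout of the repo:

```python
# Load the tokenizer defined by tokenizer_config.json, special_tokens_map.json and vocab.txt.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./")
print(type(tok).__name__)            # DistilBertTokenizerFast (or DistilBertTokenizer)
print(tok.cls_token, tok.sep_token)  # [CLS] [SEP]

enc = tok("Hello World!", truncation=True, max_length=512)
print(enc["input_ids"])              # lowercased WordPiece ids wrapped in [CLS]/[SEP]
```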
vocab.txt ADDED
The diff for this file is too large to render. See raw diff
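With all five files in place the repo loads like any other Hugging Face checkpoint; since the declared architecture is the bare DistilBertModel, a forward pass returns 768-dimensional hidden states rather than class logits. A hedged end-to-end sketch ("./" and the example sentence are assumptions):

```python
# Run the uploaded checkpoint as a plain feature extractor.
import torch
from transformers import AutoModel, AutoTokenizer

tok = AutoTokenizer.from_pretrained("./")
model = AutoModel.from_pretrained("./")
model.eval()

inputs = tok("distilbert feature extraction test", return_tensors="pt")
with torch.no_grad():
    out = model(**inputs)

print(out.last_hidden_state.shape)  # (1, seq_len, 768) -- hidden states, not logits
```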