setu4993 committed on
Commit
cc32fa2
1 Parent(s): 082cccc

Re-export from latest version of `transformers`

Browse files
.gitattributes CHANGED
@@ -7,3 +7,4 @@
7
  *.ot filter=lfs diff=lfs merge=lfs -text
8
  *.onnx filter=lfs diff=lfs merge=lfs -text
9
  *.msgpack filter=lfs diff=lfs merge=lfs -text
 
 
7
  *.ot filter=lfs diff=lfs merge=lfs -text
8
  *.onnx filter=lfs diff=lfs merge=lfs -text
9
  *.msgpack filter=lfs diff=lfs merge=lfs -text
10
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
config.json CHANGED
@@ -2,6 +2,7 @@
2
  "architectures": ["BertModel"],
3
  "attention_probs_dropout_prob": 0.1,
4
  "classifier_dropout": null,
 
5
  "hidden_act": "gelu",
6
  "hidden_dropout_prob": 0.1,
7
  "hidden_size": 768,
@@ -15,7 +16,7 @@
15
  "pad_token_id": 0,
16
  "position_embedding_type": "absolute",
17
  "torch_dtype": "float32",
18
- "transformers_version": "4.12.5",
19
  "type_vocab_size": 2,
20
  "use_cache": true,
21
  "vocab_size": 501153
 
2
  "architectures": ["BertModel"],
3
  "attention_probs_dropout_prob": 0.1,
4
  "classifier_dropout": null,
5
+ "gradient_checkpointing": false,
6
  "hidden_act": "gelu",
7
  "hidden_dropout_prob": 0.1,
8
  "hidden_size": 768,
 
16
  "pad_token_id": 0,
17
  "position_embedding_type": "absolute",
18
  "torch_dtype": "float32",
19
+ "transformers_version": "4.25.1",
20
  "type_vocab_size": 2,
21
  "use_cache": true,
22
  "vocab_size": 501153
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:e07832909d014a85584fd6fd4d1192ef3752cf46a4dfa8825dfbae1193d6c425
3
- size 1883757089
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5f45b674a2e511b204647f9e2cb056da656eedaee063a5cef435777b5a5ca1bf
3
+ size 1883775789
special_tokens_map.json CHANGED
@@ -1 +1,7 @@
1
- {"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
 
 
 
 
 
 
 
1
+ {
2
+ "cls_token": "[CLS]",
3
+ "mask_token": "[MASK]",
4
+ "pad_token": "[PAD]",
5
+ "sep_token": "[SEP]",
6
+ "unk_token": "[UNK]"
7
+ }
tf_model.h5 CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:7b7f8d1dd717d1d42347cd6847867dc171ea4ebe88ee66e0edd7579984d39219
3
- size 1883974656
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a2965837da030339e5e05b7fd1be813cb17b8ddaec5ed9f0a6f0ac9881019b5e
3
+ size 1883974632
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,13 +1,14 @@
1
  {
2
- "do_lower_case": false,
3
- "unk_token": "[UNK]",
4
- "sep_token": "[SEP]",
5
- "pad_token": "[PAD]",
6
  "cls_token": "[CLS]",
7
- "mask_token": "[MASK]",
8
- "tokenize_chinese_chars": true,
9
- "strip_accents": null,
10
  "do_basic_tokenize": true,
 
 
 
11
  "never_split": null,
12
- "tokenizer_class": "BertTokenizer"
 
 
 
 
 
13
  }
 
1
  {
 
 
 
 
2
  "cls_token": "[CLS]",
 
 
 
3
  "do_basic_tokenize": true,
4
+ "do_lower_case": false,
5
+ "mask_token": "[MASK]",
6
+ "model_max_length": 512,
7
  "never_split": null,
8
+ "pad_token": "[PAD]",
9
+ "sep_token": "[SEP]",
10
+ "strip_accents": null,
11
+ "tokenize_chinese_chars": true,
12
+ "tokenizer_class": "BertTokenizer",
13
+ "unk_token": "[UNK]"
14
  }