aidenygu committed on
Commit 2ced97b
1 Parent(s): b899c3c

Training in progress, epoch 1

.gitignore ADDED
@@ -0,0 +1 @@
+ checkpoint-*/
config.json ADDED
The diff for this file is too large to render. See raw diff
 
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0478f79fe3b3f53f05bd72a740e2556394020758ac0c3d0a785df8c4e58a753b
+ size 270187117
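
The three lines above are a Git LFS pointer, not the model weights themselves: the repository stores only the spec version, the sha256 object id, and the byte size, while the real pytorch_model.bin lives in LFS storage. Below is a minimal sketch (not part of the commit) that checks a locally fetched copy of the file against this pointer; it assumes the actual weights have already been pulled into the working tree, e.g. with `git lfs pull`.

# Sketch: verify pytorch_model.bin against the LFS pointer shown above.
# Assumption: the full weights file was already fetched via `git lfs pull`.
import hashlib
from pathlib import Path

POINTER_OID = "0478f79fe3b3f53f05bd72a740e2556394020758ac0c3d0a785df8c4e58a753b"
POINTER_SIZE = 270187117

path = Path("pytorch_model.bin")  # hypothetical path inside a local clone

# Hash in 1 MiB chunks so the ~270 MB file is not read into memory at once.
h = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert path.stat().st_size == POINTER_SIZE, "size does not match LFS pointer"
assert h.hexdigest() == POINTER_OID, "sha256 does not match LFS pointer"
print("pytorch_model.bin matches its LFS pointer")
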
runs/May09_20-33-27_10bd65b0d4a3/1683664418.8009727/events.out.tfevents.1683664418.10bd65b0d4a3.4892.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:87054dbc6eb448bd40b518e6033eab7ffbae536615e57c57d5b05164f419668c
+ size 5882
runs/May09_20-33-27_10bd65b0d4a3/events.out.tfevents.1683664418.10bd65b0d4a3.4892.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:97b2f298ebb42fbbea6e6f34af0c2747e9eeeefc33a35494809ced0b6701263e
+ size 125909
special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "cls_token": "[CLS]",
+ "mask_token": "[MASK]",
+ "pad_token": "[PAD]",
+ "sep_token": "[SEP]",
+ "unk_token": "[UNK]"
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,13 @@
+ {
+ "clean_up_tokenization_spaces": true,
+ "cls_token": "[CLS]",
+ "do_lower_case": true,
+ "mask_token": "[MASK]",
+ "model_max_length": 512,
+ "pad_token": "[PAD]",
+ "sep_token": "[SEP]",
+ "strip_accents": null,
+ "tokenize_chinese_chars": true,
+ "tokenizer_class": "DistilBertTokenizer",
+ "unk_token": "[UNK]"
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d1355c7d95e65aabd3ff8e0af3e6e481de967f96824b561540c06844b755435f
+ size 3579
vocab.txt ADDED
The diff for this file is too large to render. See raw diff