vlkn committed
Commit c4ca7a3
1 Parent(s): 81f1950

Training in progress, step 10

.gitignore ADDED
@@ -0,0 +1 @@
+ checkpoint-*/
runs/Jun29_09-23-07_283118f9b5ed/events.out.tfevents.1688030756.283118f9b5ed.220.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:610e93b48d58117df4b8b41ceb787acb26b3c1fef81b8ba52cee1c1e405cbba1
+ size 4867
runs/Jun29_09-27-12_283118f9b5ed/events.out.tfevents.1688030846.283118f9b5ed.220.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3347e4bad08b127581861ca21d5562710bd9498dc7ab86589d40cd3990c81099
+ size 5021
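The two events.out.tfevents files above are TensorBoard logs tracked as Git LFS pointers. A minimal sketch of inspecting one locally, assuming the repository has been cloned with the LFS objects pulled and the tensorboard package is installed:

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Point the accumulator at the run directory containing the event file above.
acc = EventAccumulator("runs/Jun29_09-23-07_283118f9b5ed")
acc.Reload()                  # parse the event file(s) in that run directory
print(acc.Tags()["scalars"])  # scalar tags such as a training loss, if any were logged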
special_tokens_map.json ADDED
@@ -0,0 +1,17 @@
+ {
+   "additional_special_tokens": [
+     ">>TITLE<<",
+     ">>ABSTRACT<<",
+     ">>INTRODUCTION<<",
+     ">>SUMMARY<<",
+     ">>COMMENT<<",
+     ">>ANSWER<<",
+     ">>QUESTION<<",
+     ">>DOMAIN<<",
+     ">>PREFIX<<",
+     ">>SUFFIX<<",
+     ">>MIDDLE<<"
+   ],
+   "eos_token": "<|endoftext|>",
+   "pad_token": "<|endoftext|>"
+ }
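As a quick sanity check, the entries in special_tokens_map.json surface directly as attributes on a tokenizer loaded with transformers. The repository id below is a hypothetical placeholder, since the commit view does not show it:

from transformers import AutoTokenizer

# "vlkn/<repo-name>" is a placeholder; substitute the actual model repository id.
tokenizer = AutoTokenizer.from_pretrained("vlkn/<repo-name>")

print(tokenizer.eos_token)                      # "<|endoftext|>"
print(tokenizer.pad_token)                      # "<|endoftext|>"
print(tokenizer.additional_special_tokens[:3])  # [">>TITLE<<", ">>ABSTRACT<<", ">>INTRODUCTION<<"]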
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "add_prefix_space": false,
+   "clean_up_tokenization_spaces": true,
+   "eos_token": "<|endoftext|>",
+   "model_max_length": 2048,
+   "tokenizer_class": "PreTrainedTokenizerFast"
+ }
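The values in tokenizer_config.json are likewise applied at load time; for instance, model_max_length is the length the fast tokenizer truncates to when no explicit max_length is passed. A minimal sketch, using the same placeholder repository id as above:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("vlkn/<repo-name>")  # hypothetical repo id

print(type(tokenizer).__name__)    # a PreTrainedTokenizerFast (or subclass)
print(tokenizer.model_max_length)  # 2048, from tokenizer_config.json

# With truncation=True and no max_length argument, model_max_length is the cap.
batch = tokenizer("some very long input text ...", truncation=True)
print(len(batch["input_ids"]))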
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:39613152ed511ee3ec42655f503069cbe8188b8b6aba7524f6470236f47feb42
+ size 3963
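training_args.bin is typically the torch-serialized TrainingArguments object that transformers.Trainer saves alongside its checkpoints. A minimal sketch of inspecting it, assuming the LFS object has been pulled locally:

import torch
from transformers import TrainingArguments

# weights_only=False is needed because this is a pickled Python object,
# not a plain tensor checkpoint (only load files you trust).
args = torch.load("training_args.bin", weights_only=False)
assert isinstance(args, TrainingArguments)

print(args.output_dir, args.per_device_train_batch_size, args.learning_rate)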