lagodw committed
Commit 854b298
Parent: f09ef0b

initial training

added_tokens.json ADDED
@@ -0,0 +1 @@
+ {"<SEP>": 50258, "<PAD>": 50259, "<UNK>": 50257}
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<|endoftext|>", "eos_token": "<|endoftext|>", "unk_token": "<UNK>", "sep_token": "<SEP>", "pad_token": "<PAD>"}
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"unk_token": "<UNK>", "bos_token": "<|endoftext|>", "eos_token": "<|endoftext|>", "add_prefix_space": false, "pad_token": "<PAD>", "sep_token": "<SEP>", "model_max_length": 1024, "special_tokens_map_file": null, "name_or_path": "gpt2-large", "tokenizer_class": "GPT2Tokenizer"}
vocab.json ADDED
The diff for this file is too large to render. See raw diff