pile-7b-250b-tokens / tokenizer_config.json
{
"add_prefix_space": false,
"additional_special_tokens": [
"<|endoftext|>"
],
"bos_token": "<|endoftext|>",
"clean_up_tokenization_spaces": true,
"eos_token": "<|endoftext|>",
"model_max_length": 1000000000000000019884624838656,
"tokenizer_class": "GPT2Tokenizer",
"unk_token": "<|endoftext|>",
"vocab_size": 49152
}
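
This is a standard Hugging Face transformers tokenizer_config.json for a GPT-2-style BPE tokenizer; the very large model_max_length value is the library's sentinel for "no explicit length limit" (int(1e30)). Below is a minimal sketch of loading and using such a tokenizer with transformers, assuming a placeholder repository id and that the companion vocab.json and merges.txt files sit alongside this config:

from transformers import AutoTokenizer

# The repo id below is a placeholder assumption; substitute the actual repository
# or a local directory containing this tokenizer_config.json plus vocab.json and merges.txt.
tok = AutoTokenizer.from_pretrained("your-org/pile-7b-250b-tokens")

print(tok.__class__.__name__)                        # GPT2Tokenizer (or the fast variant)
print(tok.bos_token, tok.eos_token, tok.unk_token)   # all "<|endoftext|>" per this config
print(tok.vocab_size)                                # 49152

# Encode and decode a sample string; add_prefix_space=false means no space is
# prepended before the first word when tokenizing.
ids = tok("Hello world")["input_ids"]
print(ids)
print(tok.decode(ids))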