mentalgpt-v0.0.1 / special_tokens_map.json
{
  "bos_token": {
    "content": "<|startoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<|pad|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<|unk|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  }
}
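
This map declares the tokenizer's four special tokens: <|startoftext|> (BOS), <|endoftext|> (EOS), <|pad|>, and <|unk|>. A minimal sketch of how such a file is typically consumed with the transformers library; the repo id below is an assumption inferred from this page, not verified:

from transformers import AutoTokenizer

# Repo id is a guess based on the page header; substitute your own path or hub id.
tokenizer = AutoTokenizer.from_pretrained("tontokoton/mentalgpt-v0.0.1")

# Each entry in special_tokens_map.json surfaces as a tokenizer attribute.
print(tokenizer.bos_token)  # <|startoftext|>
print(tokenizer.eos_token)  # <|endoftext|>
print(tokenizer.pad_token)  # <|pad|>
print(tokenizer.unk_token)  # <|unk|>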