jukebox-5b-lyrics / tokenizer_config.json
{
  "max_n_lyric_tokens": 512,
  "n_genres": 5,
  "name_or_path": "ArthurZ/jukebox-5b-lyrics",
  "special_tokens_map_file": "/home/arthur_huggingface_co/.cache/huggingface/hub/models--ArthurZ--jukebox-5b-lyrics/snapshots/2de0fe8b3a95105ef4138ce7d946e930ee029df7/special_tokens_map.json",
  "tokenizer_class": "JukeboxTokenizer",
  "unk_token": {
    "__type": "AddedToken",
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "version": [
    "v2",
    "v2",
    "v2"
  ]
}
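
As a minimal sketch of how this config is consumed (assuming the Hugging Face transformers JukeboxTokenizer API; the artist, genre, and lyric strings below are placeholder examples, not values from this repo):

from transformers import JukeboxTokenizer

# from_pretrained reads this tokenizer_config.json (max_n_lyric_tokens,
# n_genres, unk_token, ...) from the repo named in "name_or_path".
tokenizer = JukeboxTokenizer.from_pretrained("ArthurZ/jukebox-5b-lyrics")

# Jukebox conditions on artist, genre, and lyrics rather than plain text;
# only up to max_n_lyric_tokens (512 here) lyric tokens are kept.
encoding = tokenizer("Alan Jackson", "Country Rock", "old town road")
print(encoding["input_ids"])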