UniBloom / tokenizer_config.json
{
"add_prefix_space": false,
"add_special_tokens": true,
"additional_special_tokens": [
"number0",
"number1",
"number2",
"number3",
"number4",
"number5",
"number6",
"number7",
"number8",
"number9"
],
"bos_token": "<s>",
"eos_token": "</s>",
"name_or_path": "bigscience/bloom-560m",
"pad_token": "<pad>",
"padding_side": "left",
"special_tokens_map_file": null,
"tokenizer_class": "BloomTokenizer",
"unk_token": "<unk>"
}
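
A minimal usage sketch, assuming the `transformers` library and that this config ships alongside the rest of the tokenizer files in the UniBloom repo (the Hub id "Chakita/UniBloom" below is an assumption, not confirmed by the file itself):

from transformers import AutoTokenizer

# Load the tokenizer; this config layers the extra digit tokens and
# left-padding behaviour on top of the bigscience/bloom-560m tokenizer.
tokenizer = AutoTokenizer.from_pretrained("Chakita/UniBloom")

# "number0" .. "number9" are registered as additional special tokens,
# so each digit placeholder maps to a single token id instead of being
# split by the base BLOOM BPE vocabulary.
print(tokenizer.additional_special_tokens)

# Padding uses the <pad> token on the left, per "padding_side": "left".
print(tokenizer.pad_token, tokenizer.padding_side)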