{
"add_prefix_space": false,
"add_special_tokens": true,
"additional_special_tokens": [
"<BRG>",
"N_00",
"N_01",
"N_02",
"N_03",
"N_04",
"N_05",
"N_06",
"N_07",
"N_08",
"N_09"
],
"bos_token": "<s>",
"eos_token": "</s>",
"name_or_path": "bigscience/bloom-560m",
"pad_token": "<pad>",
"padding_side": "left",
"special_tokens_map_file": null,
"tokenizer_class": "BloomTokenizer",
"unk_token": "<unk>"
}