niuwz
/

Text Generation
Transformers
Safetensors
Chinese
phi3
conversational
Inference Endpoints
Mini-Chinese-Phi3 / tokenizer_config.json
niuwz
modified: tokenizer_config.json
5474da7
{
"added_tokens_decoder": {
"0": {
"content": "[PAD]",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"1": {
"content": "[EOS]",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"2": {
"content": "[BOS]",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"31998": {
"content": "\t",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false,
"special": false
},
"31999": {
"content": "\n",
"lstrip": false,
"normalized": true,
"rstrip": false,
"single_word": false,
"special": false
},
"32000": {
"content": "[user]",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"32001": {
"content": "[end]",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"32002": {
"content": "[assistant]",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
}
},
"additional_special_tokens": [
"[user]",
"[end]",
"[assistant]"
],
"bos_token": "[BOS]",
"chat_template": "{% for message in messages %} [user]\n {{ message }} [end]\n[assistant]{% endfor %}",
"clean_up_tokenization_spaces": true,
"eos_token": "[EOS]",
"model_max_length": 1000000000000000019884624838656,
"pad_token": "[PAD]",
"tokenizer_class": "PreTrainedTokenizerFast"
}