gpt-4-tokenizer-sc-tokens / special_tokens_map.json
{
"additional_special_tokens": [
"<gh_stars>",
"<issue_start>",
"<commit_after>",
"<jupyter_text>",
"<fim_middle>",
"<|endoftext|>",
"<jupyter_code>",
"<filename>",
"<fim_suffix>",
"<fim_prefix>",
"<commit_msg>",
"<fim_pad>",
"<issue_comment>",
"<reponame>",
"<jupyter_start>",
"<issue_closed>",
"<commit_before>",
"<empty_output>",
"<jupyter_output>"
],
"bos_token": "<|endoftext|>",
"eos_token": "<|endoftext|>",
"unk_token": "<|endoftext|>"
}
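
For reference, a minimal sketch of how this config surfaces at load time, assuming the file belongs to a Hub repo with the id "RaymondLi/gpt-4-tokenizer-sc-tokens" (inferred from the page header, not confirmed) and that the Hugging Face transformers library is installed:

# Minimal sketch; the repo id below is an assumption based on the page header.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("RaymondLi/gpt-4-tokenizer-sc-tokens")

# Each entry in additional_special_tokens is registered as an atomic token,
# so FIM markers like <fim_prefix> encode to a single id instead of being
# split into sub-tokens.
text = "<fim_prefix>def add(a, b):<fim_suffix>\n<fim_middle>"
ids = tokenizer(text)["input_ids"]
print(tokenizer.convert_ids_to_tokens(ids))

# bos_token, eos_token, and unk_token all map to the same <|endoftext|> token:
print(tokenizer.bos_token, tokenizer.eos_token, tokenizer.unk_token)

Mapping all three roles to <|endoftext|> is a common choice for code-model tokenizers of this family; the remaining tokens (<reponame>, <gh_stars>, <issue_start>, <jupyter_code>, and so on) mark repository metadata and notebook structure in the training data.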