{
"additional_special_tokens": [
">>TITLE<<",
">>ABSTRACT<<",
">>INTRODUCTION<<",
">>SUMMARY<<",
">>COMMENT<<",
">>ANSWER<<",
">>QUESTION<<",
">>DOMAIN<<",
">>PREFIX<<",
">>SUFFIX<<",
">>MIDDLE<<"
],
"eos_token": "<|endoftext|>"
}