{
  "add_bos_token": false,
  "add_eos_token": false,
  "added_tokens_decoder": {
    "0": {
      "content": "<unk>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "<s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "2": {
      "content": "</s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "3": {
      "content": "<pad>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "4": {
      "content": "<cls>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "5": {
      "content": "<sep>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "6": {
      "content": "<mask>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "additional_special_tokens": [
    "<unk>",
    "<s>",
    "</s>",
    "<pad>",
    "<cls>",
    "<sep>",
    "<mask>"
  ],
  "auto_map": {
    "AutoTokenizer": [
      "tokenization_plamo.PlamoTokenizer",
      null
    ]
  },
  "bos_token": "<s>",
  "chat_template": "{{ '以下はタスクを説明する指示で、文脈を説明した入力とペアになっています。要求を適切に補完するよう応答を書いてください。\n\n' }}{% for message in messages %}{% if message['role'] == 'system' %}{{ '### 指示:\n' + message['content'].strip() + '\n\n' }}{% elif message['role'] == 'user' %}{{ '### 入力:\n' + message['content'].strip() + '\n\n' }}{% endif %}{% endfor %}{{ '### 応答:' }}",
  "clean_up_tokenization_spaces": false,
  "cls_token": "<cls>",
  "eos_token": "</s>",
  "local_file_only": true,
  "mask_token": "<mask>",
  "model_max_length": 2048,
  "pad_token": "<pad>",
  "sep_token": "<sep>",
  "sp_model_kwargs": {},
  "tokenizer_class": "PlamoTokenizer",
  "tokenizer_file": null,
  "unk_token": "<unk>"
}
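
A minimal usage sketch, assuming this config is saved as tokenizer_config.json in a local directory (the path "./plamo-tokenizer" below is a hypothetical placeholder) alongside the tokenization_plamo.py module and SentencePiece model that auto_map references. For context, the chat template's Japanese preamble translates to: "Below is an instruction that describes a task, paired with an input that provides context. Write a response that appropriately completes the request." The template renders system messages under "### 指示:" (instruction) and user messages under "### 入力:" (input), ignores other roles, and closes with the "### 応答:" (response) generation cue.

# Sketch: load the custom PlamoTokenizer and render the chat template.
# "./plamo-tokenizer" is a hypothetical local directory containing this
# config plus the files declared in auto_map.
from transformers import AutoTokenizer

# trust_remote_code=True is required so AutoTokenizer can import the
# custom tokenization_plamo.PlamoTokenizer class named in auto_map.
tokenizer = AutoTokenizer.from_pretrained("./plamo-tokenizer", trust_remote_code=True)

messages = [
    {"role": "system", "content": "Translate the input into English."},
    {"role": "user", "content": "こんにちは"},
]
# tokenize=False returns the rendered prompt string rather than token ids,
# which makes it easy to inspect what the template produces.
prompt = tokenizer.apply_chat_template(messages, tokenize=False)
print(prompt)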