{
    "add_bos_token": false,
    "add_eos_token": false,
    "additional_special_tokens": [
        "<|fim_begin|>",
        "<|fim_hole|>",
        "<|fim_end|>",
        "<|fim_pad|>",
        "<|repo_name|>",
        "<|file_sep|>"
    ],
    "bos_token": "<|startoftext|>",
    "clean_up_tokenization_spaces": false,
    "cls_token": "[CLS]",
    "eos_token": "<|endoftext|>",
    "gmask_token": "[gMASK]",
    "merges_file": null,
    "model_max_length": 1000000000000000019884624838656,
    "tokenizer_class": "PreTrainedTokenizerFast",
    "vocab_file": null,
    "pad_token": "<|endoftext|>",
    "fast_tokenizer": true,
    "chat_template": "{% for message in messages %}{% set role = message['role'] | lower %}{% if role == 'user' %}{% set role = 'HUMAN' %}{% endif %}{% set role = role | upper %}{{ '<role>' + role + '</role>' + message['content'] }}{% endfor %}{% if add_generation_prompt %}{{ '<role>ASSISTANT</role>' }}{% endif %}"
}
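
The chat_template entry above maps the "user" role to HUMAN, upper-cases every role, and prefixes each message with a <role>ROLE</role> tag; when add_generation_prompt is set it appends a bare <role>ASSISTANT</role> marker for the model to continue from. Below is a minimal sketch of how this renders through Hugging Face Transformers' apply_chat_template; the model path is a hypothetical placeholder, not part of this file.

from transformers import AutoTokenizer

# Load the tokenizer that ships with this tokenizer_config.json.
# "path/to/model" is a placeholder for the actual repository or local directory.
tokenizer = AutoTokenizer.from_pretrained("path/to/model")

messages = [
    {"role": "user", "content": "Write a haiku about autumn."},
    {"role": "assistant", "content": "Leaves drift on cold wind."},
    {"role": "user", "content": "Now one about winter."},
]

# tokenize=False returns the rendered string; add_generation_prompt=True
# appends the trailing '<role>ASSISTANT</role>' defined in chat_template.
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
# Expected shape per the template ("user" becomes HUMAN, other roles are upper-cased):
# <role>HUMAN</role>Write a haiku about autumn.<role>ASSISTANT</role>Leaves drift on cold wind.<role>HUMAN</role>Now one about winter.<role>ASSISTANT</role>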