{ "add_bos_token": true, "add_eos_token": false, "add_prefix_space": true, "added_tokens_decoder": { "0": { "content": "<unk>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "1": { "content": "<s>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "2": { "content": "</s>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32000": { "content": "<image>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32001": { "content": "<pad>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true } }, "additional_special_tokens": [ "<image>" ], "bos_token": "<s>", "chat_template": "\n{%- set preamble = \"A chat between a curious human and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the human's questions. \" -%}\n{{ preamble }}\n{%- for m in messages %}\n {%- if m[\"role\"] == \"user\" %}\n {%- if m[\"content\"] is string -%}\n USER: {{ m[\"content\"] }} ASSISTANT:\n {%- elif m[\"content\"] is iterable -%}\n {%- set user_prompt = m[\"content\"] | selectattr(\"type\", \"equalto\", \"text\") | map(attribute=\"text\") | first -%}\n {%- set image = m[\"content\"] | selectattr(\"type\", \"equalto\", \"image_url\") | map(attribute=\"image_url\") | first -%}\n {%- if \"<image>\" not in user_prompt -%}\n USER: <image>\n{{ user_prompt }} ASSISTANT:\n {%- else -%}\n USER: {{ user_prompt }} ASSISTANT:\n {%- endif -%}\n {%- endif -%}\n {%- elif m[\"role\"] == \"assistant\" -%}\n {{ \" \" + m[\"content\"] }}\n {%- endif -%}\n{% endfor %}\n", "clean_up_tokenization_spaces": false, "eos_token": "</s>", "legacy": false, "model_max_length": 4096, "pad_token": "<pad>", "padding_side": "left", "processor_class": "LlavaProcessor", "sp_model_kwargs": {}, "spaces_between_special_tokens": false, "tokenizer_class": "LlamaTokenizer", "unk_token": "<unk>", "use_default_system_prompt": 
false }