{
    "max_new_tokens": 200,
    "max_new_tokens_min": 1,
    "max_new_tokens_max": 2000,
    "seed": -1,
    "name1": "You",
    "name2": "Assistant",
    "context": "This is a conversation with your Assistant. The Assistant is very helpful and is eager to chat with you and answer your questions.",
    "greeting": "",
    "end_of_turn": "",
    "custom_stopping_strings": "",
    "stop_at_newline": false,
    "add_bos_token": true,
    "ban_eos_token": false,
    "skip_special_tokens": true,
    "truncation_length": 2048,
    "truncation_length_min": 0,
    "truncation_length_max": 8192,
    "mode": "cai-chat",
    "instruction_template": "None",
    "chat_prompt_size": 2048,
    "chat_prompt_size_min": 0,
    "chat_prompt_size_max": 2048,
    "chat_generation_attempts": 1,
    "chat_generation_attempts_min": 1,
    "chat_generation_attempts_max": 5,
    "default_extensions": [],
    "chat_default_extensions": [
        "gallery"
    ],
    "presets": {
        "default": "Default",
        ".*(alpaca|llama|llava)": "LLaMA-Precise",
        ".*pygmalion": "NovelAI-Storywriter",
        ".*RWKV": "Naive"
    },
    "prompts": {
        "default": "QA",
        ".*(gpt4chan|gpt-4chan|4chan)": "GPT-4chan",
        ".*oasst": "Open Assistant",
        ".*alpaca": "Alpaca"
    },
    "lora_prompts": {
        "default": "QA",
        ".*(alpaca-lora-7b|alpaca-lora-13b|alpaca-lora-30b)": "Alpaca"
    }
}
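
The sketch below illustrates how a front end might consume a file like this: the JSON is merged key by key over built-in defaults, and the regex keys under `presets` are matched against the loaded model's name to pick a generation preset. The file name `settings.json`, the `DEFAULT_SETTINGS` subset, and the case-insensitive first-match rule are illustrative assumptions, not the project's actual loading code.

```python
import json
import re
from pathlib import Path

# Illustrative subset of the in-memory defaults (assumption, not the real list).
DEFAULT_SETTINGS = {
    "max_new_tokens": 200,
    "seed": -1,
    "mode": "cai-chat",
    "presets": {"default": "Default"},
}


def load_settings(path="settings.json"):
    """Merge user overrides from a JSON file over the defaults, key by key."""
    settings = dict(DEFAULT_SETTINGS)
    settings_file = Path(path)
    if settings_file.exists():
        with open(settings_file, encoding="utf-8") as f:
            settings.update(json.load(f))
    return settings


def pick_preset(model_name, presets):
    """Return the preset for the first regex key that matches the model name,
    falling back to the 'default' entry (case-insensitive matching is assumed)."""
    for pattern, preset in presets.items():
        if pattern != "default" and re.match(pattern, model_name, flags=re.IGNORECASE):
            return preset
    return presets["default"]


if __name__ == "__main__":
    settings = load_settings()
    # Prints "LLaMA-Precise" with the settings above, "Default" if no file is found.
    print(pick_preset("llama-7b", settings["presets"]))
```

The `prompts` and `lora_prompts` mappings can be resolved the same way, with the model or LoRA name matched against the regex keys and `"default"` used as the fallback.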