# template used in production for HuggingChat.
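# MODELS is a JSON array describing each model exposed in the UI: display metadata, prompt tokens/template,
# example prompts, and default generation parameters.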
MODELS=`[
  {
    "name": "meta-llama/Llama-2-70b-chat-hf",
    "description": "The latest and biggest model from Meta, fine-tuned for chat.",
    "websiteUrl": "https://ai.meta.com/llama/",
    "userMessageToken": "",
    "userMessageEndToken": " [/INST] ",
    "assistantMessageToken": "",
    "assistantMessageEndToken": " </s><s>[INST] ",
    "preprompt": " ",
    "chatPromptTemplate": "<s>[INST] <<SYS>>\n{{preprompt}}\n<</SYS>>\n\n{{#each messages}}{{#ifUser}}{{content}} [/INST] {{/ifUser}}{{#ifAssistant}}{{content}} </s><s>[INST] {{/ifAssistant}}{{/each}}",
    "promptExamples": [
      {
        "title": "Write an email from bullet list",
        "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
      }, {
        "title": "Code a snake game",
        "prompt": "Code a basic snake game in python, give explanations for each step."
      }, {
        "title": "Assist in a task",
        "prompt": "How do I make a delicious lemon cheesecake?"
      }
    ],
    "parameters": {
      "temperature": 0.1,
      "top_p": 0.95,
      "repetition_penalty": 1.2,
      "top_k": 50,
      "truncate": 1000,
      "max_new_tokens": 1024
    }
  },
  {
    "name": "codellama/CodeLlama-34b-Instruct-hf",
    "displayName": "codellama/CodeLlama-34b-Instruct-hf",
    "description": "Code Llama, a state-of-the-art code model from Meta.",
    "websiteUrl": "https://about.fb.com/news/2023/08/code-llama-ai-for-coding/",
    "userMessageToken": "",
    "userMessageEndToken": " [/INST] ",
    "assistantMessageToken": "",
    "assistantMessageEndToken": " </s><s>[INST] ",
    "preprompt": " ",
    "chatPromptTemplate": "<s>[INST] <<SYS>>\n{{preprompt}}\n<</SYS>>\n\n{{#each messages}}{{#ifUser}}{{content}} [/INST] {{/ifUser}}{{#ifAssistant}}{{content}} </s><s>[INST] {{/ifAssistant}}{{/each}}",
    "promptExamples": [
      {
        "title": "Fibonacci in Python",
        "prompt": "Write a python function to calculate the nth fibonacci number."
      }, {
        "title": "JavaScript promises",
        "prompt": "How can I wait for multiple JavaScript promises to fulfill before doing something with their values?"
      }, {
        "title": "Rust filesystem",
        "prompt": "How can I load a file from disk in Rust?"
      }
    ],
    "parameters": {
      "temperature": 0.1,
      "top_p": 0.95,
      "repetition_penalty": 1.2,
      "top_k": 50,
      "truncate": 1000,
      "max_new_tokens": 2048
    }
  },
  {
    "name": "tiiuae/falcon-180B-chat",
    "displayName": "tiiuae/falcon-180B-chat",
    "description": "Falcon-180B is a 180B-parameter causal decoder-only model built by TII and trained on 3,500B tokens.",
    "websiteUrl": "https://www.tii.ae/news/technology-innovation-institute-introduces-worlds-most-powerful-open-llm-falcon-180b",
    "preprompt": " ",
    "chatPromptTemplate": "System: {{preprompt}}\nUser:{{#each messages}}{{#ifUser}}{{content}}\nFalcon:{{/ifUser}}{{#ifAssistant}}{{content}}\nUser:{{/ifAssistant}}{{/each}}",
    "parameters": {
      "temperature": 0.1,
      "top_p": 0.95,
      "repetition_penalty": 1.2,
      "top_k": 50,
      "truncate": 1000,
      "max_new_tokens": 1024,
      "stop": ["User:"]
    },
    "promptExamples": [
      {
        "title": "Write an email from bullet list",
        "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
      }, {
        "title": "Code a snake game",
        "prompt": "Code a basic snake game in python, give explanations for each step."
      }, {
        "title": "Assist in a task",
        "prompt": "How do I make a delicious lemon cheesecake?"
      }
    ]
  },
  {
    "name": "mistralai/Mistral-7B-Instruct-v0.1",
    "displayName": "mistralai/Mistral-7B-Instruct-v0.1",
    "description": "Mistral 7B is a new Apache 2.0 model released by Mistral AI that outperforms Llama 2 13B in benchmarks.",
    "websiteUrl": "https://mistral.ai/news/announcing-mistral-7b/",
    "preprompt": "",
    "chatPromptTemplate": "<s>{{#each messages}}{{#ifUser}}[INST] {{#if @first}}{{#if @root.preprompt}}{{@root.preprompt}}\n{{/if}}{{/if}}{{content}} [/INST]{{/ifUser}}{{#ifAssistant}}{{content}}</s>{{/ifAssistant}}{{/each}}",
    "parameters": {
      "temperature": 0.1,
      "top_p": 0.95,
      "repetition_penalty": 1.2,
      "top_k": 50,
      "truncate": 1000,
      "max_new_tokens": 2048,
      "stop": ["</s>"]
    },
    "promptExamples": [
      {
        "title": "Write an email from bullet list",
        "prompt": "As a restaurant owner, write a professional email to the supplier to get these products every week: \n\n- Wine (x10)\n- Eggs (x24)\n- Bread (x12)"
      }, {
        "title": "Code a snake game",
        "prompt": "Code a basic snake game in python, give explanations for each step."
      }, {
        "title": "Assist in a task",
        "prompt": "How do I make a delicious lemon cheesecake?"
      }
    ]
  }
]`
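# For reference, the Llama-style chatPromptTemplate above renders a user/assistant/user exchange roughly as
#   <s>[INST] <<SYS>>\n{preprompt}\n<</SYS>>\n\n{user 1} [/INST] {assistant 1} </s><s>[INST] {user 2} [/INST]
# (illustrative sketch only; the actual expansion is done by the app's Handlebars-style template helpers).

# OLD_MODELS lists models that are no longer served, so existing conversations that referenced them can still be displayed.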
OLD_MODELS=`[{"name":"bigcode/starcoder"}, {"name":"OpenAssistant/oasst-sft-6-llama-30b-xor"}, {"name":"HuggingFaceH4/zephyr-7b-alpha"}]`
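# TASK_MODEL is used for internal tasks such as summarizing conversation titles; here it points at the Mistral 7B entry defined above.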
TASK_MODEL='mistralai/Mistral-7B-Instruct-v0.1'
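# The app is served under https://huggingface.co/chat, and shared conversations use the shorter https://hf.co/chat prefix.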
APP_BASE="/chat"
PUBLIC_ORIGIN=https://huggingface.co
PUBLIC_SHARE_PREFIX=https://hf.co/chat
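# JSON array of announcement banners to show in the UI; currently empty.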
PUBLIC_ANNOUNCEMENT_BANNERS=`[]`
PUBLIC_APP_NAME=HuggingChat
PUBLIC_APP_ASSETS=huggingchat
PUBLIC_APP_COLOR=yellow
PUBLIC_APP_DESCRIPTION="Making the community's best AI chat models available to everyone."
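# Boolean flags (1 = enabled): PUBLIC_APP_DATA_SHARING exposes the opt-in for sharing conversations with model authors,
# and PUBLIC_APP_DISCLAIMER shows the disclaimer about generated content.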
PUBLIC_APP_DATA_SHARING=1
PUBLIC_APP_DISCLAIMER=1
RATE_LIMIT=16
MESSAGES_BEFORE_LOGIN=5 # How many messages a user can send in a conversation before having to log in. Set to 0 to force login right away.
PUBLIC_GOOGLE_ANALYTICS_ID=G-8Q63TH4CSL
# Not part of the .env, but set as other variables in the Space:
# ADDRESS_HEADER=X-Forwarded-For
# XFF_DEPTH=2
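# These presumably tell the app which forwarded-address header to trust, and how many hops deep, when resolving the client IP behind the Space's proxy.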