[ { "id": "Internal", "model_title": "Llama2-1.3B", "model_file": "ggml-model-Q8_0.gguf", "model_url": "https://", "model_info_url": "https://huggingface.co/princeton-nlp/Sheared-LLaMA-1.3B", "model_avatar": "ava0_48", "model_description": "The standard Llama2 based 1.3B LLM.", "developer": "Meta", "developer_url": "https://ai.meta.com/llama/", "context" : 2048, "temp" : 0.6, "prompt_format" : ": {{prompt}}\n: ", "top_k" : 5, "top_p" : 0.9, "model_inference" : "llama", "n_batch" : 10, "template_name" : "HumanBot", "is_ready": true, "is_internal": true }, { "id": "tinyllama-1.1B-chat-Q8", "model_title": "TinyLlama-1.1B-chat", "model_file": "mistral-7b-instruct-v0.2.Q5_K_M.gguf", "model_url": "https://huggingface.co/flyingfishinwater/goodmodels/blob/main/tinyllama-1.1B-chat-v1.0-Q8_0.gguf?download=true", "model_info_url": "https://huggingface.co/TinyLlama/TinyLlama-1.1B-Chat-v1.0", "model_avatar": "logo_tinyllama", "model_description": "The TinyLlama 1.1B model.", "developer": "Zhang Peiyuan", "developer_url": "https://github.com/jzhang38/TinyLlama", "context" : 4096, "temp" : 0.6, "prompt_format" : "<|system|>You are a friendly chatbot who always responds in the style of a pirate.<|user|>{{prompt}}<|assistant|>", "top_k" : 5, "top_p" : 0.9, "model_inference" : "llama", "n_batch" : 10, "template_name" : "TinyLlama", "is_ready": true, "is_internal": false }, { "id": "tinyllama-1.1B-32k-Q8", "model_title": "TinyLlama-1.1B-32k", "model_file": "mistral-7b-instruct-v0.2.Q5_K_M.gguf", "model_url": "https://huggingface.co/flyingfishinwater/goodmodels/blob/main/tinyllama-1.1B-chat-v1.0-Q8_0.gguf?download=true", "model_info_url": "https://huggingface.co/TinyLlama/TinyLlama-1.1B-Chat-v1.0", "model_avatar": "logo_tinyllama", "model_description": "The TinyLlama 1.1B model.", "developer": "Zhang Peiyuan", "developer_url": "https://github.com/jzhang38/TinyLlama", "context" : 4096, "temp" : 0.6, "prompt_format" : "<|system|>You are a friendly chatbot who always responds in the style of a pirate.<|user|>{{prompt}}<|assistant|>", "top_k" : 5, "top_p" : 0.9, "model_inference" : "llama", "n_batch" : 10, "template_name" : "TinyLlama", "is_ready": false, "is_internal": true }, { "id": "mistral-7b-instruct-v0.2-Q5_K_M", "model_title": "Mistral 7B Instruct v0.2 Q5_K_M", "model_file": "mistral-7b-instruct-v0.2.Q5_K_M.gguf", "model_url": "https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.2-GGUF/resolve/main/mistral-7b-instruct-v0.2.Q5_K_M.gguf?download=true", "model_info_url": "https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2", "model_avatar": "logo_mistralai", "model_description": "The standard Llama2 based 1.3B LLM.", "developer": "Mistral AI", "developer_url": "https://mistral.ai/", "context" : 4096, "temp" : 0.6, "prompt_format" : "[INST]{{prompt}}[/INST]", "top_k" : 5, "top_p" : 0.9, "model_inference" : "llama", "n_batch" : 10, "template_name" : "Mistral", "is_ready": true, "is_internal": false }, { "id": "mistral-7b-instruct-v0.2-Q8", "model_title": "Mistral 7B v0.2 Q5_K_M", "model_file": "mistral-7b-instruct-v0.2.Q5_K_M.gguf", "model_url": "https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.2-GGUF/resolve/main/mistral-7b-instruct-v0.2.Q8_0.gguf?download=true", "model_info_url": "https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2", "model_avatar": "logo_mistralai", "model_description": "The standard Llama2 based 1.3B LLM.", "developer": "Mistral AI", "developer_url": "https://mistral.ai/", "context" : 4096, "temp" : 0.6, "prompt_format" : "[INST]{{prompt}}[/INST]", "top_k" : 5, "top_p" 
: 0.9, "model_inference" : "llama", "n_batch" : 10, "template_name" : "Mistral", "is_ready": true, "is_internal": false }, { "id": "openchat-3.5-1210-Q5_K_M", "model_title": "OpenChat 3.5 Q5_K_M", "model_file": "mistral-7b-instruct-v0.2.Q5_K_M.gguf", "model_url": "https://huggingface.co/TheBloke/openchat-3.5-1210-GGUF/resolve/main/openchat-3.5-1210.Q5_K_M.gguf?download=true", "model_info_url": "https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2", "model_avatar": "logo_openchat", "model_description": "The standard Llama2 based 1.3B LLM.", "developer": "OpenChat Team", "developer_url": "https://openchat.team/", "context" : 4096, "temp" : 0.6, "prompt_format" : "[INST]{{prompt}}[/INST]", "top_k" : 5, "top_p" : 0.9, "model_inference" : "llama", "n_batch" : 10, "template_name" : "Mistral", "is_ready": true, "is_internal": false } ]