dolly-v2-ggml / dolly-v2-3b-f16.meta
{
  "model": "GptNeoX",
  "quantization": "F16",
  "quantization_version": "Not_Quantized",
  "container": "GGML",
  "converter": "llm-rs",
  "hash": "4aa7d683ef0a81e73d8a6b6b8c9b84afd14f60f63bba5ccb83e7f0f5e935c70b",
  "base_model": "databricks/dolly-v2-3b"
}
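A minimal sketch of how this metadata sidecar might be consumed, assuming the "hash" field is a SHA-256 digest of the matching model file (the digest's target and the file name dolly-v2-3b-f16.bin are assumptions, not stated in the metadata):

# Read the .meta JSON and, if the model file is present, compare its
# SHA-256 digest against the "hash" field. Pure standard library.
import hashlib
import json
from pathlib import Path

def load_meta(meta_path: Path) -> dict:
    """Parse the .meta JSON sidecar into a dict."""
    return json.loads(meta_path.read_text(encoding="utf-8"))

def sha256_of(path: Path, chunk_size: int = 1 << 20) -> str:
    """Stream the file through SHA-256 in chunks to avoid loading it whole."""
    digest = hashlib.sha256()
    with path.open("rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

if __name__ == "__main__":
    meta = load_meta(Path("dolly-v2-3b-f16.meta"))
    model_file = Path("dolly-v2-3b-f16.bin")  # hypothetical sibling model file
    print(f"architecture: {meta['model']}, container: {meta['container']}")
    if model_file.exists():
        ok = sha256_of(model_file) == meta["hash"]
        print("hash match" if ok else "hash mismatch")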