eval_requests / TheBloke / Mistral-7B-Instruct-v0.2-GPTQ_eval_request_False_GPTQ_4bit_int4_float16.json
lvkaokao's picture
Add TheBloke/Mistral-7B-Instruct-v0.2-GPTQ to eval queue
a45c23f verified
raw
history blame
No virus
479 Bytes
{"model": "TheBloke/Mistral-7B-Instruct-v0.2-GPTQ", "revision": "main", "private": false, "params": 4.16, "architectures": "MistralForCausalLM", "quant_type": "GPTQ", "precision": "4bit", "model_params": 7.04, "model_size": 4.16, "weight_dtype": "int4", "compute_dtype": "float16", "gguf_ftype": "*Q4_0.gguf", "hardware": "gpu", "status": "Pending", "submitted_time": "2024-05-10T05:47:33Z", "model_type": "quantization", "job_id": -1, "job_start_time": null, "scripts": "ITREX"}