{"model": "mistral-community/mixtral-8x22B-v0.3", "base_model": null, "revision": "211b177b79ab5ef245ee334d106c27623e786882", "precision": "bfloat16", "params": 140.63, "architectures": "MixtralForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-13T18:10:12Z", "model_type": "finetuned", "job_id": -1, "job_start_time": null, "use_chat_template": false} |