{
"model": "NousResearch/Hermes-2-Theta-Llama-3-8B",
"base_model": null,
"revision": "885173e97ab8572b444f7db1290d5d0386e26816",
"precision": "bfloat16",
"params": 8.03,
"architectures": "LlamaForCausalLM",
"weight_type": "Original",
"status": "PENDING",
"submitted_time": "2024-06-09T21:56:21Z",
"model_type": "llama",
"job_id": -1,
"job_start_time": null,
"use_chat_template": true
}