{
    "dim": 4096,
    "multiple_of": 256,
    "n_heads": 32,
    "n_layers": 32,
    "norm_eps": 1e-05,
    "vocab_size": -1,
    "model_type": "llama",
    "quantization": {
        "group_size": 64,
        "bits": 4
    }
}