{
  "dim": 3072,
  "ffn_dim_multiplier": 1.0,
  "multiple_of": 256,
  "n_heads": 24,
  "n_kv_heads": 8,
  "n_layers": 28,
  "norm_eps": 1e-05,
  "rope_theta": 500000.0,
  "use_scaled_rope": true,
  "vocab_size": 128256,
  "quantization_args": {
    "group_size": 32
  }
}