Mamba2InLlama_0_875 / mamba_config.json
{
  "d_model": 4096,
  "ssm_cfg": {
    "expand": 1,
    "ngroups": 32,
    "d_state": 128
  },
  "rms_norm_eps": 1e-05,
  "vocab_size": null,
  "d_inner": 4096,
  "d_xb": 1024,
  "intermediate_size": 14336,
  "hidden_act": "silu",
  "n_layer": 32,
  "attn_layers": [7, 15, 23, 31]
}
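
For readers who want to consume this file programmatically, below is a minimal Python sketch using only the standard library. The MambaConfig dataclass is a hypothetical stand-in for illustration; it mirrors the keys in this file but is not the config class the MambaInLlama codebase actually defines.

import json
from dataclasses import dataclass, field
from typing import Optional

@dataclass
class MambaConfig:
    # Hypothetical container whose fields match mamba_config.json.
    d_model: int = 4096
    ssm_cfg: dict = field(default_factory=dict)
    rms_norm_eps: float = 1e-5
    vocab_size: Optional[int] = None
    d_inner: int = 4096
    d_xb: int = 1024
    intermediate_size: int = 14336
    hidden_act: str = "silu"
    n_layer: int = 32
    attn_layers: list = field(default_factory=list)

with open("mamba_config.json") as f:
    cfg = MambaConfig(**json.load(f))

# 4 of the 32 layers (indices 7, 15, 23, 31) keep attention; the other
# 28 are Mamba2 blocks, i.e. 28/32 = 0.875, matching the 0_875 suffix
# in the model name.
print(cfg.attn_layers)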