expriment-mla / config.json
Uploaded by joey00072 — checkpoint iter: 2000 — commit 0143254 (verified) — 351 Bytes
{
"activation": "silu",
"attn_type": "mla",
"bias": false,
"d_model": 1024,
"dropout": 0.2,
"head_dim": 64,
"hidden_dim": 1536,
"kv_lora_rank": 64,
"mlp": "GLU",
"num_heads": 46,
"num_kv_heads": 46,
"num_layers": 4,
"q_lora_rank": 192,
"rope_head_dim": 16,
"seq_len": 256,
"vocab_size": 50257,
"weight_tying": false
}