File size: 274 Bytes · commit 3b661a9
{
  "attn_cfg": {},
  "attn_layer_idx": [],
  "d_intermediate": 0,
  "d_model": 768,
  "fused_add_norm": true,
  "n_layer": 24,
  "pad_vocab_size_multiple": 8,
  "residual_in_fp32": true,
  "rms_norm": true,
  "ssm_cfg": {},
  "tie_embeddings": true,
  "vocab_size": 50277
}
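
These hyperparameters (d_model=768, n_layer=24, vocab_size=50277, padded up to 50280 by pad_vocab_size_multiple=8) match the roughly 130M-parameter Mamba variant published as state-spaces/mamba-130m. Below is a minimal sketch of how such a file could be consumed, assuming the `mamba_ssm` package (github.com/state-spaces/mamba) is installed and the JSON is saved locally as `config.json` (the path is illustrative); its `MambaConfig` dataclass uses exactly these field names:

```python
import json

import torch
from mamba_ssm.models.config_mamba import MambaConfig
from mamba_ssm.models.mixer_seq_simple import MambaLMHeadModel

# Parse the JSON shown above into the library's config dataclass.
# Every key in the file maps 1:1 onto a MambaConfig field.
with open("config.json") as f:
    config = MambaConfig(**json.load(f))

# Build the language model from the config. Weights are randomly
# initialized here; the fused kernels expect a CUDA device, and
# fp16 is a common choice for inference.
model = MambaLMHeadModel(config, device="cuda", dtype=torch.float16)
print(sum(p.numel() for p in model.parameters()))  # ~130M parameters
```

To load the trained weights rather than a random initialization, `MambaLMHeadModel.from_pretrained("state-spaces/mamba-130m")` fetches the matching checkpoint from the hub. Note that `tie_embeddings: true` reuses the input embedding matrix as the output projection, and `rms_norm` plus `fused_add_norm` select the fused residual-add + RMSNorm kernel path, with residuals kept in fp32 (`residual_in_fp32`) for numerical stability.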