{
  "architectures": [
    "MambaForCausalLM"
  ],
  "d_model": 1536,
  "hidden_size": 1536,
  "fused_add_norm": true,
  "n_layer": 32,
  "residual_in_fp32": true,
  "rms_norm": true,
  "ssm_cfg": {},
  "transformers_version": "4.39.0.dev0",
  "vocab_size": 104361,
  "num_space": 2048,
  "patch_size": 16,
  "max_patches": 2048,
  "num_channels": 3
}
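
For reference, a minimal sketch of reading this file and checking a few of the relationships it implies. The file path `config.json` is an assumption, and interpreting `patch_size`, `num_channels`, and `max_patches` as a flattened-patch embedding is likewise an assumption, not something the config itself confirms:

```python
import json

# Load the config shown above (path is an assumption; point it at your checkpoint).
with open("config.json") as f:
    cfg = json.load(f)

# Both keys name the model width in this config.
assert cfg["d_model"] == cfg["hidden_size"]  # 1536

# Assumed interpretation: each patch of shape (num_channels, patch_size, patch_size)
# is flattened before projection into the d_model-wide stream, and max_patches
# caps the resulting sequence length.
patch_dim = cfg["num_channels"] * cfg["patch_size"] ** 2  # 3 * 16 * 16 = 768

print(f"{cfg['n_layer']} layers, width {cfg['d_model']}, "
      f"patch dim {patch_dim}, up to {cfg['max_patches']} patches, "
      f"vocab {cfg['vocab_size']}")
```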