sedrickkeh committed
Commit 2543e3c
1 Parent(s): bf23197

Update config.json

Files changed (1):
  1. config.json +33 -3
config.json CHANGED
@@ -1,6 +1,36 @@
 {
-  "d_model": 4096,
+  "architectures": [
+    "MambaForCausalLM"
+  ],
+  "bos_token_id": 0,
+  "conv_kernel": 4,
+  "eos_token_id": 0,
+  "expand": 2,
+  "fused_add_norm": true,
+  "hidden_act": "silu",
+  "hidden_size": 4096,
+  "initializer_range": 0.1,
+  "intermediate_size": 16384,
+  "layer_norm_epsilon": 1e-05,
+  "model_type": "mamba",
   "n_layer": 64,
-  "vocab_size": 50432,
-  "seq_len": 2048
+  "num_hidden_layers": 64,
+  "pad_token_id": 0,
+  "pad_vocab_size_multiple": 8,
+  "rescale_prenorm_residual": false,
+  "residual_in_fp32": true,
+  "rms_norm": true,
+  "state_size": 16,
+  "time_step_floor": 0.0001,
+  "time_step_init_scheme": "random",
+  "time_step_max": 0.1,
+  "time_step_min": 0.001,
+  "time_step_rank": 160,
+  "time_step_scale": 1.0,
+  "torch_dtype": "float32",
+  "transformers_version": "4.39.0.dev0",
+  "use_bias": false,
+  "use_cache": true,
+  "use_conv_bias": true,
+  "vocab_size": 50432
 }
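
For reference, the new keys follow the schema of the transformers MambaConfig class (the file pins transformers_version 4.39.0.dev0, the release line that added native Mamba support). Below is a minimal sketch, not part of the commit, that mirrors the main fields of the updated config.json to show how they are consumed; it assumes transformers >= 4.39 is installed.

from transformers import MambaConfig

# Key fields from the updated config.json; anything not passed here keeps the
# library default (the defaults for pad/bos/eos token ids are 0, matching the file).
config = MambaConfig(
    vocab_size=50432,
    hidden_size=4096,        # replaces the old "d_model" key
    num_hidden_layers=64,    # transformers-native counterpart of "n_layer"
    state_size=16,
    conv_kernel=4,
    expand=2,
    time_step_rank=160,
    use_bias=False,
    use_conv_bias=True,
    hidden_act="silu",
)

print(config.model_type)         # "mamba"
print(config.num_hidden_layers)  # 64

# A MambaForCausalLM built from this config would be randomly initialized;
# the pretrained weights come from the repo's checkpoint files via
# MambaForCausalLM.from_pretrained(...).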