OrionZheng committed
Commit: 67fb92e
Parent: eca9f18

Update config.json

Files changed (1): config.json (+7 -3)
config.json CHANGED

```diff
@@ -6,7 +6,7 @@
     "AutoModelForCausalLM": "modeling_openmoe.OpenMoeForCausalLM"
   },
   "attention_bias": false,
-  "bos_token_id": 2,
+  "bos_token_id": 0,
   "dropout_rate": 0.0,
   "enable_comm_overlap": false,
   "enable_hierarchical_alltoall": false,
@@ -19,6 +19,7 @@
   "hidden_size": 2048,
   "initializer_range": 0.02,
   "intermediate_size": 8192,
+  "label_smoothing": 0.001,
   "layer_norm_epsilon": 1e-06,
   "load_balance_beam_width": 8,
   "load_balance_group_swap_factor": 0.4,
@@ -36,15 +37,18 @@
   "rms_norm_eps": 1e-06,
   "rope_scaling": null,
   "rope_theta": 10000.0,
+  "router_aux_loss_factor": 0.01,
   "router_capacity_factor_eval": 2.0,
   "router_capacity_factor_train": 1.25,
   "router_drop_tks": true,
   "router_min_capacity": 4,
   "router_noisy_policy": null,
   "router_topk": 2,
+  "router_z_loss_factor": 0.0001,
   "tie_word_embeddings": false,
   "torch_dtype": "float32",
   "transformers_version": "4.34.0",
   "use_cache": true,
-  "vocab_size": 256384
-}
+  "vocab_size": 256384,
+  "z_loss_factor": 0.01
+}
```
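In summary, the commit changes `bos_token_id` from 2 to 0 and adds four loss-shaping hyperparameters: `label_smoothing`, `router_aux_loss_factor`, `router_z_loss_factor`, and `z_loss_factor`. A minimal sketch of reading the updated values back through `transformers` follows; the repo id is a placeholder (the checkpoint is not named on this page), and the comments interpreting the `router_*` factors as MoE loss weights are inferred from their names, not stated in this commit.

```python
# Minimal sketch: load the updated config and inspect the fields touched
# by this commit. Requires trust_remote_code=True because the config maps
# AutoModelForCausalLM to the custom modeling_openmoe.OpenMoeForCausalLM.
from transformers import AutoConfig

config = AutoConfig.from_pretrained(
    "OrionZheng/openmoe-base",  # placeholder repo id (assumption)
    trust_remote_code=True,
)

assert config.bos_token_id == 0        # was 2 before this commit
print(config.label_smoothing)          # 0.001
print(config.router_aux_loss_factor)   # 0.01   (presumably the load-balancing aux-loss weight)
print(config.router_z_loss_factor)     # 0.0001 (presumably the router z-loss weight)
print(config.z_loss_factor)            # 0.01
print(config.vocab_size)               # 256384
```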