francislabounty committed
Commit 3d92987
1 Parent(s): ae9364a

Update config.json

Files changed (1): config.json (+2 -2)
config.json CHANGED
@@ -3,7 +3,7 @@
   "adapter_dim": 512,
   "adapter_dropout": 0.0,
   "architectures": [
-    "MistralForCausalLM"
+    "serpdotai/sparsetral-16x7B-v2--modeling_sparsetral.MistralForCausalLM"
   ],
   "attention_dropout": 0.0,
   "auto_map": {
@@ -18,7 +18,7 @@
   "initializer_range": 0.02,
   "intermediate_size": 14336,
   "max_position_embeddings": 32768,
-  "model_type": "mistral",
+  "model_type": "sparsetral",
   "moe_dtype": "bfloat16",
   "moe_scaling": 1,
   "num_attention_heads": 32,