jeffra committed on
Commit
8265e0f
1 Parent(s): 44afd12

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +2 -2
config.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
  "architectures": [
3
- "YakForCausalLM"
4
  ],
5
  "attention_dropout": 0,
6
  "bos_token_id": 1,
@@ -49,7 +49,7 @@
49
  "intermediate_size": 4864,
50
  "max_position_embeddings": 4096,
51
  "max_sequence_length": 4096,
52
- "model_type": "yak",
53
  "moe_eval_capacity_factor": 1,
54
  "moe_layer_frequency": 1,
55
  "moe_min_capacity": 0,
 
1
  {
2
  "architectures": [
3
+ "ArcticForCausalLM"
4
  ],
5
  "attention_dropout": 0,
6
  "bos_token_id": 1,
 
49
  "intermediate_size": 4864,
50
  "max_position_embeddings": 4096,
51
  "max_sequence_length": 4096,
52
+ "model_type": "arctic",
53
  "moe_eval_capacity_factor": 1,
54
  "moe_layer_frequency": 1,
55
  "moe_min_capacity": 0,