lordspline committed on
Commit
4bdea02
·
verified ·
1 Parent(s): 761f8a9

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +1 -1
config.json CHANGED
@@ -19,7 +19,7 @@
19
  "max_position_embeddings": 8192,
20
  "model_type": "gemma2",
21
  "num_attention_heads": 16,
22
- "num_hidden_layers": 42,
23
  "num_key_value_heads": 8,
24
  "pad_token_id": 0,
25
  "query_pre_attn_scalar": 224,
 
19
  "max_position_embeddings": 8192,
20
  "model_type": "gemma2",
21
  "num_attention_heads": 16,
22
+ "num_hidden_layers": 14,
23
  "num_key_value_heads": 8,
24
  "pad_token_id": 0,
25
  "query_pre_attn_scalar": 224,