Incorrect intermediate_size
Browse files

This fixes the model for `llama.cpp` at least; untested on `transformers`.
- config.json +1 -1
config.json
CHANGED
```diff
@@ -9,7 +9,7 @@
     "hidden_act": "silu",
     "hidden_size": 3584,
     "initializer_range": 0.02,
-    "intermediate_size": 18944,
+    "intermediate_size": 20480,
     "max_position_embeddings": 131072,
     "max_window_layers": 28,
     "model_type": "qwen2_moe",
```