Upload 2 files
- darkforest_v2_step1.yml +15 -0
- darkforest_v2_step2.yml +17 -0
darkforest_v2_step1.yml
ADDED
@@ -0,0 +1,15 @@
+slices:
+  - sources:
+      - model: ../microsoft_Orca-2-13b_flat
+        layer_range: [0, 16]
+  - sources:
+      - model: ../KoboldAI_LLaMA2-13B-Erebus-v3
+        layer_range: [8, 24]
+  - sources:
+      - model: ../microsoft_Orca-2-13b_flat
+        layer_range: [17, 32]
+  - sources:
+      - model: ../KoboldAI_LLaMA2-13B-Erebus-v3
+        layer_range: [25, 40]
+merge_method: passthrough
+dtype: float16
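
Step 1 is a passthrough merge: mergekit stacks alternating slices of Orca-2-13b and LLaMA2-13B-Erebus-v3 (the four layer_range blocks above) into one taller model, copying each slice verbatim rather than blending weights. A minimal sketch of running it, assuming mergekit is installed so its mergekit-yaml CLI is on PATH; the output directory is a placeholder, not part of this commit:

    import subprocess

    # Minimal sketch: run the step-1 passthrough config with the mergekit CLI.
    # "./darkforest_v2_step1" is a placeholder output directory, not from this commit.
    subprocess.run(
        ["mergekit-yaml", "darkforest_v2_step1.yml", "./darkforest_v2_step1", "--cuda"],
        check=True,
    )
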
darkforest_v2_step2.yml
ADDED
@@ -0,0 +1,17 @@
+models:
+  - model: "F:\\merger\\mergekit\\again1_rev"
+  - model: "F:\\text_translation\\Easy-Translate\\nllb-moe-54b\\jebcarter_psyonic-cetacean-20B"
+    parameters:
+      weight: 0.5
+      density: 1.0
+  - model: "F:\\text_models\\BigMaid_20B_v1.0"
+    parameters:
+      weight: 0.5
+      density: 1.0
+merge_method: dare_ties
+#tokenizer_source: union
+base_model: "F:\\merger\\mergekit\\again1_rev"
+parameters:
+  int8_mask: true
+dtype: bfloat16
+name: darkforestv2_dire_ties
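
Step 2 applies dare_ties on top of the again1_rev base: psyonic-cetacean-20B and BigMaid_20B_v1.0 each contribute their delta from the base at weight 0.5, with density 1.0 (so no delta dropping), int8 masking, and bfloat16 output. A minimal sketch that inspects this config and then runs it, again assuming the mergekit-yaml CLI is available; the output directory is a placeholder, and the commit does not state whether again1_rev is itself the step-1 output:

    import subprocess
    import yaml

    # Minimal sketch: print the merge plan from the step-2 config, then run it.
    # "./darkforest_v2" is a placeholder output directory, not from this commit.
    with open("darkforest_v2_step2.yml", "r", encoding="utf-8") as f:
        cfg = yaml.safe_load(f)

    print("method:", cfg["merge_method"], "| base:", cfg["base_model"])
    for entry in cfg["models"]:
        print(" ", entry["model"], entry.get("parameters", {}))

    subprocess.run(
        ["mergekit-yaml", "darkforest_v2_step2.yml", "./darkforest_v2", "--cuda"],
        check=True,
    )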