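# mergekit configuration: TIES-merges three 7B models into the base model
# LeroyDyer/Mixtral_AI_Cyber_3.1_SFT, producing a float16 merged model.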
models:
  - model: liminerity/M7-7b
    parameters:
      density: [0.87, 0.721, 0.451] # density gradient, interpolated across layer groups
      weight: 0.876
  - model: chihoonlee10/T3Q-Mistral-Orca-Math-DPO
    parameters:
      density: 0.232
      weight: [0.36, 0.3, 0.437, 0.76] # weight gradient, interpolated across layer groups
  - model: yam-peleg/Experiment26-7B
    parameters:
      density: 0.475
      weight:
        - filter: mlp # weight 0.5 applies to MLP tensors only
          value: 0.5
        - value: 0 # all other tensors from this model contribute nothing
merge_method: ties
base_model: LeroyDyer/Mixtral_AI_Cyber_3.1_SFT
parameters:
  normalize: true # rescale merge weights so they sum to 1
  int8_mask: true # store intermediate masks as int8 to save memory
dtype: float16
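
# Usage sketch (paths below are placeholders, not from the original file;
# assumes mergekit is installed, e.g. via `pip install mergekit`):
#
#   mergekit-yaml config.yml ./merged-model --cuda
#
# where config.yml is this file saved locally and ./merged-model is the
# output directory; --cuda runs the merge arithmetic on GPU if available.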