# SomeModelsMerge-7b — mergekit_config.yml
# Source: huggingface.co (uploaded via huggingface_hub, commit 6cef70c)
# DARE-TIES merge of ten 7B models onto the liminerity/M7-7b base.
# Per-model `weight` sets the merge contribution and `density` the fraction
# of delta parameters retained; `normalize: true` rescales weights to sum to 1.
models:
  # Base model entry — no parameters needed for the base.
  - model: liminerity/M7-7b
  - model: liminerity/M7-7b
    parameters:
      weight: 0.2
      density: 0.88
  - model: Kukedlc/NeuralSirKrishna-7b
    parameters:
      weight: 0.1
      density: 0.66
  - model: Kukedlc/MyModelsMerge-7b
    parameters:
      weight: 0.1
      density: 0.66
  - model: AurelPx/Percival_01-7b-slerp
    parameters:
      weight: 0.1
      density: 0.33
  - model: MatthieuJ/Jason1903_SLERP
    parameters:
      weight: 0.1
      density: 0.33
  - model: MTSAIR/multi_verse_model
    parameters:
      weight: 0.1
      density: 0.66
  - model: Gille/StrangeMerges_30-7B-slerp
    parameters:
      weight: 0.1
      density: 0.55
  - model: chihoonlee10/T3Q-Mistral-Orca-Math-DPO
    parameters:
      weight: 0.1
      density: 0.22
  - model: yam-peleg/Experiment28-7B
    parameters:
      weight: 0.1
      density: 0.44
  - model: mlabonne/UltraMerge-7B
    parameters:
      weight: 0.1
      density: 0.77

merge_method: dare_ties
base_model: liminerity/M7-7b

# Global merge parameters.
parameters:
  int8_mask: true   # use int8 masks to reduce memory during merging
  normalize: true   # rescale the model weights so they sum to 1
dtype: bfloat16