ultra0/mergekit_config.yml
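# DARE-TIES merge of three source models over the first 24 layers,
# with starsnatched/MemGPT-2B as the base model (see merge_method and
# base_model at the bottom of this file).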
slices:
  - sources:
      - layer_range: [0, 24]
        model: starsnatched/MemGPT-2B
        parameters:
          density: [1, 0.7, 0.1] # density gradient
          weight: 1.0
      - layer_range: [0, 24]
        model: liminerity/Omningotex-7b-slerp
        parameters:
          density: 0.5
          weight: [0, 0.3, 0.7, 1] # weight gradient
      - layer_range: [0, 24]
        model: liminerity/binarized-ingotrix-slerp-7b
        parameters:
          density: 0.33
          weight:
            - filter: mlp
              value: 0.5
            - value: 0
merge_method: dare_ties
base_model: starsnatched/MemGPT-2B
parameters:
  normalize: true
  int8_mask: true
dtype: bfloat16
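A minimal sketch of how this config could be executed with mergekit's Python API (assuming mergekit is installed; the output directory ./ultra0 and the option values are illustrative, not taken from the repo):

import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Parse the YAML above into mergekit's validated configuration object.
with open("mergekit_config.yml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Run the dare_ties merge and write the merged model to ./ultra0.
run_merge(
    merge_config,
    out_path="./ultra0",
    options=MergeOptions(
        cuda=False,           # set True to merge on GPU
        copy_tokenizer=True,  # copy the base model's tokenizer into the output
        lazy_unpickle=True,   # lower peak memory while loading checkpoints
    ),
)

Equivalently, the mergekit-yaml command-line entry point accepts the config path and an output directory, e.g. mergekit-yaml mergekit_config.yml ./ultra0.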