L3.1-Siithamo-v0.3-8B / mergekit_config.yml
kromeurus's picture
Upload folder using huggingface_hub
d502fb0 verified
raw
history blame contribute delete
443 Bytes
---
# mergekit configuration: "breadcrumbs" merge of two 32-layer models.
# Restored nesting — the scraped copy had the slice/parameter indentation
# flattened, which does not parse into the structure mergekit expects.

# Model whose tokenizer/config metadata anchors the merge.
base_model: merge/siithamol3.1
# Compute the merge in float32 for precision...
dtype: float32
merge_method: breadcrumbs
# ...then write the merged weights out as bfloat16.
out_dtype: bfloat16
parameters:
  # Mergekit convention: float flags, 1.0 = enabled, 0.0 = disabled.
  int8_mask: 1.0
  normalize: 0.0
slices:
- sources:
  # Primary model: weight ramps up 0.5 -> 1.0 across the layer range.
  - layer_range: [0, 32]
    model: merge/siithamol3.1
    parameters:
      density: 0.9
      gamma: 0.01
      weight: [0.5, 0.8, 0.9, 1.0]
  # Secondary model: complementary ramp down 0.5 -> 0.0 (the two weight
  # gradients sum to 1.0 at every point).
  - layer_range: [0, 32]
    model: merge/formaxext.3.1
    parameters:
      density: 0.9
      gamma: 0.01
      weight: [0.5, 0.2, 0.1, 0.0]