# L3-ColdBrew-Arcadia / mergekit_config.yml
merge_method: passthrough
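# Passthrough copies each listed layer slice verbatim into the output
# model with no weight blending; the slices below are simply stacked in
# order. Assuming both sources are standard 32-layer Llama-3-8B models,
# the result has 12 + 12 + 8 + 8 + 8 = 48 transformer layers.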
slices:
  # Lower Layers (0–11): ColdBrew's foundation
  - sources:
      - layer_range: [0, 12]
        model: SvalTek/L3-ColdBrew-Astrid
  # Reasoning Layers (12–23): FPHam for logical depth
  - sources:
      - layer_range: [12, 24]
        model: FPHam/L3-8B-Everything-COT
  # Reflection Layers (24–31): FPHam for reasoning and reflection
  - sources:
      - layer_range: [24, 32]
        model: FPHam/L3-8B-Everything-COT
  # Duplicate Layers (24–31): repeated twice more for parameter growth.
  # Each repetition must be its own slice; passthrough accepts only one
  # source per slice, so listing both ranges under a single `sources`
  # entry would fail.
  - sources:
      - layer_range: [24, 32] # First duplicate
        model: FPHam/L3-8B-Everything-COT
  - sources:
      - layer_range: [24, 32] # Second duplicate
        model: FPHam/L3-8B-Everything-COT
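# A minimal sketch of running this merge, assuming mergekit is installed
# (pip install mergekit) and both source models can be downloaded; the
# output directory name is illustrative:
#
#   mergekit-yaml mergekit_config.yml ./L3-ColdBrew-Arcadia --cuda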