0001lp / mergekit_config.yml
slices:
  - sources:
      - model: IlyaGusev/saiga_llama3_8b
        layer_range: [0, 32]
  - sources:
      - model: Muhammad2003/Llama3-8B-OpenHermes-DPO
        layer_range: [24, 32]
  - sources:
      - model: abacusai/Llama-3-Smaug-8B
        layer_range: [26, 32]
  - sources:
      - model: NousResearch/Meta-Llama-3-8B-Instruct
        layer_range: [28, 32]
merge_method: passthrough
dtype: bfloat16
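
This is a passthrough merge: mergekit stacks the listed layer slices in order (all 32 layers of saiga_llama3_8b followed by the top slices of the other three models), so the result has more decoder layers than a stock Llama-3-8B. Below is a minimal sketch of applying this config with mergekit's Python API; the output path and option values are placeholders, and the names MergeConfiguration, MergeOptions, and run_merge are assumed from mergekit's documented interface rather than anything in this repository.

# Sketch: run mergekit_config.yml through mergekit's Python API.
# Assumes `pip install mergekit`; API names follow mergekit's documentation
# and the output directory is a placeholder.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

with open("mergekit_config.yml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    out_path="./merged-model",           # placeholder output directory
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # merge on GPU if one is available
        copy_tokenizer=True,             # copy a tokenizer into the output
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)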