---
# mergekit configuration: DARE-TIES merge of three Llama-3-8B fine-tunes
# onto the NousResearch/Meta-Llama-3-8B base model.
#
# density  — fraction of delta weights retained per model (DARE drop rate = 1 - density)
# weight   — scaling factor applied to each model's retained deltas

base_model: NousResearch/Meta-Llama-3-8B
dtype: bfloat16
merge_method: dare_ties
parameters:
  # Use an int8 mask during merging to reduce memory usage.
  int8_mask: 1.0
slices:
  - sources:
      - layer_range: [0, 32]
        model: Weyaxi/Einstein-v6.1-Llama3-8B
        parameters:
          density: 0.1
          weight: 1.0
      - layer_range: [0, 32]
        model: asiansoul/Versatile-Llama-3-8B-1m
        parameters:
          density: 0.2
          weight: 0.35
      - layer_range: [0, 32]
        model: NousResearch/Hermes-2-Pro-Llama-3-8B
        parameters:
          density: 0.5
          weight: 0.23
      # Base model included as a source; no density/weight parameters —
      # it serves as the reference point for the delta computation.
      - layer_range: [0, 32]
        model: NousResearch/Meta-Llama-3-8B