models:
  - model: meta-llama/Llama-2-7b-hf
  - model: meta-llama/CodeLlama-7b-Instruct-hf
    parameters:
      density: 0.65
      weight: 1.0
  - model: mrm8488/llama-2-coder-7b
    parameters:
      density: 0.35
      weight: 0.5
merge_method: dare_linear
base_model: meta-llama/Llama-2-7b-hf
parameters:
  int8_mask: true
dtype: bfloat16
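To make the `density` and `weight` parameters concrete, below is a minimal, hypothetical sketch of what a DARE-linear merge does to a single weight tensor: each fine-tuned model's delta from the base is randomly sparsified to the given density, rescaled by 1/density so the expected update is preserved, weighted, and summed back onto the base. The function and variable names are illustrative, and mergekit's actual implementation (weight normalization, the `int8_mask` optimization, per-tensor handling) may differ in detail.

```python
# Illustrative sketch of a DARE-linear update for one tensor; not mergekit's code.
import torch

def dare_linear_merge(base: torch.Tensor,
                      finetuned: list[torch.Tensor],
                      densities: list[float],
                      weights: list[float]) -> torch.Tensor:
    """Merge task vectors onto a base tensor with drop-and-rescale (DARE)."""
    merged_delta = torch.zeros_like(base, dtype=torch.float32)
    for ft, density, weight in zip(finetuned, densities, weights):
        delta = ft.float() - base.float()                    # task vector
        # Keep each element with probability `density` ...
        mask = torch.bernoulli(torch.full_like(delta, density))
        # ... and rescale survivors by 1/density so the expected delta is unchanged.
        delta = delta * mask / density
        merged_delta += weight * delta                       # linear combination
    return (base.float() + merged_delta).to(base.dtype)

# Toy usage: random tensors stand in for one weight matrix from each checkpoint.
base = torch.randn(16, 16, dtype=torch.bfloat16)
code_llama = base + 0.1 * torch.randn_like(base)
llama_coder = base + 0.1 * torch.randn_like(base)
merged = dare_linear_merge(base, [code_llama, llama_coder],
                           densities=[0.65, 0.35], weights=[1.0, 0.5])
print(merged.shape, merged.dtype)
```

In practice the YAML config above is handed to mergekit's command-line entry point, which applies this kind of per-tensor merge across every layer of the three checkpoints and writes out a merged model in `bfloat16`.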