---
# mergekit configuration: DARE-TIES merge of two Llama-3-8B fine-tunes,
# using NorskGPT-Llama3-8b as the base model.
# NOTE(review): per-model parameter semantics (density/weight) are defined by
# mergekit's dare_ties method — confirm against the mergekit docs in use.
models:
  # The base model needs no per-model parameters under dare_ties;
  # it is the reference the task vectors are computed against.
  - model: bineric/NorskGPT-Llama3-8b
  - model: mlabonne/ChimeraLlama-3-8B
    parameters:
      density: 0.55  # presumably fraction of delta weights kept (DARE drop) — verify
      weight: 0.6    # relative merge weight for this model's contribution
merge_method: dare_ties
base_model: bineric/NorskGPT-Llama3-8b
parameters:
  int8_mask: true  # NOTE(review): looks like a memory-saving mask option — confirm
dtype: bfloat16