---
# mergekit configuration: DARE-TIES merge of four Mistral-7B fine-tunes
# onto the mistralai/Mistral-7B-v0.1 base.
#
# Each non-base entry carries:
#   density — fraction of delta weights retained by DARE sparsification
#   weight  — per-layer-group mixing weights (4 entries) for this model
models:
  - model: mistralai/Mistral-7B-v0.1
    # No parameters necessary for base model
  - model: cognitivecomputations/WestLake-7B-v2-laser
    parameters:
      density: 0.58
      weight: [0.3877, 0.1636, 0.186, 0.0502]
  - model: senseable/garten2-7b
    parameters:
      density: 0.58
      weight: [0.234, 0.2423, 0.2148, 0.2775]
  - model: berkeley-nest/Starling-LM-7B-alpha
    parameters:
      density: 0.58
      weight: [0.1593, 0.1573, 0.1693, 0.3413]
  - model: mlabonne/AlphaMonarch-7B
    parameters:
      density: 0.58
      weight: [0.219, 0.4368, 0.4299, 0.331]

merge_method: dare_ties
base_model: mistralai/Mistral-7B-v0.1
parameters:
  # Use int8 masks during merging to reduce memory footprint.
  int8_mask: true
# Output tensor dtype for the merged model.
dtype: bfloat16