models:
  - model: codellama/CodeLlama-34b-Instruct-hf
    parameters:
      density: [1, 0.7, 0.1] # density gradient
      weight: 1.0
  - model: Phind/Phind-CodeLlama-34B-v2
    parameters:
      density: 0.5
      weight: [0, 0.3, 0.7, 1] # weight gradient
merge_method: dare_ties
base_model: codellama/CodeLlama-34b-Instruct-hf
parameters:
  normalize: true
  int8_mask: true
dtype: float16
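As a minimal sketch of how this config might be applied, the snippet below uses mergekit's documented Python entry points (MergeConfiguration, MergeOptions, run_merge); the file name dare_ties.yml and the output path are hypothetical placeholders, not part of the config above.

# Sketch: run the dare_ties merge defined in the config above via mergekit's Python API.
# Assumes mergekit is installed and the config has been saved as "dare_ties.yml"
# (file and output paths are hypothetical).
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Parse the YAML config into mergekit's configuration object
with open("dare_ties.yml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the merge and write the merged model to disk
run_merge(
    merge_config,
    out_path="./CodeLlama-34b-dare-ties",
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use a GPU if one is present
        copy_tokenizer=True,             # copy the base model's tokenizer into the output
        lazy_unpickle=True,              # reduce peak memory while loading checkpoints
    ),
)

The same config file can also be passed to the mergekit-yaml command-line entry point instead of calling the Python API directly.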