---
# mergekit configuration: DARE-TIES merge of two Llama3-8B fine-tunes.
# NOTE(review): comment below names a "7B" model but both inputs are 8B — confirm intended output name.
models: # Top-Loyal-Bruins-Maid-DARE-7B
  - model: cognitivecomputations/dolphin-2.9-llama3-8b
    # no parameters necessary for base model
  - model: Weyaxi/Einstein-v6.1-Llama3-8B
    parameters:
      # per-model merge parameters: contribution weight and DARE density
      weight: 0.5
      density: 0.8
merge_method: dare_ties
base_model: cognitivecomputations/dolphin-2.9-llama3-8b
parameters:
  # global merge parameters
  int8_mask: true
dtype: bfloat16