---
# mergekit configuration: DARE-TIES merge of Meta-Llama-3.1-8B-Instruct
# into the cstr/llama3-8b-spaetzle-v34 base model.
models:
  - model: cstr/llama3-8b-spaetzle-v34
    # no parameters necessary for base model
  - model: sparsh35/Meta-Llama-3.1-8B-Instruct
    parameters:
      density: 0.65  # fraction of delta weights retained (DARE drop rate = 0.35)
      weight: 0.5    # merge weight of this model's task vector
merge_method: dare_ties
base_model: cstr/llama3-8b-spaetzle-v34
parameters:
  int8_mask: true  # NOTE(review): mergekit global option — confirm intent against mergekit docs
dtype: bfloat16
random_seed: 0  # fixed seed so the stochastic DARE dropout is reproducible
tokenizer_source: base  # reuse the base model's tokenizer