models:
  - model: grimjim/Mistral-7B-Instruct-demi-merge-v0.3-7B
    parameters:
      density: 0.5
      weight: 1.0
  - model: DataPilot/ArrowPro-7B-KillerWhale
    parameters:
      density: 0.2
      weight: 0.5
  - model: icefog72/IceBlendedLatteRP-7b
    parameters:
      density: 0.1
      weight: 0.2
  - model: Elizezen/Berghof-NSFW-7B
    parameters:
      density: 0.7
      weight: 0.4
  - model: mlabonne/OmniBeagle-7B
    parameters:
      density: 0.2
      weight: 0.4
  - model: Kaoeiri/AkiroErotures
    parameters:
      density: 0.5
      weight: 0.7
merge_method: ties
tokenizer_source: union
base_model: Kaoeiri/AkiroErotures
parameters:
  normalize: true
  int8_mask: true
dtype: bfloat16
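A configuration like the one above can be run with mergekit's command-line tool; the sketch below assumes the YAML is saved as config.yaml and writes the merged model to ./merged-model (both names are placeholders, not part of the original card):

mergekit-yaml config.yaml ./merged-model --cuda

The --cuda flag offloads tensor computation to the GPU if one is available; omit it to merge on CPU.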