L3.1-RP-Hero-BigTalker-8B / mergekit_config.yml
models:
  - model: G:/7B/Llama-3.1-8B-ArliAI-RPMax-v1.1
    parameters:
      weight: [1, 1, 0.75, 0.5, 0.25, 0.25, 0.05, 0.01]
  - model: G:/7B/L3-Umbral-Mind-RP-v0.3-8B
    parameters:
      weight: [0, 0, 0.25, 0.35, 0.4, 0.25, 0.30, 0.04]
  - model: G:/7B/L3-Pantheon-RP-1.0-8b
    parameters:
      weight: [0, 0, 0, 0.15, 0.35, 0.5, 0.65, 0.95]
merge_method: dare_ties
tokenizer_source: union
base_model: G:/7B/Llama-3.1-8B-DarkIdol-Instruct-1.2-Uncensored
dtype: bfloat16
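
# Usage sketch (assumed invocation; the output path below is illustrative, adjust as needed).
# mergekit reads list-valued parameters such as the weight lists above as gradients
# interpolated across the model's layers, so each donor model contributes more heavily
# in some layer ranges than others.
#
#   pip install mergekit
#   mergekit-yaml mergekit_config.yml ./L3.1-RP-Hero-BigTalker-8B --cuda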