# Source artifact: commit e4cfa02, 502 bytes (extraction banner converted to a comment so the file parses as YAML)
# mergekit configuration: DARE-TIES merge of four 7B models onto a
# Mistral-7B DPO-instruct base, computed in bfloat16.
# NOTE(review): `density`/`weight` semantics (fraction of delta weights
# retained / merge contribution) are assumed from mergekit's dare_ties
# method — confirm against the mergekit docs for the pinned version.
base_model: mitultiwari/mistral-7B-instruct-dpo
dtype: bfloat16
merge_method: dare_ties
models:
# Base model listed without parameters; presumably it serves as the
# reference for the task-vector deltas — verify against mergekit behavior.
- model: mitultiwari/mistral-7B-instruct-dpo
- model: senseable/WestLake-7B-v2
  parameters:
    density: 0.75
    weight: 0.30
- model: SanjiWatsuki/Silicon-Maid-7B
  parameters:
    density: 0.75
    weight: 0.30
- model: jan-hq/stealth-v1.3
  parameters:
    density: 0.85
    weight: 0.30
# Lowest density and weight of the four: smallest contribution to the merge.
- model: Guilherme34/Samantha-v2
  parameters:
    density: 0.65
    weight: 0.10
# Explicit weights sum to 1.0 (0.30 + 0.30 + 0.30 + 0.10).
parameters:
  int8_mask: true