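# mergekit configuration: a DARE-TIES merge of Llama-3-8B fine-tunes onto the
# mlabonne/ChimeraLlama-3-8B base. For each contributing model, `density` is
# the fraction of its delta (task-vector) parameters retained after DARE's
# random pruning, and `weight` scales that model's contribution to the merge.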
models:
  - model: mlabonne/ChimeraLlama-3-8B
    # No parameters necessary for base model
  - model: mlabonne/ChimeraLlama-3-8B
    parameters:
      density: 0.6
      weight: 0.2
  - model: nbeerbower/llama-3-dragonmaid-8B
    parameters:
      density: 0.55
      weight: 0.4
  - model: cognitivecomputations/dolphin-2.9-llama3-8b
    parameters:
      density: 0.55
      weight: 0.2
  - model: WesPro/F2PhenotypeDPO
    parameters:
      density: 0.55
      weight: 0.2
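# dare_ties: randomly prune each fine-tune's deltas down to `density`, rescale
# the survivors, then resolve sign conflicts TIES-style before adding them to
# the base model. `int8_mask` stores intermediate masks in int8 to save memory.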
merge_method: dare_ties
base_model: mlabonne/ChimeraLlama-3-8B
parameters:
  int8_mask: true
dtype: float16
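
# A minimal usage sketch (assumptions: mergekit installed via `pip install mergekit`,
# this file saved as config.yaml, and "merge" as an illustrative output directory):
#   mergekit-yaml config.yaml merge --copy-tokenizer --lazy-unpickle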