# Source revision: e82bb87
---
# mergekit passthrough ("frankenmerge") config: stacks the listed layer
# ranges from each source model end-to-end, in order, without averaging.
#
# NOTE(review): the two sources are different architectures
# (Phi-3-derived 4.5B vs. Llama-3.1-8B) with different hidden sizes;
# passthrough layer-stacking across incompatible architectures will
# normally fail or produce a broken model — confirm this is intended.
slices:
  # Layers 15-29 (exclusive upper bound) from the Phi-3 derivative.
  - sources:
      - model: cognitivecomputations/Dolphin-2.9.1-Phi-3-Kensho-4.5B-abliterated-v3
        layer_range: [15, 29]
  # Layers 20-32 from the Llama-3.1 derivative, appended after the above.
  - sources:
      - model: bunnycore/HyperLlama-3.1-8B
        layer_range: [20, 32]
merge_method: passthrough
dtype: bfloat16