base_model: meta-llama/Meta-Llama-3.1-8B-Instruct+grimjim/Llama-3-Instruct-abliteration-LoRA-8B
dtype: bfloat16
merge_method: task_arithmetic
parameters:
  normalize: false
slices:
- sources:
  - layer_range: [0, 32]
    model: meta-llama/Meta-Llama-3.1-8B-Instruct+grimjim/Llama-3-Instruct-abliteration-LoRA-8B
    parameters:
      weight: 1.0
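
To actually run this merge, here is a minimal sketch using mergekit's Python API. The file path, output directory, and option values are assumptions, not part of the original card; the `+` in the model reference is mergekit's syntax for applying a LoRA adapter on top of the named base model before merging.

```python
# A minimal sketch of driving this merge from Python.
# Assumptions: mergekit is installed, the YAML above is saved as
# "merge-config.yaml", and "./merged-model" is a writable output directory.
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Load the merge configuration from the YAML shown above.
with open("merge-config.yaml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the merge and write the resulting model to the output directory.
run_merge(
    merge_config,
    out_path="./merged-model",
    options=MergeOptions(
        cuda=False,           # set True to run the merge on GPU
        copy_tokenizer=True,  # carry the base model's tokenizer into the output
    ),
)
```

The same merge can be run from the command line with `mergekit-yaml merge-config.yaml ./merged-model`.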