---
# mergekit SLERP merge configuration.
# Blends Meta-Llama-3.1-8B-Instruct with hkust-nlp/dart-math-llama3-8b-prop2diff
# across all 32 transformer layers using spherical linear interpolation.
base_model: meta-llama/Meta-Llama-3.1-8B-Instruct
dtype: float16
merge_method: slerp
parameters:
  # Interpolation factor: 0.0 = pure base model, 1.0 = pure second model.
  t: 0.4
slices:
  - sources:
      - layer_range: [0, 32]
        model: meta-llama/Meta-Llama-3.1-8B-Instruct
      - layer_range: [0, 32]
        model: hkust-nlp/dart-math-llama3-8b-prop2diff