base_model: grimjim/Llama-3-Instruct-8B-SPPO-Iter3-SimPO-merge
dtype: bfloat16
merge_method: task_arithmetic
parameters:
  normalize: false
slices:
- sources:
  - layer_range: [0, 32]
    model: grimjim/Llama-3-Instruct-8B-SPPO-Iter3-SimPO-merge
  - layer_range: [0, 32]
    model: tokyotech-llm/Llama-3-Swallow-8B-Instruct-v0.1
    parameters:
      weight: 0.1
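As a rough usage sketch (assuming mergekit is installed and the config above is saved under a hypothetical name such as config.yml), the merge could be produced with mergekit's mergekit-yaml entry point:

mergekit-yaml config.yml ./merged-model

With task_arithmetic, the output is the base model plus each listed model's task vector (its delta from the base) scaled by its weight; here only 0.1 of the tokyotech-llm/Llama-3-Swallow-8B-Instruct-v0.1 delta is added, and normalize: false leaves that weight unscaled.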