---
# mergekit merge configuration.
# Task-arithmetic merge over all 80 layers of Llama-3.1-70B-Instruct.
# NOTE(review): the "+...-LORA" suffix on the model path appears to be
# mergekit's syntax for applying a LoRA adapter on top of the base model
# at load time — confirm the adapter path exists alongside the base model.
base_model: ./meta-llama/Meta-Llama-3.1-70B-Instruct+Llama-3-70B-Instruct-abliterated-LORA
dtype: bfloat16
merge_method: task_arithmetic
parameters:
  # Do not renormalize task-vector weights after summation.
  normalize: false
slices:
  - sources:
      # Single source covering every transformer layer [0, 80).
      - layer_range: [0, 80]
        model: ./meta-llama/Meta-Llama-3.1-70B-Instruct+Llama-3-70B-Instruct-abliterated-LORA
        parameters:
          # Full-strength contribution from the (base + LoRA) model.
          weight: 1.0