# SLERP merge of two Mistral-7B fine-tunes across all 32 transformer layers.
slices:
  - sources:
      - model: OpenPipe/mistral-ft-optimized-1218
        layer_range: [0, 32]
      - model: mlabonne/NeuralHermes-2.5-Mistral-7B
        layer_range: [0, 32]
merge_method: slerp
base_model: OpenPipe/mistral-ft-optimized-1218
parameters:
  t:
    # Interpolation factor t: 0 keeps the base model's weights, 1 keeps the
    # other model's. Self-attention and MLP tensors get mirrored gradients
    # across the layer range; all remaining tensors blend equally (t = 0.5).
    - filter: self_attn
      value: [0, 0.5, 0.3, 0.7, 1]
    - filter: mlp
      value: [1, 0.5, 0.7, 0.3, 0]
    - value: 0.5
dtype: bfloat16
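
Assuming the config above is saved as config.yaml, the sketch below shows one way to run the merge through mergekit's Python entry point. The run_merge and MergeOptions names follow mergekit's published usage, but treat the exact option set and the output path ./merge as assumptions that may vary between releases; the CLI equivalent would be `mergekit-yaml config.yaml ./merge --copy-tokenizer`.

import yaml
import torch
from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Parse the YAML file into mergekit's MergeConfiguration model.
with open("config.yaml", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Run the SLERP merge and write the merged model to ./merge (assumed path).
run_merge(
    merge_config,
    out_path="./merge",
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # merge on GPU when one is present
        copy_tokenizer=True,             # copy the base model's tokenizer
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)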