ECE-PRYMMAL-YL-1B-SLERP-V1 / mergekit_config.yml
slices:
  - sources:
      - model: fblgit/miniclaus-qw1.5B-UNAMGS
        layer_range: [0, 24] # Adjust based on layer compatibility and model size
      - model: Goekdeniz-Guelmez/Josiefied-Qwen2.5-1.5B-Instruct-abliterated-v1
        layer_range: [0, 24]
merge_method: slerp
base_model: fblgit/miniclaus-qw1.5B-UNAMGS
parameters:
  t:
    - filter: self_attn
      value: [1, 0.75, 0.5, 0.25, 0] # Self-attention interpolation schedule across depth; adjust to the chosen merging strategy
    - filter: mlp
      value: [0, 0.25, 0.5, 0.75, 1] # MLP schedule, mirroring the self-attention gradient
    - value: 0.65 # Default interpolation coefficient for all remaining tensors, tuned for balance
dtype: bfloat16
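
For reference, merge_method: slerp interpolates each pair of weight tensors along the arc between them on the unit hypersphere rather than linearly; in mergekit, t = 0 keeps the base_model tensor and t = 1 keeps the other model's, with the per-filter lists above sampled across layer depth. The sketch below is a minimal, self-contained illustration of that operation for two same-shaped tensors, not mergekit's actual implementation; the slerp helper, its eps tolerance, and the linear fallback for near-parallel tensors are assumptions made here for clarity.

import torch

def slerp(t: float, a: torch.Tensor, b: torch.Tensor, eps: float = 1e-8) -> torch.Tensor:
    """Spherical linear interpolation: t=0 returns a (base model), t=1 returns b."""
    a_flat, b_flat = a.flatten().float(), b.flatten().float()
    # Angle between the two tensors, treated as directions on the unit hypersphere.
    a_dir = a_flat / (a_flat.norm() + eps)
    b_dir = b_flat / (b_flat.norm() + eps)
    dot = torch.clamp(a_dir @ b_dir, -1.0, 1.0)
    omega = torch.acos(dot)
    # Nearly parallel tensors: fall back to plain linear interpolation.
    if omega.abs() < eps:
        return (1 - t) * a + t * b
    # Standard slerp weights, applied to the unnormalized tensors.
    sin_omega = torch.sin(omega)
    wa = torch.sin((1 - t) * omega) / sin_omega
    wb = torch.sin(t * omega) / sin_omega
    return (wa * a_flat + wb * b_flat).reshape(a.shape).to(a.dtype)

A merge with this file is typically run through mergekit's CLI, e.g. mergekit-yaml mergekit_config.yml ./ECE-PRYMMAL-YL-1B-SLERP-V1 (output path assumed here), which reads the slices, applies the t schedule per filter, and writes the merged checkpoint in bfloat16.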