models:
  - model: NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO
  - model: NeverSleep/Noromaid-v0.4-Mixtral-Instruct-8x7b-Zloss
merge_method: slerp
base_model: NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO
parameters:
  t:  # interpolation factor: 0 keeps the base model, 1 keeps the other model
    - filter: self_attn
      value: 0.15
    - filter: mlp
      value: [0.35, 0.25, 0.15, 0.10]  # gradient of t values interpolated across the layers
    - value: 0.15  # fallback for rest of tensors
dtype: bfloat16
tokenizer_source: model:NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO
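To run the merge, the config above can be fed to mergekit. The snippet below is a minimal sketch assuming mergekit is installed and exposes `MergeConfiguration`, `MergeOptions`, and `run_merge` as in its README; `slerp-config.yml` and `./merged-model` are placeholder paths.

```python
# Minimal sketch: run the SLERP merge defined above via mergekit's Python API.
# Assumes the YAML config is saved as slerp-config.yml (placeholder path).
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_PATH = "slerp-config.yml"  # the config shown above
OUTPUT_PATH = "./merged-model"    # where the merged weights are written

with open(CONFIG_PATH, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # merge on GPU if one is available
        copy_tokenizer=True,             # respects tokenizer_source above
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)
```

Equivalently, the `mergekit-yaml` CLI can be pointed at the same file (e.g. `mergekit-yaml slerp-config.yml ./merged-model --cuda`).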