models:
  - model: cognitivecomputations/dolphin-2.2.1-mistral-7b
    parameters:
      density: [1, 0.7, 0.1] # density gradient
      weight: 1.0
  - model: HuggingFaceH4/zephyr-7b-beta
    parameters:
      density: 0.5
      weight: [0, 0.3, 0.7, 1] # weight gradient
  - model: NousResearch/Hermes-2-Pro-Mistral-7B
    parameters:
      density: 0.33
      weight:
        - filter: mlp
          value: 0.5
        - value: 0
merge_method: dare_ties
base_model: mistralai/Mistral-7B-v0.1
parameters:
  normalize: true
  int8_mask: true
dtype: float16
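
As a rough sketch of what the gradient lists above do, the snippet below assumes list-valued parameters are expanded by piecewise-linear interpolation across the layer stack, so `density: [1, 0.7, 0.1]` decays smoothly from the first layer to the last. The helper name `interpolate_gradient` and the 32-layer count are illustrative assumptions, not mergekit's API.

```python
import numpy as np

def interpolate_gradient(values, num_layers):
    """Expand a gradient list to one value per layer.

    Assumes piecewise-linear interpolation between evenly spaced anchor
    points; illustrative only, not mergekit's internal implementation.
    """
    anchors = np.linspace(0, num_layers - 1, num=len(values))
    layers = np.arange(num_layers)
    return np.interp(layers, anchors, values)

# Mistral-7B-v0.1 has 32 transformer layers; density falls from 1.0 to 0.1.
print(interpolate_gradient([1.0, 0.7, 0.1], 32))
# The weight gradient [0, 0.3, 0.7, 1] rises from 0 to 1 across the same layers.
print(interpolate_gradient([0.0, 0.3, 0.7, 1.0], 32))
```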