base_model: princeton-nlp/gemma-2-9b-it-SimPO
chat_template: auto
dtype: bfloat16
merge_method: dare_ties
models:
  - model: princeton-nlp/gemma-2-9b-it-SimPO
  - model: lemon07r/Gemma-2-Ataraxy-v2-9B
    parameters:
      density: 0.5
      weight: 1
parameters:
  int8_mask: true
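
For reference, below is a minimal sketch of how a DARE-TIES configuration like this can be applied with mergekit's Python API, assuming the YAML above is saved as `config.yml`. The output path and `MergeOptions` values here are illustrative placeholders, not the exact settings used to build this model.

```python
# Sketch: run the merge configuration above with mergekit.
# Assumes mergekit is installed (pip install mergekit) and the YAML
# config shown above has been saved to config.yml.
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "config.yml"       # the dare_ties configuration shown above
OUTPUT_PATH = "./merged-model"  # illustrative output directory

# Parse and validate the YAML merge configuration.
with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the merge and write the result to OUTPUT_PATH.
run_merge(
    merge_config,
    OUTPUT_PATH,
    options=MergeOptions(
        cuda=False,           # set True to merge on GPU if available
        copy_tokenizer=True,  # copy the base model's tokenizer alongside the weights
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)
```

The same merge can also be run from the command line with mergekit's `mergekit-yaml` entry point, e.g. `mergekit-yaml config.yml ./merged-model`.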