# TriMistral-7B-TIES / mergekit_config.yml
# Uploaded by Muhammad2003 via huggingface_hub (commit 8a5ff99, verified)
# 394 Bytes
---
# mergekit configuration: TIES merge of three Mistral-7B-family checkpoints
# onto the zephyr-7b-beta base.
models:
  # Base model — density/weight are not used for the base in a TIES merge.
  - model: HuggingFaceH4/zephyr-7b-beta
  - model: NousResearch/Hermes-2-Pro-Mistral-7B
    parameters:
      density: 0.5
      weight: 0.5
  - model: instructlab/merlinite-7b-lab
    parameters:
      density: 0.5
      weight: 0.3
merge_method: ties
base_model: HuggingFaceH4/zephyr-7b-beta
parameters:
  # Rescale merged task vectors so the weights sum to 1.
  normalize: true
dtype: bfloat16