SuperChat-7B / mergekit_config.yml
yash21's picture
Upload folder using huggingface_hub
a1a089f verified
raw
history blame contribute delete
514 Bytes
---
# mergekit configuration: DARE-TIES merge of four Mistral-7B fine-tunes
# on top of Mistral-7B-Instruct-v0.2.
#
# NOTE(review): indentation was flattened in the original paste — each
# per-model `parameters` block must be nested under its list item, and a
# flush-left `parameters:` after a sequence would otherwise be an invalid
# duplicate top-level key. Reconstructed per the standard mergekit schema.

# Model whose weights serve as the reference for delta computation.
base_model: mistralai/Mistral-7B-Instruct-v0.2

# Data type for the merged output weights.
dtype: bfloat16

# DARE-TIES: drop-and-rescale sparsification of task vectors with
# TIES-style sign election before merging.
merge_method: dare_ties

models:
  # Base model listed without parameters — included per dare_ties convention.
  - model: mistralai/Mistral-7B-Instruct-v0.2
  - model: openchat/openchat-3.5-0106
    parameters:
      density: 0.8   # fraction of delta parameters retained after dropout
      weight: 0.4    # relative contribution of this model's task vector
  - model: OpenPipe/mistral-ft-optimized-1227
    parameters:
      density: 0.8
      weight: 0.4
  - model: berkeley-nest/Starling-LM-7B-alpha
    parameters:
      density: 0.8
      weight: 0.5
  - model: jan-hq/supermario-v2
    parameters:
      density: 0.8
      weight: 0.3

# Global merge options.
parameters:
  int8_mask: true    # use int8 masks during merge (reduces memory use)