Mahou-Gutenberg-Nemo-12B / mergekit_config.yml
models:
  - model: flammenai/Mahou-1.3-mistral-nemo-12B
    parameters:
      weight: 0.5
      density: 0.8
  - model: nbeerbower/mistral-nemo-gutenberg-12B-v3
    parameters:
      weight: 0.5
      density: 0.8
merge_method: della_linear
base_model: flammenai/Mahou-1.3-mistral-nemo-12B
parameters:
  epsilon: 0.05
  lambda: 1
  int8_mask: true
dtype: bfloat16
tokenizer_source: union
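
For context, della_linear merges the two models' parameter deltas against the base model: density sets the fraction of delta parameters kept per model, epsilon controls the width of the magnitude-based drop-probability window around that density, and lambda scales the merged deltas. Below is a minimal sketch of running this config from Python with mergekit's documented API (MergeConfiguration / run_merge); the output directory name is an arbitrary choice, and MergeOptions flags shown are optional conveniences, not part of the config above.

# Minimal sketch: execute the mergekit config above from Python.
# Assumes `pip install mergekit` and that the YAML is saved as
# mergekit_config.yml; "./Mahou-Gutenberg-Nemo-12B" is a hypothetical
# output path chosen for this example.
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

with open("mergekit_config.yml", "r", encoding="utf-8") as fp:
    config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    config,
    out_path="./Mahou-Gutenberg-Nemo-12B",
    options=MergeOptions(
        cuda=True,            # set False to merge on CPU
        copy_tokenizer=True,  # write the merged tokenizer (tokenizer_source: union)
        lazy_unpickle=True,   # reduce peak memory while loading shards
    ),
)

Equivalently, the mergekit-yaml CLI can consume the same file: mergekit-yaml mergekit_config.yml ./Mahou-Gutenberg-Nemo-12B --cuda.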