---
# FrankenMonarch-7B — mergekit_config.yml
# Provenance (from Hugging Face page): uploaded by mlabonne via
# huggingface_hub, commit 124e5cc (verified), 437 bytes.
#
# Passthrough frankenmerge: five slices of mlabonne/AlphaMonarch-7B are
# stacked back-to-back. Adjacent slices overlap by 4 layers (e.g. layers
# 5-9 appear in both the first and second slice), producing a model
# deeper than the source. No weight blending — passthrough copies the
# selected layers verbatim.
dtype: float16
merge_method: passthrough
slices:
  - sources:
      - model: mlabonne/AlphaMonarch-7B
        layer_range: [0, 9]
  - sources:
      - model: mlabonne/AlphaMonarch-7B
        layer_range: [5, 14]
  - sources:
      - model: mlabonne/AlphaMonarch-7B
        layer_range: [10, 19]
  - sources:
      - model: mlabonne/AlphaMonarch-7B
        layer_range: [15, 24]
  - sources:
      - model: mlabonne/AlphaMonarch-7B
        layer_range: [20, 32]