models:
  - model: /media/data5/hf_models/Mistral-7B-v0.1
  - model: /media/data5/hf_models/dolphin-2.2.1-mistral-7b
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/SciPhi-Mistral-7B-32k
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/samantha-1.2-mistral-7b
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/docsgpt-7b-mistral
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/zephyr-7b-beta
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/MetaMath-Mistral-7B
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/Mistral-7B-OpenOrca
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/openchat-3.5-1210
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/MistralHermes-CodePro-7B-v1
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/MAmmoTH-7B-Mistral
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/OpenHermes-2.5-Mistral-7B
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/OpenHermes-2.5-neural-chat-v3-3-Slerp
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/NeuralHermes-2.5-Mistral-7B
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/Mistral-7B-Instruct-v0.2
    parameters:
      weight: 0.08
      density: 0.5
merge_method: dare_ties
base_model: /media/data5/hf_models/Mistral-7B-v0.1
parameters:
  int8_mask: true
dtype: bfloat16
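Because the weight and density values repeat across fourteen models, a quick programmatic check can catch typos before the merge runs. Below is a minimal sanity-check sketch, assuming the config above is saved as merge-config.yml (a hypothetical filename); it only needs PyYAML and does not depend on mergekit itself. Note that a weight sum above 1.0 is not necessarily an error here, since DARE-TIES merges in mergekit can normalize weights.

```python
# Minimal sanity check for the merge config above.
# Assumes the YAML is saved as "merge-config.yml" (hypothetical filename).
import yaml

with open("merge-config.yml", "r", encoding="utf-8") as f:
    config = yaml.safe_load(f)

total_weight = 0.0
for entry in config["models"]:
    # The base model entry has no "parameters" block, so default to {}.
    params = entry.get("parameters") or {}
    weight = params.get("weight")
    density = params.get("density")
    print(f"{entry['model']}: weight={weight} density={density}")
    if weight is not None:
        total_weight += weight

print(f"merge_method = {config['merge_method']}")
print(f"base_model   = {config['base_model']}")
print(f"sum of model weights = {total_weight:.2f}")
```

Once the config checks out, it can be handed to mergekit (for example via its mergekit-yaml command-line tool, which takes the config path and an output directory) to produce the merged model.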