base_model: NousResearch/Llama-2-7b-hf
dtype: bfloat16
merge_method: task_arithmetic
slices:
- sources:
  - layer_range: [0, 32]
    model: taide/TAIDE-LX-7B
    parameters:
      weight: 1.0
  - layer_range: [0, 32]
    model: NousResearch/Llama-2-7b-chat-hf
    parameters:
      weight: 1.0
  - layer_range: [0, 32]
    model: NousResearch/Llama-2-7b-hf