slices:
  - sources:
      - model: "hfl/llama-3-chinese-8b-instruct-v2"
        layer_range: [0, 10]
  - sources:
      - model: "NousResearch/Hermes-2-Theta-Llama-3-8B"
        layer_range: [0, 20]
  - sources:
      - model: "hfl/llama-3-chinese-8b-instruct-v2"
        layer_range: [10, 20]
  - sources:
      - model: "NousResearch/Hermes-2-Theta-Llama-3-8B"
        layer_range: [20, 32]
  - sources:
      - model: "hfl/llama-3-chinese-8b-instruct-v2"
        layer_range: [20, 32]
merge_method: passthrough
base_model: "NousResearch/Hermes-2-Theta-Llama-3-8B"
dtype: bfloat16
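This passthrough (frankenmerge) configuration does not average any weights; it simply stacks the listed layer slices from the two 32-layer Llama-3-8B parents in order, interleaving blocks from the Chinese-tuned model and Hermes-2-Theta. Assuming mergekit's layer ranges are end-exclusive, the result has 10 + 20 + 10 + 12 + 12 = 64 transformer layers. With mergekit installed, a config like this can be run with something along the lines of `mergekit-yaml config.yaml ./merged-model` (paths here are illustrative; adjust to your environment).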