---
# mergekit configuration: TIES merge of Meta-Llama-3-8B (base) with its
# Instruct fine-tune, equal weight/density per source, bf16 output.
base_model: meta-llama/Meta-Llama-3-8B
dtype: bfloat16
merge_method: ties
parameters:
  int8_mask: 1.0  # truthy float — mergekit also accepts `true`
  normalize: 1.0  # truthy float — renormalize merge weights
slices:
  - sources:
      # Both sources span the full 32-layer stack of Llama-3-8B.
      - layer_range: [0, 32]
        model: meta-llama/Meta-Llama-3-8B
        parameters:
          density:
            - value: 0.5
          weight:
            - value: 0.5
      - layer_range: [0, 32]
        model: meta-llama/Meta-Llama-3-8B-Instruct
        parameters:
          density:
            - value: 0.5
          weight:
            - value: 0.5
tokenizer_source: union  # merge vocabularies from all input models