# big-p-128k / mergekit_config.yml
# wassemgtk's picture
# Upload folder using huggingface_hub
# c731780 verified
# raw
# history blame
# 586 Bytes
---
# Passthrough self-merge ("depth upscaling") of Llama-3-Giraffe-70B:
# seven overlapping 20-layer windows (stride 10) from the same base model
# are stacked back-to-back, producing a deeper frankenmerge. The overlap
# between consecutive layer_range windows is intentional.
slices:
  - sources:
      - model: abacusai/Llama-3-Giraffe-70B
        layer_range: [0, 20]
  - sources:
      - model: abacusai/Llama-3-Giraffe-70B
        layer_range: [10, 30]
  - sources:
      - model: abacusai/Llama-3-Giraffe-70B
        layer_range: [20, 40]
  - sources:
      - model: abacusai/Llama-3-Giraffe-70B
        layer_range: [30, 50]
  - sources:
      - model: abacusai/Llama-3-Giraffe-70B
        layer_range: [40, 60]
  - sources:
      - model: abacusai/Llama-3-Giraffe-70B
        layer_range: [50, 70]
  - sources:
      - model: abacusai/Llama-3-Giraffe-70B
        layer_range: [60, 80]
# Concatenate the selected layer slices verbatim (no weight averaging).
merge_method: passthrough
# Output tensor precision for the merged checkpoint.
dtype: float16