wwe180 committed
Commit
f41631c
1 Parent(s): 77355bd

Delete mergekit_config.yml

Files changed (1)
  1. mergekit_config.yml +0 -16
mergekit_config.yml DELETED
@@ -1,16 +0,0 @@
-slices:
-- sources:
-  - model: "Sao10K/L3-8B-Stheno-v3.1+Jiar/Llama-3-8B-Chinese"
-    layer_range: [0, 22]
-- sources:
-  - model: "NousResearch/Hermes-2-Theta-Llama-3-8B+camillop/Meta-Llama-3-8B-ORPO-ITA-llama-adapters"
-    layer_range: [10, 22]
-- sources:
-  - model: "migtissera/Llama-3-8B-Synthia-v3.5+unstoppable123/LLaMA3-8B_chinese_lora_sft_v0.2"
-    layer_range: [0, 22]
-- sources:
-  - model: "openchat/openchat-3.6-8b-20240522+hfl/llama-3-chinese-8b-instruct-v2-lora"
-    layer_range: [10,32]
-merge_method: passthrough
-base_model: "gradientai/Llama-3-8B-Instruct-Gradient-1048k"
-dtype: bfloat16
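
For reference, a passthrough slice-merge config like the one removed above is normally fed to mergekit. Below is a minimal sketch of how such a file could be re-applied, assuming a local copy of the YAML and mergekit's documented Python entry points (MergeConfiguration, MergeOptions, run_merge); the paths and option values are illustrative, not part of this repository.

# Sketch: re-applying a mergekit YAML config (assumed local paths).
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "mergekit_config.yml"   # a saved copy of the file deleted in this commit
OUTPUT_PATH = "./merged-model"       # illustrative output directory

with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# The passthrough merge_method concatenates the listed layer slices into one model.
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(copy_tokenizer=True, lazy_unpickle=False),
)

The same result can be obtained from the command line with mergekit's mergekit-yaml entry point, pointing it at the config file and an output directory.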