models:
  - model: cognitivecomputations/dolphin-2.8-mistral-7b-v02
    parameters:
      density: 0.5      # density/weight are per-model parameters for ties/dare-style merges;
      weight: 0.5       # slerp itself is driven by the t schedule below
  - model: meta-llama/Meta-Llama-3-8B
    parameters:
      density: 0.5
      weight: 0.5

merge_method: slerp                      # spherical linear interpolation between the two models
base_model: meta-llama/Meta-Llama-3-8B   # interpolation anchor; t = 0 keeps this model's weights
parameters:
  normalize: false
  t: [0.0, 0.5, 1.0, 0.5, 0.0]           # blend factor graded across layer blocks (0 = base model, 1 = the other model)
dtype: float16                           # precision used for the merged weights
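
# Usage sketch, assuming this config is saved as config.yaml and mergekit is
# installed; the mergekit-yaml entry point reads the config and writes the
# merged model to the given output directory (directory name is an arbitrary
# example):
#
#   mergekit-yaml config.yaml ./merged-model --cuda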