File size: 428 Bytes
67cbaf8
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
---
# mergekit configuration: DARE-TIES merge of four Gemma-2 9B fine-tunes,
# each contributing equally, on top of the Gutenberg base model.
base_model: nbeerbower/gemma2-gutenberg-9B
merge_method: dare_ties
models:
  # All four donor models carry an equal 0.25 weight.
  - model: nbeerbower/gemma2-gutenberg-9B
    parameters:
      weight: 0.25
  - model: UCLA-AGI/Gemma-2-9B-It-SPPO-Iter3
    parameters:
      weight: 0.25
  - model: ifable/gemma-2-Ifable-9B
    parameters:
      weight: 0.25
  - model: lemon07r/Gemma-2-Ataraxy-9B
    parameters:
      weight: 0.25
parameters:
  # NOTE(review): `t` is a slerp-style interpolation schedule; dare_ties
  # normally takes per-model `density` instead — confirm mergekit honors
  # (rather than silently ignores) `t` for this merge method.
  t: [0, 0.33, 0.67, 1]
dtype: bfloat16