models:
  # Base model (declared again as base_model below)
  - model: meta-llama/Llama-2-7b-hf
  - model: meta-llama/CodeLlama-7b-Instruct-hf
    parameters:
      density: 0.65   # fraction of this model's delta parameters kept by DARE's random drop
      weight: 1.0     # linear weight applied to the retained deltas
  - model: mrm8488/llama-2-coder-7b
    parameters:
      density: 0.35
      weight: 0.5
merge_method: dare_linear   # DARE (drop-and-rescale) task vectors, combined linearly
base_model: meta-llama/Llama-2-7b-hf
parameters:
  int8_mask: true   # store intermediate masks in int8 to save memory
dtype: bfloat16     # dtype used for the merged weights
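
# A minimal usage sketch, assuming this file is saved as config.yml and the
# mergekit CLI is installed (pip install mergekit); the output path is
# arbitrary and available flags may vary between mergekit versions:
#
#   mergekit-yaml config.yml ./merged-model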