# mergekit configuration: DARE-TIES merge of two Gemma-2-9B instruct variants
# on top of the base (non-instruct) google/gemma-2-9b model.
models:
  - model: gemma-2-9b-it-lora-merge  # local LoRA-merged instruct checkpoint
    parameters:
      density: 0.7  # fraction of delta parameters retained by DARE
      weight: 0.7   # relative contribution of this model to the merge
  - model: google/gemma-2-9b-it
    parameters:
      density: 0.7
      weight: 0.7
merge_method: dare_ties
base_model: google/gemma-2-9b
parameters:
  int8_mask: true
  # fixed: was "nomalize" (typo) — mergekit silently ignores unknown keys,
  # so weight normalization would never have been applied
  normalize: true
  weight: 0.7   # default weight for models that do not override it
  density: 0.7  # default density for models that do not override it
dtype: bfloat16