---
# mergekit evolutionary-merge configuration (task_arithmetic).
# Merges three fine-tunes onto a Mistral-7B-Instruct-v0.2 base, with
# independently evolved task-vector weights for each 8-layer slice.
# The base model appears in every slice without a `weight` entry: for
# task_arithmetic the base supplies the reference weights the task
# vectors are added to.
base_model: /content/evol_merge_storage/input_models/Mistral-7B-Instruct-v0.2_674785087
dtype: bfloat16
merge_method: task_arithmetic
parameters:
  int8_mask: 1.0
  normalize: 0.0
slices:
  # Layers 0-7
  - sources:
      - layer_range: [0, 8]
        model: /content/evol_merge_storage/input_models/NeuralBeagle14-7B_2368216670
        parameters:
          weight: 0.445198554642378
      - layer_range: [0, 8]
        model: /content/evol_merge_storage/input_models/Starling-LM-7B-beta_581094980
        parameters:
          weight: 0.005659554655209731
      - layer_range: [0, 8]
        model: /content/evol_merge_storage/input_models/sol_2748493923
        parameters:
          weight: 0.3149085451085008
      - layer_range: [0, 8]
        model: /content/evol_merge_storage/input_models/Mistral-7B-Instruct-v0.2_674785087
  # Layers 8-15
  - sources:
      - layer_range: [8, 16]
        model: /content/evol_merge_storage/input_models/NeuralBeagle14-7B_2368216670
        parameters:
          weight: 0.07684631401806669
      - layer_range: [8, 16]
        model: /content/evol_merge_storage/input_models/Starling-LM-7B-beta_581094980
        parameters:
          weight: 0.4987353671063862
      - layer_range: [8, 16]
        model: /content/evol_merge_storage/input_models/sol_2748493923
        parameters:
          weight: 0.5082785378119663
      - layer_range: [8, 16]
        model: /content/evol_merge_storage/input_models/Mistral-7B-Instruct-v0.2_674785087
  # Layers 16-23
  - sources:
      - layer_range: [16, 24]
        model: /content/evol_merge_storage/input_models/NeuralBeagle14-7B_2368216670
        parameters:
          weight: 0.5687639014384054
      - layer_range: [16, 24]
        model: /content/evol_merge_storage/input_models/Starling-LM-7B-beta_581094980
        parameters:
          weight: 0.5007180726769771
      - layer_range: [16, 24]
        model: /content/evol_merge_storage/input_models/sol_2748493923
        parameters:
          weight: 0.3855348742111532
      - layer_range: [16, 24]
        model: /content/evol_merge_storage/input_models/Mistral-7B-Instruct-v0.2_674785087
  # Layers 24-31
  - sources:
      - layer_range: [24, 32]
        model: /content/evol_merge_storage/input_models/NeuralBeagle14-7B_2368216670
        parameters:
          weight: 0.17691410154696996
      - layer_range: [24, 32]
        model: /content/evol_merge_storage/input_models/Starling-LM-7B-beta_581094980
        parameters:
          weight: 0.2031085126199354
      - layer_range: [24, 32]
        model: /content/evol_merge_storage/input_models/sol_2748493923
        parameters:
          weight: 0.5603073995258283
      - layer_range: [24, 32]
        model: /content/evol_merge_storage/input_models/Mistral-7B-Instruct-v0.2_674785087