# Mixtral-4x7B-v0.1 / mergekit_moe_config.yml
base_model: stabilityai/japanese-stablelm-base-gamma-7b
# Router (gate) weights are randomly initialized. In this mode the
# positive_prompts below are not used to seed the gates, so they are
# left as empty placeholders.
gate_mode: random
# Four identical copies of the base model serve as the experts,
# giving the 4x7B mixture-of-experts layout.
experts:
  - source_model: stabilityai/japanese-stablelm-base-gamma-7b
    positive_prompts:
      - ""
  - source_model: stabilityai/japanese-stablelm-base-gamma-7b
    positive_prompts:
      - ""
  - source_model: stabilityai/japanese-stablelm-base-gamma-7b
    positive_prompts:
      - ""
  - source_model: stabilityai/japanese-stablelm-base-gamma-7b
    positive_prompts:
      - ""