# tinyllama-moe-2x1.1B_v2 / mergekit_moe_config.yml
# (Hugging Face page residue below, kept as comments so the file stays valid YAML)
# mateussj's picture
# Upload folder using huggingface_hub
# 5805327 verified
# raw
# history blame contribute delete
# 901 Bytes
---
# mergekit-moe config: builds a 2-expert MoE from two TinyLlama-1.1B checkpoints.
# The scraped original had all indentation stripped (prompt lists and the second
# expert sat at column 0, producing duplicate top-level keys); nesting restored
# per the mergekit-moe schema.
base_model: TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T
gate_mode: hidden  # route tokens using hidden-state representations of the prompts
dtype: bfloat16
experts:
  # Expert 1: legal-domain routing.
  - source_model: TinyLlama/TinyLlama-1.1B-intermediate-step-955k-token-2T
    positive_prompts:
      - "Legal"
      - "Subpoena"
      - "Crime"
      - "Legal action"
      - "Court"
      - "Jurisprudence"
      - "U.S. Code"
      - "USC"
    negative_prompts:
      - "Python"
      - "SQL"
      - "JavaScript"
      - "C++"
      - "Computer"
      - "Engineering"
      - "coding"
      - "debug"
      - "troubleshoot"
  # Expert 2: programming/engineering routing (mirror image of expert 1).
  - source_model: TinyLlama/TinyLlama-1.1B-intermediate-step-1195k-token-2.5T
    positive_prompts:
      - "Python"
      - "SQL"
      - "JavaScript"
      - "C++"
      - "Computer"
      - "Engineering"
      - "coding"
      - "debug"
      - "troubleshoot"
    negative_prompts:
      - "Legal"
      - "Subpoena"
      - "Crime"
      - "Legal action"
      - "Court"
      - "Jurisprudence"
      - "U.S. Code"
      - "USC"