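# Per-model settings keyed by regular expressions matched against the model
# name (presumably lowercased, as in text-generation-webui's models/config.yaml,
# which this file appears to be). When several patterns match the same name,
# entries are assumed to apply in file order, so later matches override earlier
# ones for the same key.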
.*(llama|alpac|vicuna|guanaco|koala|llava|wizardlm|metharme|pygmalion-7b|pygmalion-2|mythalion|wizard-mega|openbuddy|vigogne|h2ogpt-research|manticore):
  model_type: 'llama'
.*(opt-|opt_|opt1|opt3|optfor|galactica|galpaca|pygmalion-350m):
  model_type: 'opt'
.*(gpt-j|gptj|gpt4all-j|malion-6b|pygway|pygmalion-6b|dolly-v1):
  model_type: 'gptj'
.*(gpt-neox|koalpaca-polyglot|polyglot.*koalpaca|polyglot-ko|polyglot_ko|pythia|stablelm|incite|dolly-v2|polycoder|h2ogpt-oig|h2ogpt-oasst1|h2ogpt-gm):
  model_type: 'gptneox'
.*llama:
  model_type: 'llama'
.*bloom:
  model_type: 'bloom'
.*gpt2:
  model_type: 'gpt2'
.*falcon:
  model_type: 'falcon'
.*mpt:
  model_type: 'mpt'
.*(starcoder|starchat):
  model_type: 'starcoder'
.*dolly-v2:
  model_type: 'dollyv2'
.*replit:
  model_type: 'replit'
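# Quantization settings inferred from the model name (assumed GPTQ-style
# parameters): wbits is the quantization bit width, groupsize the
# quantization group size.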
llama-65b-gptq-3bit:
  groupsize: 'None'
.*(4bit|int4):
  wbits: 4
.*(3bit|int3):
  wbits: 3
.*(-2bit|_2bit|int2-):
  wbits: 2
.*(-1bit|_1bit|int1-):
  wbits: 1
.*(8bit|int8):
  wbits: 8
.*(-7bit|_7bit|int7-):
  wbits: 7
.*(-6bit|_6bit|int6-):
  wbits: 6
.*(-5bit|_5bit|int5-):
  wbits: 5
.*(-gr32-|-32g-|groupsize32|-32g$):
  groupsize: 32
.*(-gr64-|-64g-|groupsize64|-64g$):
  groupsize: 64
.*(gr128|128g|groupsize128):
  groupsize: 128
.*(gr1024|1024g|groupsize1024):
  groupsize: 1024
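# Chat/instruction settings: instruction_template names a prompt template
# (presumably one bundled with the UI), truncation_length sets the context
# length, and custom_stopping_strings adds extra stop sequences.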
.*(oasst|openassistant-|stablelm-7b-sft-v7-epoch-3):
  instruction_template: 'Open Assistant'
  skip_special_tokens: false
(?!.*galactica)(?!.*reward).*openassistant:
  instruction_template: 'Open Assistant'
  skip_special_tokens: false
(?!.*v0)(?!.*1.1)(?!.*1_1)(?!.*stable)(?!.*chinese).*vicuna:
  instruction_template: 'Vicuna-v0'
.*vicuna.*v0:
  instruction_template: 'Vicuna-v0'
.*vicuna.*(1.1|1_1|1.3|1_3):
  instruction_template: 'Vicuna-v1.1'
.*vicuna.*(1.5|1_5):
  instruction_template: 'Vicuna-v1.1'
  truncation_length: 4096
.*stable.*vicuna:
  instruction_template: 'StableVicuna'
(?!.*chat).*chinese-vicuna:
  instruction_template: 'Alpaca'
.*chinese-vicuna.*chat:
  instruction_template: 'Chinese-Vicuna-Chat'
.*alpaca:
  instruction_template: 'Alpaca'
.*alpaca-native-4bit:
  instruction_template: 'Alpaca'
  wbits: 4
  groupsize: 128
.*galactica:
  skip_special_tokens: false
.*dolly-v[0-9]-[0-9]*b:
  instruction_template: 'Alpaca'
  skip_special_tokens: false
  custom_stopping_strings: '"### End"'
.*koala:
  instruction_template: 'Koala'
.*chatglm:
  instruction_template: 'ChatGLM'
.*(metharme|pygmalion|mythalion):
  instruction_template: 'Metharme'
.*llava:
  model_type: 'llama'
  instruction_template: 'LLaVA'
  custom_stopping_strings: '"\n###"'
.*raven:
  instruction_template: 'RWKV-Raven'
.*ctx8192:
  truncation_length: 8192
.*moss-moon.*sft:
  instruction_template: 'MOSS'
.*stablelm-tuned:
  instruction_template: 'StableLM'
  truncation_length: 4096
.*stablelm-base:
  truncation_length: 4096
.*galactica.*finetuned:
  instruction_template: 'Galactica Finetuned'
.*galactica.*-v2:
  instruction_template: 'Galactica v2'
(?!.*finetuned)(?!.*-v2).*galactica:
  instruction_template: 'Galactica'
.*guanaco:
  instruction_template: 'Guanaco non-chat'
.*baize:
  instruction_template: 'Baize'
.*mpt-.*instruct:
  instruction_template: 'Alpaca'
.*mpt-.*chat:
  instruction_template: 'MPT-Chat'
(?!.*-flan-)(?!.*-t5-).*lamini-:
  instruction_template: 'Alpaca'
.*incite.*chat:
  instruction_template: 'INCITE-Chat'
.*incite.*instruct:
  instruction_template: 'INCITE-Instruct'
.*wizard.*mega:
  instruction_template: 'Wizard-Mega'
  custom_stopping_strings: '"</s>"'
.*ziya-:
  instruction_template: 'Ziya'
.*koalpaca:
  instruction_template: 'KoAlpaca'
.*openbuddy:
  instruction_template: 'OpenBuddy'
(?!.*chat).*vigogne:
  instruction_template: 'Vigogne-Instruct'
.*vigogne.*chat:
  instruction_template: 'Vigogne-Chat'
.*(llama-deus|supercot|llama-natural-instructions|open-llama-0.3t-7b-instruct-dolly-hhrlhf|open-llama-0.3t-7b-open-instruct):
  instruction_template: 'Alpaca'
.*bactrian:
  instruction_template: 'Bactrian'
.*(h2ogpt-oig-|h2ogpt-oasst1-|h2ogpt-research-oasst1-):
  instruction_template: 'H2O-human_bot'
.*h2ogpt-gm-:
  instruction_template: 'H2O-prompt_answer'
.*manticore:
  instruction_template: 'Manticore Chat'
.*bluemoonrp-(30|13)b:
  instruction_template: 'Bluemoon'
  truncation_length: 4096
.*Nous-Hermes-13b:
  instruction_template: 'Alpaca'
.*airoboros:
  instruction_template: 'Vicuna-v1.1'
.*airoboros.*1.2:
  instruction_template: 'Airoboros-v1.2'
.*alpa(cino|sta):
  instruction_template: 'Alpaca'
.*hippogriff:
  instruction_template: 'Hippogriff'
.*lazarus:
  instruction_template: 'Alpaca'
.*guanaco-.*(7|13|33|65)b:
  instruction_template: 'Guanaco'
.*hypermantis:
  instruction_template: 'Alpaca'
.*open-llama-.*-open-instruct:
  instruction_template: 'Alpaca'
.*starcoder-gpteacher-code-instruct:
  instruction_template: 'Alpaca'
.*tulu:
  instruction_template: 'Tulu'
.*chronos:
  instruction_template: 'Alpaca'
.*samantha:
  instruction_template: 'Samantha'
.*wizardcoder:
  instruction_template: 'Alpaca'
.*starchat-beta:
  instruction_template: 'Starchat-Beta'
  custom_stopping_strings: '"<|end|>"'
.*minotaur:
  instruction_template: 'Minotaur'
.*minotaur-15b:
  truncation_length: 8192
.*orca_mini:
  instruction_template: 'Orca Mini'
.*landmark:
  truncation_length: 8192
.*superhot-8k:
  truncation_length: 8192
.*xgen.*-inst:
  truncation_length: 8192
  instruction_template: 'Vicuna-v0'
.*(platypus|gplatty|superplatty):
  instruction_template: 'Alpaca'
.*longchat:
  instruction_template: 'Vicuna-v1.1'
.*vicuna-33b:
  instruction_template: 'Vicuna-v1.1'
.*redmond-hermes-coder:
  instruction_template: 'Alpaca'
  truncation_length: 8192
.*wizardcoder-15b:
  instruction_template: 'Alpaca'
  truncation_length: 8192
.*wizardlm:
  instruction_template: 'Vicuna-v1.1'
.*godzilla:
  instruction_template: 'Alpaca'
.*llama-(2|v2):
  truncation_length: 4096
.*llama(-?)(2|v2).*chat:
  instruction_template: 'Llama-v2'
.*newhope:
  instruction_template: 'NewHope'
.*stablebeluga2:
  instruction_template: 'StableBeluga2'
  truncation_length: 4096
.*openchat:
  instruction_template: 'OpenChat'
.*falcon.*-instruct:
.*(openorca-platypus2):
  instruction_template: 'OpenOrca-Platypus2'
  custom_stopping_strings: '"### Instruction:", "### Response:"'
.*codellama:
  rope_freq_base: 1000000
.*codellama.*instruct:
  instruction_template: 'Llama-v2'
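# Worked example (assumed matching behavior, hypothetical model name): a model
# called "TheBloke_WizardLM-13B-GPTQ-4bit-128g" would match the first wizardlm
# pattern (model_type 'llama'), .*(4bit|int4) (wbits 4),
# .*(gr128|128g|groupsize128) (groupsize 128), and .*wizardlm
# (instruction_template 'Vicuna-v1.1'), with later matches overriding earlier
# ones for any key set more than once.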