File size: 912 Bytes
256a159 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 |
from opencompass.models import HuggingFaceCausalLM
_meta_template = dict(
begin="<s>",
round=[
dict(role="HUMAN", begin='[INST]', end='[/INST]'),
dict(role="BOT", begin="", end='</s>', generate=True),
],
eos_token_id=2
)
# Evaluation entry for Mixtral-8x7B-Instruct v0.1 loaded through the
# HuggingFace causal-LM backend.
models = [
    dict(
        abbr="mixtral-8x7b-instruct-v0.1",
        type=HuggingFaceCausalLM,
        path="mistralai/Mixtral-8x7B-Instruct-v0.1",
        tokenizer_path="mistralai/Mixtral-8x7B-Instruct-v0.1",
        # Let accelerate shard the 8x7B weights across all visible GPUs.
        model_kwargs=dict(device_map="auto", trust_remote_code=True),
        # Left-side padding/truncation keeps the prompt tail (the part the
        # model continues from) intact for decoder-only generation.
        tokenizer_kwargs=dict(
            padding_side="left",
            truncation_side="left",
            trust_remote_code=True,
        ),
        meta_template=_meta_template,
        max_out_len=100,
        max_seq_len=2048,
        batch_size=8,
        run_cfg=dict(num_gpus=2, num_procs=1),
        end_str="</s>",  # stop decoding once the EOS marker is emitted
    ),
]
|