from opencompass.models import HuggingFaceCausalLM

# Vicuna-style chat template used by WizardLM: user turns are prefixed with
# "USER: " and the model's reply follows "ASSISTANT: ", terminated by </s>.
_meta_template = dict(
    round=[
        dict(role="HUMAN", begin='USER: ', end=' '),
        dict(role="BOT", begin="ASSISTANT: ", end='</s>', generate=True),
    ],
)

models = [
    dict(
        type=HuggingFaceCausalLM,
        abbr='wizardlm-70b-v1.0-hf',
        path='WizardLM/WizardLM-70B-V1.0',
        tokenizer_path='WizardLM/WizardLM-70B-V1.0',
        model_kwargs=dict(
            device_map='auto',
            trust_remote_code=True,
        ),
        tokenizer_kwargs=dict(
            padding_side='left',
            truncation_side='left',
            trust_remote_code=True,
        ),
        meta_template=_meta_template,
        max_out_len=100,
        max_seq_len=2048,
        batch_size=8,
        # A 70B model needs several GPUs; here it is sharded across 4.
        run_cfg=dict(num_gpus=4, num_procs=1),
        end_str='</s>',
    )
]
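
# Rough illustration of how a round-based meta template of this shape expands a
# single user question into the prompt string handed to the model. The
# `_render_round` helper below is written only for this sketch; it is not part
# of the OpenCompass API, and the __main__ guard is meant to keep the demo from
# interfering when the file is consumed as a configuration.
if __name__ == '__main__':
    def _render_round(meta_template, question):
        human, bot = meta_template['round']
        # The HUMAN turn wraps the question; the BOT turn is left open for
        # generation, which stops once end_str ('</s>') is emitted.
        return f"{human['begin']}{question}{human['end']}{bot['begin']}"

    print(_render_round(_meta_template, "What is the capital of France?"))
    # -> "USER: What is the capital of France? ASSISTANT: "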