from opencompass.models import HuggingFaceCausalLM

# Vicuna v1.5 chat template: prompts are rendered as "USER: ... ASSISTANT: ..."
# and generation stops at the </s> end-of-sequence token.
_meta_template = dict(
    round=[
        dict(role="HUMAN", begin='USER: '),
        dict(role="BOT", begin=" ASSISTANT:", end='</s>', generate=True),
    ],
)

models = [
    dict(
        type=HuggingFaceCausalLM,
        abbr='vicuna-13b-v1.5-16k-hf',
        path="lmsys/vicuna-13b-v1.5-16k",
        tokenizer_path='lmsys/vicuna-13b-v1.5-16k',
        tokenizer_kwargs=dict(
            padding_side='left',
            truncation_side='left',
            use_fast=False,
        ),
        meta_template=_meta_template,
        max_out_len=100,
        max_seq_len=8192,
        batch_size=8,
        model_kwargs=dict(device_map='auto'),
        batch_padding=False,  # if False, run inference sample-by-sample without batch padding
        run_cfg=dict(num_gpus=2, num_procs=1),
        end_str='</s>',
    )
]
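
# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the original file): in OpenCompass,
# a model config like the `models` list above is normally pulled into a
# top-level evaluation config alongside a `datasets` list via mmengine's
# `read_base()`. The module paths and the eval config name below are
# assumptions; adjust them to wherever this file and your dataset configs
# actually live.
#
#     from mmengine.config import read_base
#
#     with read_base():
#         from .models.vicuna.hf_vicuna_13b_v1_5_16k import models  # assumed path
#         from .datasets.gsm8k.gsm8k_gen import gsm8k_datasets      # assumed dataset
#
#     datasets = [*gsm8k_datasets]
#
# The evaluation would then be launched with something like:
#     python run.py configs/eval_vicuna_13b_v1_5_16k.py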