from mmengine.config import read_base
from opencompass.models import HuggingFaceCausalLM

with read_base():
    # WinoGrande generation-style dataset config shipped with OpenCompass.
    from .datasets.winogrande.winogrande_gen_a027b6 import winogrande_datasets

datasets = [*winogrande_datasets]

# Conversation template matching InternLM-Chat's <|User|>/<|Bot|> turn markers.
_meta_template = dict(
    round=[
        dict(role='HUMAN', begin='<|User|>:', end='<eoh>\n'),
        dict(role='BOT', begin='<|Bot|>:', end='<eoa>\n', generate=True),
    ],
)
models = [
    dict(
        type=HuggingFaceCausalLM,
        abbr='internlm-chat-7b-hf',
        path='internlm/internlm-chat-7b',
        tokenizer_path='internlm/internlm-chat-7b',
        tokenizer_kwargs=dict(
            padding_side='left',
            truncation_side='left',
            use_fast=False,
            trust_remote_code=True,
        ),
        max_out_len=100,
        max_seq_len=2048,
        batch_size=8,
        meta_template=_meta_template,
        model_kwargs=dict(
            trust_remote_code=True,
            device_map='auto',
        ),
        run_cfg=dict(num_gpus=1, num_procs=1),
    )
]
_winogrande_all = [d['abbr'] for d in winogrande_datasets]

# Aggregate the per-subset WinoGrande results: one mean score and its spread.
summarizer = dict(
    summary_groups=[
        {'name': 'winogrande', 'subsets': _winogrande_all},
        {'name': 'winogrande_std', 'subsets': _winogrande_all, 'std': True},
    ]
)
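
As a rough illustration of how a round-based meta template such as `_meta_template` above shapes the prompt sent to InternLM-Chat, the sketch below wraps each turn in its role's begin/end markers and leaves the generating role's turn open. It is a simplified approximation, not OpenCompass's actual prompt builder, and the example dialogue is invented.

def render_prompt(meta_template, dialogue):
    # Simplified approximation of round-based templating: each (role, text)
    # turn is wrapped in that role's begin/end markers; the role flagged
    # generate=True is emitted with only its begin marker so the model
    # continues from there.
    role_cfg = {item['role']: item for item in meta_template['round']}
    parts = []
    for role, text in dialogue:
        cfg = role_cfg[role]
        if cfg.get('generate'):
            parts.append(cfg['begin'])
        else:
            parts.append(cfg['begin'] + text + cfg['end'])
    return ''.join(parts)

# Invented single-turn example in the generation style used by the
# winogrande_gen config above; reuses _meta_template from the config.
print(render_prompt(_meta_template, [
    ('HUMAN', 'Which option correctly completes the sentence? ...'),
    ('BOT', ''),
]))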