from mmengine.config import read_base

# Pull in the LVEval datasets, the BlueLM-7B-Chat-32K HuggingFace model config,
# and the LVEval summarizer from the base configs.
with read_base():
    from .datasets.lveval.lveval import LVEval_datasets as datasets
    from .models.bluelm.hf_bluelm_7b_chat_32k import models
    from .summarizers.lveval import summarizer

# Point the model and tokenizer at a local copy of BlueLM-7B-Chat-32K.
models[0]["path"] = "/path/to/your/huggingface_models/BlueLM-7B-Chat-32K"
models[0]["tokenizer_path"] = "/path/to/your/huggingface_models/BlueLM-7B-Chat-32K"
models[0]["max_seq_len"] = 32768  # match the model's 32K context window
models[0]["generation_kwargs"] = dict(do_sample=False)  # greedy decoding for reproducible scores
models[0]["mode"] = "mid"  # truncate over-long inputs in the middle