# OpenCompass evaluation config: Llama-2-7B-Chat served via vLLM,
# evaluated on the ARC-c and ARC-e dataset configs imported below.
from mmengine.config import read_base

with read_base():
    # Inside read_base(), these imports pull other config modules into this
    # module's namespace; each contributes a `*_datasets` list (and the
    # summarizer) rather than being a normal runtime import.
    from .datasets.ARC_c.ARC_c_gen_1e0de5 import ARC_c_datasets
    from .datasets.ARC_e.ARC_e_gen_1e0de5 import ARC_e_datasets
    from .summarizers.example import summarizer

# Collect every imported `*_datasets` list (plus any pre-existing `datasets`
# entry) from module scope and flatten them into one list. NOTE(review):
# depends on which names exist in locals() at this exact point — keep this
# line after the read_base() block and avoid adding names ending in
# "_datasets" above it.
datasets = sum([v for k, v in locals().items() if k.endswith("_datasets") or k == 'datasets'], [])

# All evaluation outputs (predictions, results, logs) are written here.
work_dir = './outputs/llama2-chat/'

from opencompass.models import VLLM

# Llama-2 chat prompt template: user turns are wrapped in "[INST] ... [/INST]",
# model turns follow with generate=True marking where generation happens.
llama2_meta_template = dict(
    round=[
        dict(role="HUMAN", begin='[INST] ', end=' [/INST]'),
        dict(role="BOT", begin=' ', end=' ', generate=True),
    ],
)

models = [
    dict(
        type=VLLM,
        abbr='llama-2-7b-chat-vllm',
        # HF model identifier / local path — presumably resolved by vLLM's
        # model loader; confirm against the deployment environment.
        path="Llama-2-7b-chat-hf",
        model_kwargs=dict(tensor_parallel_size=1),
        meta_template=llama2_meta_template,
        max_out_len=100,
        max_seq_len=2048,
        batch_size=1,
        # temperature=0 → greedy (deterministic) decoding.
        generation_kwargs=dict(temperature=0),
        # Truncate generations at the start of a spurious follow-up turn.
        end_str='[INST]',
        run_cfg=dict(num_gpus=1, num_procs=1),
    )
]