Commit b3797475 authored by zhuwenwen
Browse files

add baichuan2-vllm config

parent 31a9e33f
from mmengine.config import read_base

# read_base() is OpenCompass/MMEngine's mechanism for composing config
# files: inside this context, relative imports pull in dataset and
# summarizer definitions from sibling config modules.
with read_base():
    from .datasets.ARC_c.ARC_c_gen_1e0de5 import ARC_c_datasets
    from .datasets.ARC_e.ARC_e_gen_1e0de5 import ARC_e_datasets
    from .datasets.ceval.ceval_gen_5f30c7 import ceval_datasets
    from .summarizers.example import summarizer

# Flatten every imported "*_datasets" list (plus any pre-existing
# "datasets" variable) into one list of dataset configs for the runner.
datasets = sum([v for k, v in locals().items()
                if k.endswith('_datasets') or k == 'datasets'], [])

# All evaluation outputs (predictions, results, logs) land here.
work_dir = './outputs/baichuan2-chat/'

from opencompass.models import VLLM
# Chat prompt template for Baichuan2: each user turn is prefixed with the
# special <reserved_106> token and each model turn with <reserved_107>;
# generate=True marks the BOT turn as the one the model must produce.
baichuan2_meta_template = {
    'round': [
        {'role': 'HUMAN', 'begin': '<reserved_106>'},
        {'role': 'BOT', 'begin': '<reserved_107>', 'generate': True},
    ],
}
# Baichuan2-7B-Chat evaluated through the vLLM backend on a single GPU.
_baichuan2_7b_chat = dict(
    type=VLLM,
    abbr='baichuan2-7b-chat-vllm',
    # Model identifier/path passed through to vLLM.
    path="Baichuan2-7B-Chat",
    model_kwargs=dict(tensor_parallel_size=1),
    meta_template=baichuan2_meta_template,
    max_out_len=100,
    max_seq_len=2048,
    batch_size=1,
    # temperature=0 -> greedy decoding for reproducible evaluation.
    generation_kwargs=dict(temperature=0),
    run_cfg=dict(num_gpus=1, num_procs=1),
)

models = [_baichuan2_7b_chat]
\ No newline at end of file
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment