# eval_model_with_think.py
# NOTE(review): removed web-viewer residue (GitLab file header, commit byline,
# and line-number gutter) that preceded this config and made it invalid Python.
from mmengine.config import read_base

# OpenCompass convention: imports executed inside read_base() pull other config
# modules in and expose their variables (the `*_datasets` lists and `summarizer`)
# at this module's top level, where they are collected below.
with read_base():
    from opencompass.configs.datasets.aime2024.aime2024_gen_6e39a4 import aime2024_datasets
    from opencompass.configs.datasets.gsm8k.gsm8k_gen_1d7fe4 import gsm8k_datasets
    from opencompass.configs.datasets.mmlu.mmlu_gen_4d595a import mmlu_datasets
    from opencompass.configs.datasets.gpqa.gpqa_openai_simple_evals_gen_5aeece import gpqa_datasets
    from opencompass.configs.datasets.livecodebench.livecodebench_gen_6966bc import LCB_datasets
    from opencompass.configs.datasets.math.math_500_gen import math_datasets
    from opencompass.configs.datasets.ceval.ceval_zero_shot_gen_bd40ef import ceval_datasets
    from opencompass.configs.datasets.humaneval.humaneval_gen import humaneval_datasets
    # Report/summary layout for the evaluation results.
    from opencompass.configs.summarizers.example import summarizer

# Flatten every `*_datasets` list imported above into one combined list.
# Fix: the original also tested `k == 'datasets'`, but `datasets` is not bound
# before this statement executes, so that clause was dead code — removed.
datasets = sum([v for k, v in locals().items() if k.endswith("_datasets")], [])
work_dir = '/workspace/logs/'  # output directory for evaluation logs

from opencompass.models import OpenAISDK

# Chat prompt template for the OpenAI-compatible API: one human turn followed
# by one model (BOT) turn; the BOT turn is the one the model generates.
_human_turn = dict(role='HUMAN', api_role='HUMAN')
_bot_turn = dict(role='BOT', api_role='BOT', generate=True)
api_meta_template = dict(round=[_human_turn, _bot_turn])

# Model under evaluation: DeepSeek-R1-INT8 served locally behind an
# OpenAI-compatible HTTP endpoint.
models = [
    dict(
        abbr='DeepSeek-R1-INT8',  # model name recorded in the output logs
        type=OpenAISDK,
        # Served model name; if none was set on the server, this equals the
        # server-side model path.
        # NOTE(review): ends with '/' while tokenizer_path below does not —
        # confirm it matches the name the server actually exposes.
        path='/nvme/models/DeepSeek-R1-INT8/',
        openai_api_base="http://0.0.0.0:8000/v1",  # API endpoint
        tokenizer_path="/nvme/models/DeepSeek-R1-INT8",  # local model path, used to prepare input prompts
        key='EMPTY',  # no auth required by the local server
        meta_template=api_meta_template,  # chat-style evaluation; comment out for base (non-chat) models
        temperature=0,  # greedy decoding for reproducible scores
        query_per_second=64,
        max_out_len=32768,
        max_seq_len=32768,
        # Strip the model's reasoning ("think") section from predictions
        # before scoring.
        pred_postprocessor=dict(
            type='opencompass.utils.text_postprocessors.extract_non_reasoning_content'),
        batch_size=32),
]