#!/bin/bash
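
# Usage (a sketch; the exact positional arguments are defined by
# conf/arguments.sh, which is assumed here to read the model config as the
# first argument — example paths below are hypothetical):
#   HF_MODEL_CKPT=/path/to/hf_ckpt bash quantize.sh <model_cfg> [quant_cfg]
# or, to quantize an existing Megatron checkpoint:
#   MLM_MODEL_CKPT=/path/to/mlm_ckpt bash quantize.sh <model_cfg> [quant_cfg]
# quant_cfg defaults to fp8 when omitted.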

SCRIPT_DIR="$(dirname "$(readlink -f "$0")")"

# Common arguments and base-model-specific arguments
source "${SCRIPT_DIR}/conf/arguments.sh"

# Extra arguments for this script
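# Flag summary (based on upstream Megatron-LM / ModelOpt flag semantics; may
# differ across versions): --finetune loads model weights only (no optimizer
# state), --auto-detect-ckpt-format handles either legacy or distributed
# checkpoint formats, --export-te-mcore-model exports a Transformer Engine
# Megatron-Core model, and --sequence-parallel enables sequence parallelism.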
MLM_DEFAULT_ARGS="--finetune --auto-detect-ckpt-format --export-te-mcore-model --sequence-parallel"

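# Quantization format is taken from the second positional argument.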
QUANT_CFG=$2

if [ -z "${QUANT_CFG}" ]; then
    QUANT_CFG=fp8
    printf "${MLM_WARNING} Variable ${PURPLE}QUANT_CFG${WHITE} is not set (default: ${QUANT_CFG})!\n"
fi

if [ -z "${MLM_QUANT_CKPT}" ]; then
    MLM_QUANT_CKPT=${MLM_WORK_DIR}/${MLM_MODEL_CFG}_quant
    printf "${MLM_WARNING} Variable ${PURPLE}MLM_QUANT_CKPT${WHITE} is not set (default: ${MLM_QUANT_CKPT})!\n"
fi

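# If MLM_MODEL_CKPT is not set, quantize directly from the Hugging Face
# checkpoint given by HF_MODEL_CKPT; otherwise load the existing Megatron
# checkpoint. Both paths save the quantized model to MLM_QUANT_CKPT.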
if [ -z "${MLM_MODEL_CKPT}" ]; then
    ${LAUNCH_SCRIPT} ${SCRIPT_DIR}/quantize.py \
        ${MODEL_ARGS} \
        --tensor-model-parallel-size ${TP} \
        --expert-model-parallel-size ${EP} \
        --pipeline-model-parallel-size ${PP} \
        --tokenizer-model ${TOKENIZER_MODEL} \
        --pretrained-model-path ${HF_MODEL_CKPT} \
        --save ${MLM_QUANT_CKPT} \
        --export-quant-cfg ${QUANT_CFG} \
        --references "${MLM_REF_LABEL}" \
        ${MLM_DEFAULT_ARGS} ${MLM_EXTRA_ARGS}
else
    ${LAUNCH_SCRIPT} ${SCRIPT_DIR}/quantize.py \
        ${MODEL_ARGS} \
        --tensor-model-parallel-size ${TP} \
        --expert-model-parallel-size ${EP} \
        --pipeline-model-parallel-size ${PP} \
        --tokenizer-model ${TOKENIZER_MODEL} \
        --load ${MLM_MODEL_CKPT} \
        --save ${MLM_QUANT_CKPT} \
        --export-quant-cfg ${QUANT_CFG} \
        --references "${MLM_REF_LABEL}" \
        ${MLM_DEFAULT_ARGS} ${MLM_EXTRA_ARGS}
fi