lora_finetune_multi.sh 325 Bytes
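# Launches a multi-GPU LoRA SFT run of GLM-4V-9B with ms-swift: two processes
# (NPROC_PER_NODE=2) pinned to devices 5 and 6, training for one epoch on the
# coco-en-2-mini dataset and evaluating every 200 steps. Paths below are local
# to this machine; adjust them for your own checkout and checkpoint locations.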
NPROC_PER_NODE=2 \
CUDA_VISIBLE_DEVICES=5,6 swift sft \
    --model_id_or_path /home/wanglch/projects/GLM-4V/glm-4v-b \
    --model_type glm4v-9b-chat \
    --dataset coco-en-2-mini \
    --output_dir /home/wanglch/projects/saves/glm-4v/train_dtk \
    --num_train_epochs 1 \
    --eval_steps 200 \
    --sft_type lora \