run.sh
# distplan in ["colossalai", "pytorch"]
export DISTPAN="colossalai"

# The following options are only valid when DISTPAN="colossalai"
export TPDEGREE=1            # tensor parallel degree
export GPUNUM=4              # number of GPUs (processes) launched by torchrun on this node
export PLACEMENT='cpu'       # Gemini placement policy ('cpu' offloads model data to host memory)
export USE_SHARD_INIT=False  # shard parameters during model initialization to reduce peak memory
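# Hedged note (assumption, not from the original script): depending on the
# ColossalAI version, the placement policy may also accept values such as
# 'cuda' or 'auto'; check the Gemini documentation for your release. Example:
#   export PLACEMENT='auto'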
export BATCH_SIZE=1          # training batch size passed to train.py via --batch_size

env OMP_NUM_THREADS=12 torchrun --standalone --nproc_per_node=${GPUNUM} --master_port 29501 train.py \
  --dummy_data=True --tp_degree=${TPDEGREE} --batch_size=${BATCH_SIZE} --plugin='gemini' \
  --placement ${PLACEMENT} --shardinit ${USE_SHARD_INIT} --distplan ${DISTPAN} 2>&1 | tee run.log
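
# Hedged sketch (not part of the original script): the same launcher using the
# plain PyTorch distributed plan instead of ColossalAI. This assumes train.py
# ignores the ColossalAI-only flags (--tp_degree, --placement, --shardinit)
# when --distplan is "pytorch"; verify against train.py's argument parsing
# before uncommenting.
# export DISTPAN="pytorch"
# env OMP_NUM_THREADS=12 torchrun --standalone --nproc_per_node=${GPUNUM} --master_port 29501 train.py \
#   --dummy_data=True --batch_size=${BATCH_SIZE} --distplan ${DISTPAN} 2>&1 | tee run.log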