multi_train.sh 967 Bytes
Newer Older
wangsen's avatar
wangsen committed
1
2
3
4
5
6
# recommended paddle.__version__ == 2.0.0
#python3 -m paddle.distributed.launch --log_dir=./debug/ --gpus '0,1,2,3,4,5,6,7'  tools/train.py -c configs/rec/rec_mv3_none_bilstm_ctc.yml
set -euo pipefail

# Download the MobileNetV3 backbone pretrained weights. Abort early if the
# download fails — otherwise training would silently start from scratch.
wget -P ./pretrain_models/ https://paddleocr.bj.bcebos.com/pretrained/MobileNetV3_large_x0_5_pretrained.pdparams \
  || { printf 'ERROR: failed to download pretrained weights\n' >&2; exit 1; }

# Timestamped training log (quoted so an unexpected date format can't word-split).
log_file="./$(date '+%Y-%m-%dT%H_%M_%S')_dbnet.log"

# Launch 8-GPU distributed DBNet detection training; stdout+stderr go to the
# console and the log file. With pipefail, a training failure fails the script.
python3 -m paddle.distributed.launch --log_dir=./debug/ --gpus '0,1,2,3,4,5,6,7' \
  tools/train.py -c configs/det/det_mv3_db.yml \
  -o Global.epoch_num=50 \
  Architecture.Backbone.scale=1.25 \
  Global.eval_batch_step=[0,40] \
  Train.loader.batch_size_per_card=36 \
  Train.loader.num_workers=2 \
  Eval.loader.num_workers=0 \
  2>&1 | tee "$log_file"

wangsen's avatar
wangsen committed
7
8

#numactl --cpunodebind=0,1 --membind=0,1  python3 -m paddle.distributed.launch --log_dir=./debug/ --gpus '0,1,2,3,4,5,6,7'  tools/train.py -c configs/det/det_mv3_db.yml -o Global.epoch_num=1500   Global.eval_batch_step=[0,60] Train.loader.batch_size_per_card=48 Train.loader.num_workers=8 Eval.loader.num_workers=0