wangsen / paddle_dbnet

Commit f3efa9be, authored Jun 09, 2021 by LDOUBLEV
Parent: b5aa9bde

    fix test.sh bug
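From the diff below, the fix appears to do two things: the model-export step now writes the inference model to ${save_log}_infer/ instead of ${save_log}/export_inference/, and the inference commands take --det_model_dir from the det_model_dir variable set in the det/rec branch instead of hard-coding ${save_log}/export_inference/, so a rec run no longer passes its own exported directory as the detection model.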
Changes: 1 file changed, 8 additions and 8 deletions.

test/test.sh (+8, -8), viewed at f3efa9be:
@@ -138,18 +138,18 @@ for train_model in ${train_model_list[*]}; do
                 ${python} ${launch} ${trainer} -c ${yml_file} -o Global.epoch_num=${epoch} Global.eval_batch_step=${eval_batch_step} Global.auto_cast=${auto_cast} Global.pretrained_model=${pretrain} Global.save_model_dir=${save_log} Global.use_gpu=${use_gpu} Train.loader.batch_size_per_card=2
                 status_check $? "${trainer}" "${command}" "${status_log}"
-                command="${python} ${export_model} -c ${yml_file} -o Global.pretrained_model=${save_log}/latest Global.save_inference_dir=${save_log}/export_inference/ Global.save_model_dir=${save_log}"
-                ${python} ${export_model} -c ${yml_file} -o Global.pretrained_model=${save_log}/latest Global.save_inference_dir=${save_log}/export_inference/ Global.save_model_dir=${save_log}
+                command="${python} ${export_model} -c ${yml_file} -o Global.pretrained_model=${save_log}/latest Global.save_inference_dir=${save_log}_infer/ Global.save_model_dir=${save_log}"
+                ${python} ${export_model} -c ${yml_file} -o Global.pretrained_model=${save_log}/latest Global.save_inference_dir=${save_log}_infer/ Global.save_model_dir=${save_log}
                 status_check $? "${trainer}" "${command}" "${status_log}"
                 if [ "${model_name}" = "det" ]; then
                     export rec_batch_size_list=("1")
                     inference="tools/infer/predict_det.py"
-                    det_model_dir=${save_log}/export_inference/
+                    det_model_dir=${save_log}_infer
                     rec_model_dir=""
                 elif [ "${model_name}" = "rec" ]; then
                     inference="tools/infer/predict_rec.py"
-                    rec_model_dir=${save_log}/export_inference/
+                    rec_model_dir=${save_log}_infer
                     det_model_dir=""
                 fi
                 # inference
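status_check is called throughout this file but is defined outside the hunks shown here. A minimal sketch of such a helper, assuming it takes the exit status, a step label, the command string, and the status-log path (the argument order matches the call sites above; the body is an assumption, not the actual definition in test/test.sh):

function status_check(){
    last_status=$1    # exit code of the command that just ran ($?)
    run_name=$2       # label: ${trainer} or ${inference}
    run_command=$3    # the command string built above, recorded for reproducibility
    run_log=$4        # ${status_log}
    if [ ${last_status} -eq 0 ]; then
        echo "${run_name} ran successfully: ${run_command}" | tee -a ${run_log}
    else
        echo "${run_name} FAILED: ${run_command}" | tee -a ${run_log}
    fi
}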
@@ -159,8 +159,8 @@ for train_model in ${train_model_list[*]}; do
         for threads in ${cpu_threads_list[*]}; do
             for rec_batch_size in ${rec_batch_size_list[*]}; do
                 save_log_path="${log_path}/${model_name}_${slim_trainer}_cpu_usemkldnn_${use_mkldnn}_cputhreads_${threads}_recbatchnum_${rec_batch_size}_infer.log"
-                command="${python} ${inference} --enable_mkldnn=${use_mkldnn} --use_gpu=False --cpu_threads=${threads} --benchmark=True --det_model_dir=${save_log}/export_inference/ --rec_batch_num=${rec_batch_size} --rec_model_dir=${rec_model_dir} --image_dir=${img_dir} --save_log_path=${save_log_path}"
-                ${python} ${inference} --enable_mkldnn=${use_mkldnn} --use_gpu=False --cpu_threads=${threads} --benchmark=True --det_model_dir=${save_log}/export_inference/ --rec_batch_num=${rec_batch_size} --rec_model_dir=${rec_model_dir} --image_dir=${img_dir} --save_log_path=${save_log_path}
+                command="${python} ${inference} --enable_mkldnn=${use_mkldnn} --use_gpu=False --cpu_threads=${threads} --benchmark=True --det_model_dir=${det_model_dir} --rec_batch_num=${rec_batch_size} --rec_model_dir=${rec_model_dir} --image_dir=${img_dir} --save_log_path=${save_log_path}"
+                ${python} ${inference} --enable_mkldnn=${use_mkldnn} --use_gpu=False --cpu_threads=${threads} --benchmark=True --det_model_dir=${det_model_dir} --rec_batch_num=${rec_batch_size} --rec_model_dir=${rec_model_dir} --image_dir=${img_dir} --save_log_path=${save_log_path}
                 status_check $? "${inference}" "${command}" "${status_log}"
             done
         done
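To make the change concrete, here is roughly how the fixed CPU command would expand for a detection model. All literal values below are hypothetical; python, save_log, use_mkldnn, threads, img_dir, and the log paths come from configuration not shown in this diff:

# det branch: det_model_dir=${save_log}_infer, rec_model_dir=""
python3 tools/infer/predict_det.py --enable_mkldnn=True --use_gpu=False --cpu_threads=6 \
    --benchmark=True --det_model_dir=output/det_db_infer --rec_batch_num=1 \
    --rec_model_dir= --image_dir=./test_imgs/ --save_log_path=./log/det_cpu_infer.log

Note that output/det_db_infer matches the directory the export step now writes (${save_log}_infer/), which is the point of the fix.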
@@ -173,8 +173,8 @@ for train_model in ${train_model_list[*]}; do
             fi
             for rec_batch_size in ${rec_batch_size_list[*]}; do
                 save_log_path="${log_path}/${model_name}_${slim_trainer}_gpu_usetensorrt_${use_trt}_usefp16_${precision}_recbatchnum_${rec_batch_size}_infer.log"
-                command="${python} ${inference} --use_gpu=True --use_tensorrt=${use_trt} --precision=${precision} --benchmark=True --det_model_dir=${save_log}/export_inference/ --rec_batch_num=${rec_batch_size} --rec_model_dir=${rec_model_dir} --image_dir=${img_dir} --save_log_path=${save_log_path}"
-                ${python} ${inference} --use_gpu=True --use_tensorrt=${use_trt} --precision=${precision} --benchmark=True --det_model_dir=${save_log}/export_inference/ --rec_batch_num=${rec_batch_size} --rec_model_dir=${rec_model_dir} --image_dir=${img_dir} --save_log_path=${save_log_path}
+                command="${python} ${inference} --use_gpu=True --use_tensorrt=${use_trt} --precision=${precision} --benchmark=True --det_model_dir=${det_model_dir} --rec_batch_num=${rec_batch_size} --rec_model_dir=${rec_model_dir} --image_dir=${img_dir} --save_log_path=${save_log_path}"
+                ${python} ${inference} --use_gpu=True --use_tensorrt=${use_trt} --precision=${precision} --benchmark=True --det_model_dir=${det_model_dir} --rec_batch_num=${rec_batch_size} --rec_model_dir=${rec_model_dir} --image_dir=${img_dir} --save_log_path=${save_log_path}
                 status_check $? "${inference}" "${command}" "${status_log}"
             done
         done
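Note the pattern that required an 8-line fix for what is conceptually a 4-line change: each command is written twice, once as a string for status_check logging and once literally for execution, so every edit must be applied to both copies in lockstep (each hunk above changes both). One way to keep the two in sync, sketched here as an alternative rather than what test.sh does, is to build the string once and eval it:

command="${python} ${inference} --use_gpu=True --use_tensorrt=${use_trt} --precision=${precision} --benchmark=True --det_model_dir=${det_model_dir} --rec_batch_num=${rec_batch_size} --rec_model_dir=${rec_model_dir} --image_dir=${img_dir} --save_log_path=${save_log_path}"
eval ${command}    # single source of truth: the executed command is the logged command
status_check $? "${inference}" "${command}" "${status_log}"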