wangsen / paddle_dbnet

Commit 86b90aa9, authored Dec 22, 2021 by Leif
Merge remote-tracking branch 'origin/dygraph' into dygraph
Parents: 801b5771, 8fe1b8d3

Changes: 89 files in the commit; this page shows 9 changed files with 259 additions and 30 deletions (+259, -30).
Changed files on this page:

  test_tipc/configs/rec_r34_vd_tps_bilstm_att_v2.0/train_infer_python.txt   +2    -2
  test_tipc/configs/rec_r34_vd_tps_bilstm_ctc_v2.0/train_infer_python.txt   +2    -2
  test_tipc/configs/rec_r50_fpn_vd_none_srn/train_infer_python.txt          +2    -2
  test_tipc/prepare.sh                                                      +85   -14
  tools/eval.py                                                             +2    -2
  tools/infer/predict_cls.py                                                +0    -2
  tools/infer_det.py                                                        +0    -3
  tools/infer_kie.py (new file, mode 100755)                                +153  -0
  tools/program.py                                                          +13   -3
test_tipc/configs/rec_r34_vd_tps_bilstm_att_v2.0/train_infer_python.txt

@@ -26,7 +26,7 @@ null:null
 ##
 ===========================infer_params===========================
 Global.save_inference_dir:./output/
-Global.pretrained_model:
+Global.checkpoints:
 norm_export:tools/export_model.py -c test_tipc/configs/rec_r34_vd_tps_bilstm_att_v2.0/rec_r34_vd_tps_bilstm_att.yml -o
 quant_export:null
 fpgm_export:null
@@ -34,7 +34,7 @@ distill_export:null
 export1:null
 export2:null
 ##
-infer_model:null
+train_model:./inference/rec_r34_vd_tps_bilstm_att_v2.0_train/best_accuracy
 infer_export:tools/export_model.py -c test_tipc/configs/rec_r34_vd_tps_bilstm_att_v2.0/rec_r34_vd_tps_bilstm_att.yml -o
 infer_quant:False
 inference:tools/infer/predict_rec.py --rec_char_dict_path=./ppocr/utils/ic15_dict.txt --rec_image_shape="3,32,100" --rec_algorithm="RARE"
test_tipc/configs/rec_r34_vd_tps_bilstm_ctc_v2.0/train_infer_python.txt

@@ -26,7 +26,7 @@ null:null
 ##
 ===========================infer_params===========================
 Global.save_inference_dir:./output/
-Global.pretrained_model:
+Global.checkpoints:
 norm_export:tools/export_model.py -c test_tipc/configs/rec_r34_vd_tps_bilstm_ctc_v2.0/rec_icdar15_train.yml -o
 quant_export:null
 fpgm_export:null
@@ -34,7 +34,7 @@ distill_export:null
 export1:null
 export2:null
 ##
-infer_model:null
+train_model:./inference/rec_r34_vd_tps_bilstm_ctc_v2.0_train/best_accuracy
 infer_export:tools/export_model.py -c test_tipc/configs/rec_r34_vd_tps_bilstm_ctc_v2.0/rec_icdar15_train.yml -o
 infer_quant:False
 inference:tools/infer/predict_rec.py --rec_char_dict_path=./ppocr/utils/ic15_dict.txt --rec_image_shape="3,32,100" --rec_algorithm="StarNet"
test_tipc/configs/rec_r50_fpn_vd_none_srn/train_infer_python.txt

@@ -26,7 +26,7 @@ null:null
 ##
 ===========================infer_params===========================
 Global.save_inference_dir:./output/
-Global.pretrained_model:
+Global.checkpoints:
 norm_export:tools/export_model.py -c test_tipc/configs/rec_r50_fpn_vd_none_srn/rec_r50_fpn_srn.yml -o
 quant_export:null
 fpgm_export:null
@@ -34,7 +34,7 @@ distill_export:null
 export1:null
 export2:null
 ##
-infer_model:null
+train_model:./inference/rec_r50_vd_srn_train/best_accuracy
 infer_export:tools/export_model.py -c test_tipc/configs/rec_r50_fpn_vd_none_srn/rec_r50_fpn_srn.yml -o
 infer_quant:False
 inference:tools/infer/predict_rec.py --rec_char_dict_path=./ppocr/utils/ic15_dict.txt --rec_image_shape="1,64,256" --rec_algorithm="SRN" --use_space_char=False
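In all three TIPC configs the whole_infer entry now points at a downloaded training checkpoint (train_model:./inference/<model>_train/best_accuracy) instead of infer_model:null, and the restore key switches from Global.pretrained_model to Global.checkpoints, so the chain exports the trained weights itself before running predict_rec.py. A minimal sketch of how such a config is typically exercised, assuming the standard test_tipc entry scripts (prepare.sh and test_train_inference_python.sh) and the whole_infer mode that prepare.sh handles below:

    # hedged sketch: the usual TIPC flow; script names are the standard test_tipc entry points
    bash test_tipc/prepare.sh test_tipc/configs/rec_r34_vd_tps_bilstm_att_v2.0/train_infer_python.txt whole_infer
    bash test_tipc/test_train_inference_python.sh test_tipc/configs/rec_r34_vd_tps_bilstm_att_v2.0/train_infer_python.txt whole_infer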
test_tipc/prepare.sh

@@ -104,13 +104,17 @@ elif [ ${MODE} = "lite_train_whole_infer" ];then
 elif [ ${MODE} = "whole_infer" ];then
     wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/test/ch_det_data_50.tar --no-check-certificate
     wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/test/rec_inference.tar --no-check-certificate
-    cd ./inference && tar xf rec_inference.tar && cd ../
+    cd ./inference && tar xf rec_inference.tar && tar xf ch_det_data_50.tar && cd ../
     if [ ${model_name} = "ch_ppocr_mobile_v2.0_det" ]; then
         eval_model_name="ch_ppocr_mobile_v2.0_det_train"
         rm -rf ./train_data/icdar2015
         wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_det_train.tar --no-check-certificate
         wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_det_infer.tar --no-check-certificate
         cd ./inference && tar xf ${eval_model_name}.tar && tar xf ch_det_data_50.tar && tar xf ch_ppocr_mobile_v2.0_det_infer.tar && cd ../
     elif [ ${model_name} = "ch_ppocr_mobile_v2.0_det_PACT" ]; then
         eval_model_name="ch_ppocr_mobile_v2.0_det_prune_infer"
         wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/slim/ch_ppocr_mobile_v2.0_det_prune_infer.tar --no-check-certificate
         cd ./inference && tar xf ${eval_model_name}.tar && tar xf ch_det_data_50.tar && cd ../
     elif [ ${model_name} = "ch_ppocr_server_v2.0_det" ]; then
         wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_det_train.tar --no-check-certificate
         cd ./inference && tar xf ch_ppocr_server_v2.0_det_train.tar && tar xf ch_det_data_50.tar && cd ../
@@ -122,21 +126,13 @@ elif [ ${MODE} = "whole_infer" ];then
         wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_det_infer.tar --no-check-certificate
         wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_rec_infer.tar --no-check-certificate
         cd ./inference && tar xf ch_ppocr_server_v2.0_det_infer.tar && tar xf ch_ppocr_server_v2.0_rec_infer.tar && tar xf ch_det_data_50.tar && cd ../
     elif [ ${model_name} = "ch_ppocr_mobile_v2.0_rec" ]; then
         eval_model_name="ch_ppocr_mobile_v2.0_rec_infer"
         wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_rec_infer.tar --no-check-certificate
         cd ./inference && tar xf ${eval_model_name}.tar && cd ../
     elif [ ${model_name} = "ch_ppocr_server_v2.0_rec" ]; then
         eval_model_name="ch_ppocr_server_v2.0_rec_infer"
         wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_rec_infer.tar --no-check-certificate
         cd ./inference && tar xf ${eval_model_name}.tar && cd ../
     elif [ ${model_name} = "ch_ppocr_mobile_v2.0_rec_PACT" ]; then
-        eval_model_name="ch_PP-OCRv2_rec_slim_quant_train"
-        wget -nc -P ./inference https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_rec_slim_quant_train.tar --no-check-certificate
+        eval_model_name="ch_ppocr_mobile_v2.0_rec_slim_infer"
+        wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_rec_slim_infer.tar --no-check-certificate
         cd ./inference && tar xf ${eval_model_name}.tar && cd ../
     elif [ ${model_name} = "ch_ppocr_mobile_v2.0_rec_FPGM" ]; then
-        eval_model_name="ch_PP-OCRv2_rec_train"
-        wget -nc -P ./inference https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_rec_train.tar --no-check-certificate
+        eval_model_name="ch_PP-OCRv2_rec_infer"
+        wget -nc -P ./inference https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_rec_infer.tar --no-check-certificate
         cd ./inference && tar xf ${eval_model_name}.tar && cd ../
     fi
     if [[ ${model_name} =~ "ch_PPOCRv2_det" ]]; then
@@ -147,7 +143,8 @@ elif [ ${MODE} = "whole_infer" ];then
     if [[ ${model_name} =~ "PPOCRv2_ocr_rec" ]]; then
         eval_model_name="ch_PP-OCRv2_rec_infer"
         wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_rec_infer.tar --no-check-certificate
-        cd ./inference && tar xf ${eval_model_name}.tar && cd ../
+        wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_rec_slim_quant_infer.tar --no-check-certificate
+        cd ./inference && tar xf ${eval_model_name}.tar && tar xf ch_PP-OCRv2_rec_slim_quant_infer.tar && cd ../
     fi
     if [ ${model_name} == "en_server_pgnetA" ]; then
         wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/pgnet/en_server_pgnetA.tar --no-check-certificate
@@ -157,6 +154,63 @@ elif [ ${MODE} = "whole_infer" ];then
         wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/det_r50_vd_sast_icdar15_v2.0_train.tar --no-check-certificate
         cd ./inference/ && tar xf det_r50_vd_sast_icdar15_v2.0_train.tar && tar xf ch_det_data_50.tar && cd ../
     fi
     if [ ${model_name} == "rec_mv3_none_none_ctc_v2.0" ]; then
         wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/rec_mv3_none_none_ctc_v2.0_train.tar --no-check-certificate
         cd ./inference/ && tar xf rec_mv3_none_none_ctc_v2.0_train.tar && cd ../
     fi
     if [ ${model_name} == "rec_r34_vd_none_none_ctc_v2.0" ]; then
         wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/rec_r34_vd_none_none_ctc_v2.0_train.tar --no-check-certificate
         cd ./inference/ && tar xf rec_r34_vd_none_none_ctc_v2.0_train.tar && cd ../
     fi
     if [ ${model_name} == "rec_mv3_none_bilstm_ctc_v2.0" ]; then
         wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/rec_mv3_none_bilstm_ctc_v2.0_train.tar --no-check-certificate
         cd ./inference/ && tar xf rec_mv3_none_bilstm_ctc_v2.0_train.tar && cd ../
     fi
     if [ ${model_name} == "rec_r34_vd_none_bilstm_ctc_v2.0" ]; then
         wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/rec_r34_vd_none_bilstm_ctc_v2.0_train.tar --no-check-certificate
         cd ./inference/ && tar xf rec_r34_vd_none_bilstm_ctc_v2.0_train.tar && cd ../
     fi
     if [ ${model_name} == "rec_mv3_tps_bilstm_ctc_v2.0" ]; then
         wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/rec_mv3_tps_bilstm_ctc_v2.0_train.tar --no-check-certificate
         cd ./inference/ && tar xf rec_mv3_tps_bilstm_ctc_v2.0_train.tar && cd ../
     fi
     if [ ${model_name} == "rec_r34_vd_tps_bilstm_ctc_v2.0" ]; then
         wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/rec_r34_vd_tps_bilstm_ctc_v2.0_train.tar --no-check-certificate
         cd ./inference/ && tar xf rec_r34_vd_tps_bilstm_ctc_v2.0_train.tar && cd ../
     fi
     if [ ${model_name} == "ch_ppocr_server_v2.0_rec" ]; then
         wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/ch_ppocr_server_v2.0_rec_train.tar --no-check-certificate
         cd ./inference/ && tar xf ch_ppocr_server_v2.0_rec_train.tar && cd ../
     fi
     if [ ${model_name} == "ch_ppocr_mobile_v2.0_rec" ]; then
         wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_rec_train.tar --no-check-certificate
         cd ./inference/ && tar xf ch_ppocr_mobile_v2.0_rec_train.tar && cd ../
     fi
     if [ ${model_name} == "rec_mtb_nrtr" ]; then
         wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/rec_mtb_nrtr_train.tar --no-check-certificate
         cd ./inference/ && tar xf rec_mtb_nrtr_train.tar && cd ../
     fi
     if [ ${model_name} == "rec_mv3_tps_bilstm_att_v2.0" ]; then
         wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/rec_mv3_tps_bilstm_att_v2.0_train.tar --no-check-certificate
         cd ./inference/ && tar xf rec_mv3_tps_bilstm_att_v2.0_train.tar && cd ../
     fi
     if [ ${model_name} == "rec_r34_vd_tps_bilstm_att_v2.0" ]; then
         wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/rec_r34_vd_tps_bilstm_att_v2.0_train.tar --no-check-certificate
         cd ./inference/ && tar xf rec_r34_vd_tps_bilstm_att_v2.0_train.tar && cd ../
     fi
     if [ ${model_name} == "rec_r31_sar" ]; then
         wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/dygraph_v2.1/rec/rec_r31_sar_train.tar --no-check-certificate
         cd ./inference/ && tar xf rec_r31_sar_train.tar && cd ../
     fi
     if [ ${model_name} == "rec_r50_fpn_vd_none_srn" ]; then
         wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/rec_r50_vd_srn_train.tar --no-check-certificate
         cd ./inference/ && tar xf rec_r50_vd_srn_train.tar && cd ../
     fi
     if [ ${model_name} == "det_r50_vd_sast_totaltext_v2.0" ]; then
         wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/det_r50_vd_sast_totaltext_v2.0_train.tar --no-check-certificate
         cd ./inference/ && tar xf det_r50_vd_sast_totaltext_v2.0_train.tar && cd ../
     fi
     if [ ${model_name} == "det_mv3_db_v2.0" ]; then
         wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/det_mv3_db_v2.0_train.tar --no-check-certificate
         cd ./inference/ && tar xf det_mv3_db_v2.0_train.tar && tar xf ch_det_data_50.tar && cd ../
@@ -165,7 +219,24 @@ elif [ ${MODE} = "whole_infer" ];then
         wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/det_r50_vd_db_v2.0_train.tar --no-check-certificate
         cd ./inference/ && tar xf det_r50_vd_db_v2.0_train.tar && tar xf ch_det_data_50.tar && cd ../
     fi
     if [ ${model_name} == "det_mv3_pse_v2.0" ]; then
         wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/dygraph_v2.1/en_det/det_mv3_pse_v2.0_train.tar --no-check-certificate
         cd ./inference/ && tar xf det_mv3_pse_v2.0_train.tar & cd ../
     fi
     if [ ${model_name} == "det_r50_vd_pse_v2.0" ]; then
         wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/dygraph_v2.1/en_det/det_r50_vd_pse_v2.0_train.tar --no-check-certificate
         cd ./inference/ && tar xf det_r50_vd_pse_v2.0_train.tar & cd ../
     fi
     if [ ${model_name} == "det_mv3_east_v2.0" ]; then
         wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/det_mv3_east_v2.0_train.tar --no-check-certificate
         cd ./inference/ && tar xf det_mv3_east_v2.0_train.tar & cd ../
     fi
     if [ ${model_name} == "det_r50_vd_east_v2.0" ]; then
         wget -nc -P ./inference/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/det_r50_vd_east_v2.0_train.tar --no-check-certificate
         cd ./inference/ && tar xf det_r50_vd_east_v2.0_train.tar & cd ../
     fi
 fi
 if [ ${MODE} = "klquant_whole_infer" ]; then
     wget -nc -P ./train_data/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/test/icdar2015_lite.tar --no-check-certificate
     cd ./train_data/ && tar xf icdar2015_lite.tar
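The whole_infer branch now downloads one training tar per supported TIPC model and unpacks it into ./inference/, repeating the same wget/tar/cd pattern for every model_name. A hypothetical refactoring sketch, not part of this commit, showing how those repeated blocks could collapse into a single helper:

    # hypothetical helper, not part of the commit: fetch a tar into ./inference/ and unpack it there
    download_and_extract() {
        local url=$1
        local tarball
        tarball=$(basename "${url}")
        wget -nc -P ./inference/ "${url}" --no-check-certificate
        cd ./inference/ && tar xf "${tarball}" && cd ../
    }

    # e.g. the rec_mtb_nrtr block above would reduce to:
    # download_and_extract https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/rec_mtb_nrtr_train.tar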
tools/eval.py

@@ -54,7 +54,8 @@ def main():
             config['Architecture']["Head"]['out_channels'] = char_num
     model = build_model(config['Architecture'])
-    extra_input = config['Architecture']['algorithm'] in ["SRN", "SAR"]
+    extra_input = config['Architecture']['algorithm'] in ["SRN", "NRTR", "SAR", "SEED"]
     if "model_type" in config['Architecture'].keys():
         model_type = config['Architecture']['model_type']
     else:
@@ -68,7 +69,6 @@ def main():
     # build metric
     eval_class = build_metric(config['Metric'])
     # start eval
     metric = program.eval(model, valid_dataloader, post_process_class, eval_class, model_type, extra_input)
tools/infer/predict_cls.py

@@ -145,8 +145,6 @@ def main(args):
     for ino in range(len(img_list)):
         logger.info("Predicts of {}:{}".format(valid_image_file_list[ino], cls_res[ino]))
-    logger.info("The predict time about text angle classify module is as follows: ")

 if __name__ == "__main__":
tools/infer_det.py

@@ -126,9 +126,6 @@ def main():
             otstr = file + "\t" + json.dumps(dt_boxes_json) + "\n"
             fout.write(otstr.encode())
-            save_det_path = os.path.dirname(config['Global']['save_res_path']) + "/det_results/"
-            draw_det_res(boxes, config, src_img, file, save_det_path)
     logger.info("success!")
tools/infer_kie.py (new file, mode 0 → 100755)

# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np
import paddle.nn.functional as F

import os
import sys

__dir__ = os.path.dirname(os.path.abspath(__file__))
sys.path.append(__dir__)
sys.path.append(os.path.abspath(os.path.join(__dir__, '..')))

os.environ["FLAGS_allocator_strategy"] = 'auto_growth'

import cv2
import paddle

from ppocr.data import create_operators, transform
from ppocr.modeling.architectures import build_model
from ppocr.utils.save_load import load_model
import tools.program as program
import time


def read_class_list(filepath):
    dict = {}
    with open(filepath, "r") as f:
        lines = f.readlines()
        for line in lines:
            key, value = line.split(" ")
            dict[key] = value.rstrip()
    return dict


def draw_kie_result(batch, node, idx_to_cls, count):
    img = batch[6].copy()
    boxes = batch[7]
    h, w = img.shape[:2]
    pred_img = np.ones((h, w * 2, 3), dtype=np.uint8) * 255
    max_value, max_idx = paddle.max(node, -1), paddle.argmax(node, -1)
    node_pred_label = max_idx.numpy().tolist()
    node_pred_score = max_value.numpy().tolist()

    for i, box in enumerate(boxes):
        if i >= len(node_pred_label):
            break
        new_box = [[box[0], box[1]], [box[2], box[1]],
                   [box[2], box[3]], [box[0], box[3]]]
        Pts = np.array([new_box], np.int32)
        cv2.polylines(
            img, [Pts.reshape((-1, 1, 2))],
            True,
            color=(255, 255, 0),
            thickness=1)
        x_min = int(min([point[0] for point in new_box]))
        y_min = int(min([point[1] for point in new_box]))

        pred_label = str(node_pred_label[i])
        if pred_label in idx_to_cls:
            pred_label = idx_to_cls[pred_label]
        pred_score = '{:.2f}'.format(node_pred_score[i])
        text = pred_label + '(' + pred_score + ')'
        cv2.putText(pred_img, text, (x_min * 2, y_min),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 0, 0), 1)
    vis_img = np.ones((h, w * 3, 3), dtype=np.uint8) * 255
    vis_img[:, :w] = img
    vis_img[:, w:] = pred_img
    save_kie_path = os.path.dirname(config['Global']['save_res_path']) + "/kie_results/"
    if not os.path.exists(save_kie_path):
        os.makedirs(save_kie_path)
    save_path = os.path.join(save_kie_path, str(count) + ".png")
    cv2.imwrite(save_path, vis_img)
    logger.info("The Kie Image saved in {}".format(save_path))


def main():
    global_config = config['Global']

    # build model
    model = build_model(config['Architecture'])
    load_model(config, model)

    # create data ops
    transforms = []
    for op in config['Eval']['dataset']['transforms']:
        transforms.append(op)

    data_dir = config['Eval']['dataset']['data_dir']

    ops = create_operators(transforms, global_config)

    save_res_path = config['Global']['save_res_path']
    class_path = config['Global']['class_path']
    idx_to_cls = read_class_list(class_path)
    if not os.path.exists(os.path.dirname(save_res_path)):
        os.makedirs(os.path.dirname(save_res_path))

    model.eval()

    warmup_times = 0
    count_t = []
    with open(save_res_path, "wb") as fout:
        with open(config['Global']['infer_img'], "rb") as f:
            lines = f.readlines()
            for index, data_line in enumerate(lines):
                if index == 10:
                    warmup_t = time.time()
                data_line = data_line.decode('utf-8')
                substr = data_line.strip("\n").split("\t")
                img_path, label = data_dir + "/" + substr[0], substr[1]
                data = {'img_path': img_path, 'label': label}
                with open(data['img_path'], 'rb') as f:
                    img = f.read()
                    data['image'] = img
                st = time.time()
                batch = transform(data, ops)
                batch_pred = [0] * len(batch)
                for i in range(len(batch)):
                    batch_pred[i] = paddle.to_tensor(
                        np.expand_dims(
                            batch[i], axis=0))
                st = time.time()
                node, edge = model(batch_pred)
                node = F.softmax(node, -1)
                count_t.append(time.time() - st)
                draw_kie_result(batch, node, idx_to_cls, index)
    logger.info("success!")
    logger.info("It took {} s for predict {} images.".format(
        np.sum(count_t), len(count_t)))
    ips = len(count_t[warmup_times:]) / np.sum(count_t[warmup_times:])
    logger.info("The ips is {} images/s".format(ips))


if __name__ == '__main__':
    config, device, logger, vdl_writer = program.preprocess()
    main()
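The new script reuses the common tools entry point: program.preprocess() parses a -c config file plus -o key=value overrides, Global.infer_img points at an annotation file with one image_path<TAB>label line per entry (read in the loop above), and Global.class_path names a file whose lines are "index label" pairs consumed by read_class_list. A hedged invocation sketch; the config, checkpoint, and dataset paths below are placeholders for illustration, not taken from this commit:

    # hypothetical run of the new KIE inference script (paths are placeholders)
    python3 tools/infer_kie.py \
        -c configs/kie/kie_unet_sdmgr.yml \
        -o Global.checkpoints=./output/kie/best_accuracy \
           Global.infer_img=./train_data/wildreceipt/test.txt \
           Global.class_path=./train_data/wildreceipt/class_list.txt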
tools/program.py

@@ -239,6 +239,8 @@ def train(config,
             else:
                 if model_type == 'table' or extra_input:
                     preds = model(images, data=batch[1:])
+                elif model_type == "kie":
+                    preds = model(batch)
                 else:
                     preds = model(images)
             loss = loss_class(preds, batch)
@@ -266,7 +268,7 @@ def train(config,
             if cal_metric_during_train:  # only rec and cls need
                 batch = [item.numpy() for item in batch]
-                if model_type == 'table':
+                if model_type in ['table', 'kie']:
                     eval_class(preds, batch)
                 else:
                     post_result = post_process_class(preds, batch[1])
@@ -399,17 +401,20 @@ def eval(model,
             start = time.time()
             if model_type == 'table' or extra_input:
                 preds = model(images, data=batch[1:])
+            elif model_type == "kie":
+                preds = model(batch)
             else:
                 preds = model(images)
             batch = [item.numpy() for item in batch]
             # Obtain usable results from post-processing methods
             total_time += time.time() - start
             # Evaluate the results of the current batch
-            if model_type == 'table':
+            if model_type in ['table', 'kie']:
                 eval_class(preds, batch)
             else:
                 post_result = post_process_class(preds, batch[1])
                 eval_class(post_result, batch)
             pbar.update(1)
             total_frame += len(images)
         # Get final metric,eg. acc or hmean
@@ -498,8 +503,13 @@ def preprocess(is_train=False):
     assert alg in [
         'EAST', 'DB', 'SAST', 'Rosetta', 'CRNN', 'STARNet', 'RARE', 'SRN',
         'CLS', 'PGNet', 'Distillation', 'NRTR', 'TableAttn', 'SAR', 'PSE',
-        'SEED'
+        'SEED', 'SDMGR'
     ]
+    windows_not_support_list = ['PSE']
+    if platform.system() == "Windows" and alg in windows_not_support_list:
+        logger.warning('{} is not support in Windows now'.format(
+            windows_not_support_list))
+        sys.exit()
     device = 'gpu:{}'.format(dist.ParallelEnv().dev_id) if use_gpu else 'cpu'
     device = paddle.set_device(device)
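With model_type "kie" now dispatched in both train() and eval(), and 'SDMGR' accepted by the algorithm assert in preprocess(), the usual training and evaluation entry points can be pointed at a KIE config as well. A brief hedged sketch, reusing the same hypothetical SDMGR config path as in the inference example above (paths are placeholders, not taken from this commit):

    # hypothetical KIE train/eval runs via the standard tools entry points
    python3 tools/train.py -c configs/kie/kie_unet_sdmgr.yml -o Global.save_model_dir=./output/kie/
    python3 tools/eval.py  -c configs/kie/kie_unet_sdmgr.yml -o Global.checkpoints=./output/kie/best_accuracy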