#!/bin/bash
# prepare_lite_cpp.sh
# Prepare inference models, test data and the Paddle-Lite prediction library
# for the TIPC lite C++ (ARM) test, and package everything into test_lite.tar.
#
# Usage: bash prepare_lite_cpp.sh <tipc_config_file> <download|compile>
#   $1 - TIPC config file; lines 2-6 carry the values parsed below
#   $2 - where the Paddle-Lite prediction library comes from:
#        "download" a prebuilt release or "compile" from source

source ./test_tipc/common_func.sh

FILENAME=$1
paddlelite_library_source=$2

dataline=$(cat "${FILENAME}")

# parser params: split the config file into one array element per line
# (intentionally unquoted so IFS word-splitting applies)
IFS=$'\n'
lines=(${dataline})

inference_cmd=$(func_parser_value "${lines[1]}")
DEVICE=$(func_parser_value "${lines[2]}")
det_lite_model_list=$(func_parser_value "${lines[3]}")
rec_lite_model_list=$(func_parser_value "${lines[4]}")
cls_lite_model_list=$(func_parser_value "${lines[5]}")

# Select which models to convert according to the inference command type.
# The *_lite_model_list values are '|'-separated strings; the loop below
# splits them with IFS="|".
if [[ $inference_cmd =~ "det" ]]; then
    lite_model_list=(${det_lite_model_list[*]})
elif [[ $inference_cmd =~ "rec" ]]; then
    # recognition also needs the text-direction classifier models
    lite_model_list=(${rec_lite_model_list[*]} ${cls_lite_model_list[*]})
elif [[ $inference_cmd =~ "system" ]]; then
    # full system pipeline: detection + recognition + classification
    lite_model_list=(${det_lite_model_list[*]} ${rec_lite_model_list[*]} ${cls_lite_model_list[*]})
else
    echo "inference_cmd is wrong, please check." >&2
    exit 1
fi

# Pick the prebuilt Paddle-Lite library, opt valid_targets and build flags
# for the requested device. end_index is the length of the library archive
# basename without its ".tar.gz" suffix; it is used later to derive the
# name of the extracted directory.
if [[ "${DEVICE}" = "ARM_CPU" ]]; then
    valid_targets="arm"
    paddlelite_library_url="https://github.com/PaddlePaddle/Paddle-Lite/releases/download/v2.10-rc/inference_lite_lib.android.armv8.gcc.c++_shared.with_extra.with_cv.tar.gz"
    end_index="66"
    compile_with_opencl="OFF"
elif [[ "${DEVICE}" = "ARM_GPU_OPENCL" ]]; then
    valid_targets="opencl"
    paddlelite_library_url="https://github.com/PaddlePaddle/Paddle-Lite/releases/download/v2.10-rc/inference_lite_lib.armv8.clang.with_exception.with_extra.with_cv.opencl.tar.gz"
    end_index="71"
    compile_with_opencl="ON"
else
    echo "DEVICE only support ARM_CPU, ARM_GPU_OPENCL." >&2
    exit 2
fi

# prepare paddlelite models: download each inference model archive and
# convert it to Paddle-Lite naive-buffer format (<model_dir>_opt.nb)
# with the paddle_lite_opt tool.
pip install paddlelite==2.10-rc
current_dir=${PWD}
IFS="|"
model_path=./inference_models

for model in ${lite_model_list[*]}; do
    # choose the download location by model generation
    if [[ $model =~ "PP-OCRv2" ]]; then
        inference_model_url=https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/${model}.tar
    elif [[ $model =~ "v2.0" ]]; then
        inference_model_url=https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/${model}.tar
    else
        echo "Model is wrong, please check." >&2
        exit 3
    fi
    inference_model=${inference_model_url##*/}
    wget -nc -P "${model_path}" "${inference_model_url}"
    # extract in a subshell so a failing tar cannot strand the script
    # inside ${model_path} for the next iteration
    (cd "${model_path}" && tar -xf "${inference_model}")
    model_dir=${model_path}/${inference_model%.*}
    model_file=${model_dir}/inference.pdmodel
    param_file=${model_dir}/inference.pdiparams
    paddle_lite_opt --model_dir="${model_dir}" --model_file="${model_file}" --param_file="${param_file}" --valid_targets="${valid_targets}" --optimize_out="${model_dir}_opt"
done

# prepare test data: download the lite icdar2015 set and drop the archive
data_url=https://paddleocr.bj.bcebos.com/dygraph_v2.0/test/icdar2015_lite.tar
data_file=${data_url##*/}
wget -nc -P ./test_data "${data_url}"
# subshell keeps the working directory unchanged even if tar/rm fail
(cd ./test_data && tar -xf "${data_file}" && rm -- "${data_file}")

# prepare paddlelite prediction library: either download a prebuilt release
# archive or compile it from source. Either branch leaves
# ${paddlelite_library_file} pointing at the library directory.
if [[ ${paddlelite_library_source} = "download" ]]; then
    paddlelite_library_zipfile=${paddlelite_library_url##*/}
    # extracted directory name = archive name truncated at end_index,
    # i.e. the basename without its ".tar.gz" suffix
    paddlelite_library_file=${paddlelite_library_zipfile:0:${end_index}}
    wget "${paddlelite_library_url}" && tar -xf "${paddlelite_library_zipfile}"
    # NOTE: the original `cd ${paddlelite_library_zipfile}` targeted the
    # tarball file (not the extracted dir) and always failed; all later
    # steps expect to run from the current directory, so it was removed.
elif [[ ${paddlelite_library_source} = "compile" ]]; then
    git clone -b release/v2.10 https://github.com/PaddlePaddle/Paddle-Lite.git
    cd Paddle-Lite
    ./lite/tools/build_android.sh --arch=armv8 --with_cv=ON --with_extra=ON --toolchain=clang --with_opencl="${compile_with_opencl}"
    cd ../
    cp -r Paddle-Lite/build.lite.android.armv8.clang/inference_lite_lib.android.armv8/ .
    paddlelite_library_file=inference_lite_lib.android.armv8
else
    echo "paddlelite_library_source only support 'download' and 'compile'" >&2
    exit 3
fi

# organize the required files: stage converted models, test data, config,
# dictionary, demo sources, the prediction library and the test scripts
# into the ocr demo directory of the prediction library.
ocr_demo_dir=${paddlelite_library_file}/demo/cxx/ocr
mkdir -p "${ocr_demo_dir}/test_lite"
cp -r ${model_path}/*_opt.nb test_data "${ocr_demo_dir}/test_lite"
cp ppocr/utils/ppocr_keys_v1.txt deploy/lite/config.txt "${ocr_demo_dir}/test_lite"
cp -r ./deploy/lite/* "${ocr_demo_dir}/"
cp "${paddlelite_library_file}/cxx/lib/libpaddle_light_api_shared.so" "${ocr_demo_dir}/test_lite"
cp "${FILENAME}" test_tipc/test_lite_arm_cpp.sh test_tipc/common_func.sh "${ocr_demo_dir}/test_lite"
cd "${ocr_demo_dir}/"
git clone https://github.com/cuicheng01/AutoLog.git

# compile and do some postprocess
# make is run twice with a pause in between — presumably to work around a
# flaky first build of the demo; TODO confirm whether one pass suffices
make -j
sleep 1
make -j
# NOTE(review): the second cp stages the paddle shared lib under the name
# libc++_shared.so — looks intentional for the on-device loader, confirm
cp ocr_db_crnn test_lite && cp test_lite/libpaddle_light_api_shared.so test_lite/libc++_shared.so
tar -cf test_lite.tar ./test_lite && cp test_lite.tar "${current_dir}" && cd "${current_dir}"
# cleanup; ${var:?} aborts instead of expanding to a bare `rm -rf *`
# if either variable is somehow empty
rm -rf "${paddlelite_library_file:?}"* && rm -rf "${model_path:?}"