Commit 4353fa59 authored by limm's avatar limm
Browse files

add part code

parents
Pipeline #2807 canceled with stages
====================
classifier.h
====================
.. doxygenstruct:: mmdeploy_classification_t
   :members:
   :undoc-members:
.. doxygentypedef:: mmdeploy_classifier_t
.. doxygenfunction:: mmdeploy_classifier_create
.. doxygenfunction:: mmdeploy_classifier_create_by_path
.. doxygenfunction:: mmdeploy_classifier_apply
.. doxygenfunction:: mmdeploy_classifier_release_result
.. doxygenfunction:: mmdeploy_classifier_destroy
.. doxygenfunction:: mmdeploy_classifier_create_v2
.. doxygenfunction:: mmdeploy_classifier_create_input
.. doxygenfunction:: mmdeploy_classifier_apply_v2
.. doxygenfunction:: mmdeploy_classifier_apply_async
.. doxygenfunction:: mmdeploy_classifier_get_result
====================
common.h
====================
.. doxygenenum:: mmdeploy_pixel_format_t
.. doxygenenum:: mmdeploy_data_type_t
.. doxygenenum:: mmdeploy_status_t
.. doxygentypedef:: mmdeploy_device_t
.. doxygentypedef:: mmdeploy_profiler_t
.. doxygenstruct:: mmdeploy_mat_t
   :members:
   :undoc-members:
.. doxygenstruct:: mmdeploy_rect_t
   :members:
   :undoc-members:
.. doxygenstruct:: mmdeploy_point_t
   :members:
   :undoc-members:
.. doxygentypedef:: mmdeploy_value_t
.. doxygentypedef:: mmdeploy_context_t
.. doxygenfunction:: mmdeploy_value_copy
.. doxygenfunction:: mmdeploy_value_destroy
.. doxygenfunction:: mmdeploy_device_create
.. doxygenfunction:: mmdeploy_device_destroy
.. doxygenfunction:: mmdeploy_profiler_create
.. doxygenfunction:: mmdeploy_profiler_destroy
.. doxygenfunction:: mmdeploy_context_create
.. doxygenfunction:: mmdeploy_context_create_by_device
.. doxygenfunction:: mmdeploy_context_destroy
.. doxygenfunction:: mmdeploy_context_add
.. doxygenfunction:: mmdeploy_common_create_input
====================
detector.h
====================
.. doxygenstruct:: mmdeploy_instance_mask_t
   :members:
   :undoc-members:
.. doxygenstruct:: mmdeploy_detection_t
   :members:
   :undoc-members:
.. doxygentypedef:: mmdeploy_detector_t
.. doxygenfunction:: mmdeploy_detector_create
.. doxygenfunction:: mmdeploy_detector_create_by_path
.. doxygenfunction:: mmdeploy_detector_apply
.. doxygenfunction:: mmdeploy_detector_release_result
.. doxygenfunction:: mmdeploy_detector_destroy
.. doxygenfunction:: mmdeploy_detector_create_v2
.. doxygenfunction:: mmdeploy_detector_create_input
.. doxygenfunction:: mmdeploy_detector_apply_v2
.. doxygenfunction:: mmdeploy_detector_apply_async
.. doxygenfunction:: mmdeploy_detector_get_result
====================
executor.h
====================
.. doxygentypedef:: mmdeploy_then_fn_t
.. doxygentypedef:: mmdeploy_then_fn_v2_t
.. doxygentypedef:: mmdeploy_then_fn_v3_t
.. doxygentypedef:: mmdeploy_sender_t
.. doxygentypedef:: mmdeploy_scheduler_t
.. doxygentypedef:: mmdeploy_let_value_fn_t
.. doxygenfunction:: mmdeploy_executor_inline
.. doxygenfunction:: mmdeploy_executor_system_pool
.. doxygenfunction:: mmdeploy_executor_create_thread_pool
.. doxygenfunction:: mmdeploy_executor_create_thread
.. doxygenfunction:: mmdeploy_executor_dynamic_batch
.. doxygenfunction:: mmdeploy_scheduler_destroy
.. doxygenfunction:: mmdeploy_sender_copy
.. doxygenfunction:: mmdeploy_sender_destroy
.. doxygenfunction:: mmdeploy_executor_just
.. doxygenfunction:: mmdeploy_executor_schedule
.. doxygenfunction:: mmdeploy_executor_transfer_just
.. doxygenfunction:: mmdeploy_executor_transfer
.. doxygenfunction:: mmdeploy_executor_on
.. doxygenfunction:: mmdeploy_executor_then
.. doxygenfunction:: mmdeploy_executor_let_value
.. doxygenfunction:: mmdeploy_executor_split
.. doxygenfunction:: mmdeploy_executor_when_all
.. doxygenfunction:: mmdeploy_executor_ensure_started
.. doxygenfunction:: mmdeploy_executor_start_detached
.. doxygenfunction:: mmdeploy_executor_sync_wait
.. doxygenfunction:: mmdeploy_executor_sync_wait_v2
.. doxygenfunction:: mmdeploy_executor_execute
====================
model.h
====================
.. doxygentypedef:: mmdeploy_model_t
.. doxygenfunction:: mmdeploy_model_create_by_path
.. doxygenfunction:: mmdeploy_model_create
.. doxygenfunction:: mmdeploy_model_destroy
====================
pipeline.h
====================
.. doxygentypedef:: mmdeploy_pipeline_t
.. doxygenfunction:: mmdeploy_pipeline_create_v3
.. doxygenfunction:: mmdeploy_pipeline_create_from_model
.. doxygenfunction:: mmdeploy_pipeline_apply
.. doxygenfunction:: mmdeploy_pipeline_apply_async
.. doxygenfunction:: mmdeploy_pipeline_destroy
====================
pose_detector.h
====================
.. doxygenstruct:: mmdeploy_pose_detection_t
   :members:
   :undoc-members:
.. doxygentypedef:: mmdeploy_pose_detector_t
.. doxygenfunction:: mmdeploy_pose_detector_create
.. doxygenfunction:: mmdeploy_pose_detector_create_by_path
.. doxygenfunction:: mmdeploy_pose_detector_apply
.. doxygenfunction:: mmdeploy_pose_detector_apply_bbox
.. doxygenfunction:: mmdeploy_pose_detector_release_result
.. doxygenfunction:: mmdeploy_pose_detector_destroy
.. doxygenfunction:: mmdeploy_pose_detector_create_v2
.. doxygenfunction:: mmdeploy_pose_detector_create_input
.. doxygenfunction:: mmdeploy_pose_detector_apply_v2
.. doxygenfunction:: mmdeploy_pose_detector_apply_async
.. doxygenfunction:: mmdeploy_pose_detector_get_result
====================
pose_tracker.h
====================
.. doxygentypedef:: mmdeploy_pose_tracker_t
.. doxygentypedef:: mmdeploy_pose_tracker_state_t
.. doxygenstruct:: mmdeploy_pose_tracker_param_t
   :members:
   :undoc-members:
.. doxygenstruct:: mmdeploy_pose_tracker_target_t
   :members:
   :undoc-members:
.. doxygenfunction:: mmdeploy_pose_tracker_default_params
.. doxygenfunction:: mmdeploy_pose_tracker_create
.. doxygenfunction:: mmdeploy_pose_tracker_destroy
.. doxygenfunction:: mmdeploy_pose_tracker_create_state
.. doxygenfunction:: mmdeploy_pose_tracker_destroy_state
.. doxygenfunction:: mmdeploy_pose_tracker_apply
.. doxygenfunction:: mmdeploy_pose_tracker_release_result
====================
rotated_detector.h
====================
.. doxygenstruct:: mmdeploy_rotated_detection_t
   :members:
   :undoc-members:
.. doxygentypedef:: mmdeploy_rotated_detector_t
.. doxygenfunction:: mmdeploy_rotated_detector_create
.. doxygenfunction:: mmdeploy_rotated_detector_create_by_path
.. doxygenfunction:: mmdeploy_rotated_detector_apply
.. doxygenfunction:: mmdeploy_rotated_detector_release_result
.. doxygenfunction:: mmdeploy_rotated_detector_destroy
.. doxygenfunction:: mmdeploy_rotated_detector_create_v2
.. doxygenfunction:: mmdeploy_rotated_detector_create_input
.. doxygenfunction:: mmdeploy_rotated_detector_apply_v2
.. doxygenfunction:: mmdeploy_rotated_detector_apply_async
.. doxygenfunction:: mmdeploy_rotated_detector_get_result
====================
segmentor.h
====================
.. doxygenstruct:: mmdeploy_segmentation_t
   :members:
   :undoc-members:
.. doxygentypedef:: mmdeploy_segmentor_t
.. doxygenfunction:: mmdeploy_segmentor_create
.. doxygenfunction:: mmdeploy_segmentor_create_by_path
.. doxygenfunction:: mmdeploy_segmentor_apply
.. doxygenfunction:: mmdeploy_segmentor_release_result
.. doxygenfunction:: mmdeploy_segmentor_destroy
.. doxygenfunction:: mmdeploy_segmentor_create_v2
.. doxygenfunction:: mmdeploy_segmentor_create_input
.. doxygenfunction:: mmdeploy_segmentor_apply_v2
.. doxygenfunction:: mmdeploy_segmentor_apply_async
.. doxygenfunction:: mmdeploy_segmentor_get_result
====================
text_detector.h
====================
.. doxygenstruct:: mmdeploy_text_detection_t
   :members:
   :undoc-members:
.. doxygentypedef:: mmdeploy_text_detector_t
.. doxygenfunction:: mmdeploy_text_detector_create
.. doxygenfunction:: mmdeploy_text_detector_create_by_path
.. doxygenfunction:: mmdeploy_text_detector_apply
.. doxygenfunction:: mmdeploy_text_detector_release_result
.. doxygenfunction:: mmdeploy_text_detector_destroy
.. doxygenfunction:: mmdeploy_text_detector_create_v2
.. doxygenfunction:: mmdeploy_text_detector_create_input
.. doxygenfunction:: mmdeploy_text_detector_apply_v2
.. doxygenfunction:: mmdeploy_text_detector_apply_async
.. doxygenfunction:: mmdeploy_text_detector_get_result
.. doxygentypedef:: mmdeploy_text_detector_continue_t
.. doxygenfunction:: mmdeploy_text_detector_apply_async_v3
.. doxygenfunction:: mmdeploy_text_detector_continue_async
====================
text_recognizer.h
====================
.. doxygenstruct:: mmdeploy_text_recognition_t
   :members:
   :undoc-members:
.. doxygentypedef:: mmdeploy_text_recognizer_t
.. doxygenfunction:: mmdeploy_text_recognizer_create
.. doxygenfunction:: mmdeploy_text_recognizer_create_by_path
.. doxygenfunction:: mmdeploy_text_recognizer_apply
.. doxygenfunction:: mmdeploy_text_recognizer_apply_bbox
.. doxygenfunction:: mmdeploy_text_recognizer_release_result
.. doxygenfunction:: mmdeploy_text_recognizer_destroy
.. doxygenfunction:: mmdeploy_text_recognizer_create_v2
.. doxygenfunction:: mmdeploy_text_recognizer_create_input
.. doxygenfunction:: mmdeploy_text_recognizer_apply_v2
.. doxygenfunction:: mmdeploy_text_recognizer_apply_async
.. doxygenfunction:: mmdeploy_text_recognizer_apply_async_v3
.. doxygenfunction:: mmdeploy_text_recognizer_continue_async
.. doxygenfunction:: mmdeploy_text_recognizer_get_result
====================
video_recognizer.h
====================
.. doxygenstruct:: mmdeploy_video_recognition_t
   :members:
   :undoc-members:
.. doxygenstruct:: mmdeploy_video_sample_info_t
   :members:
   :undoc-members:
.. doxygentypedef:: mmdeploy_video_recognizer_t
.. doxygenfunction:: mmdeploy_video_recognizer_create
.. doxygenfunction:: mmdeploy_video_recognizer_create_by_path
.. doxygenfunction:: mmdeploy_video_recognizer_apply
.. doxygenfunction:: mmdeploy_video_recognizer_release_result
.. doxygenfunction:: mmdeploy_video_recognizer_destroy
.. doxygenfunction:: mmdeploy_video_recognizer_create_v2
.. doxygenfunction:: mmdeploy_video_recognizer_create_input
.. doxygenfunction:: mmdeploy_video_recognizer_apply_v2
.. doxygenfunction:: mmdeploy_video_recognizer_get_result
====================
C API Reference
====================
.. toctree::
   :maxdepth: 1

   c/common
   c/executor
   c/model
   c/pipeline
   c/classifier
   c/detector
   c/pose_detector
   c/pose_tracker
   c/rotated_detector
   c/segmentor
   c/text_detector
   c/text_recognizer
   c/video_recognizer
========================
SDK Documentation
========================
Setup & Usage
--------------
.. toctree::
   :maxdepth: 1

   quick_start
   profiler
API Reference
--------------
.. toctree::
   :maxdepth: 1

   c_api
# profiler
The SDK has the ability to record the time consumption of each module in the pipeline. It is disabled by default. To use this ability, two steps are required:
- Generate profiler data
- Analyze profiler data
## Generate profiler data
Using the C interface and classification pipeline as an example, when creating the pipeline, the create api with context information needs to be used, and profiler handle needs to be added to the context. The detailed code is shown below. Running the demo normally will generate profiler data "profiler_data.txt" in the current directory.
```c++
#include <fstream>
#include <opencv2/imgcodecs/imgcodecs.hpp>
#include <string>
#include "mmdeploy/classifier.h"
int main(int argc, char* argv[]) {
if (argc != 4) {
fprintf(stderr, "usage:\n image_classification device_name dump_model_directory image_path\n");
return 1;
}
auto device_name = argv[1];
auto model_path = argv[2];
auto image_path = argv[3];
cv::Mat img = cv::imread(image_path);
if (!img.data) {
fprintf(stderr, "failed to load image: %s\n", image_path);
return 1;
}
mmdeploy_model_t model{};
mmdeploy_model_create_by_path(model_path, &model);
// create profiler and add it to context
// profiler data will save to profiler_data.txt
mmdeploy_profiler_t profiler{};
mmdeploy_profiler_create("profiler_data.txt", &profiler);
mmdeploy_context_t context{};
mmdeploy_context_create_by_device(device_name, 0, &context);
mmdeploy_context_add(context, MMDEPLOY_TYPE_PROFILER, nullptr, profiler);
mmdeploy_classifier_t classifier{};
int status{};
status = mmdeploy_classifier_create_v2(model, context, &classifier);
if (status != MMDEPLOY_SUCCESS) {
fprintf(stderr, "failed to create classifier, code: %d\n", (int)status);
return 1;
}
mmdeploy_mat_t mat{
img.data, img.rows, img.cols, 3, MMDEPLOY_PIXEL_FORMAT_BGR, MMDEPLOY_DATA_TYPE_UINT8};
// inference loop
for (int i = 0; i < 100; i++) {
mmdeploy_classification_t* res{};
int* res_count{};
status = mmdeploy_classifier_apply(classifier, &mat, 1, &res, &res_count);
mmdeploy_classifier_release_result(res, res_count, 1);
}
mmdeploy_classifier_destroy(classifier);
mmdeploy_model_destroy(model);
mmdeploy_profiler_destroy(profiler);
mmdeploy_context_destroy(context);
return 0;
}
```
## Analyze profiler data
The performance data can be visualized using a script.
```bash
python tools/sdk_analyze.py profiler_data.txt
```
The parsing results are as follows: "name" represents the name of the node, "n_call" represents the number of calls, "t_mean" represents the average time consumption, "t_50%" and "t_90%" represent the percentiles of the time consumption.
```bash
+---------------------------+--------+-------+--------+--------+-------+-------+
| name | occupy | usage | n_call | t_mean | t_50% | t_90% |
+===========================+========+=======+========+========+=======+=======+
| ./Pipeline | - | - | 100 | 4.831 | 1.913 | 1.946 |
+---------------------------+--------+-------+--------+--------+-------+-------+
| Preprocess/Compose | - | - | 100 | 0.125 | 0.118 | 0.144 |
+---------------------------+--------+-------+--------+--------+-------+-------+
| LoadImageFromFile | 0.017 | 0.017 | 100 | 0.081 | 0.077 | 0.098 |
+---------------------------+--------+-------+--------+--------+-------+-------+
| Resize | 0.003 | 0.003 | 100 | 0.012 | 0.012 | 0.013 |
+---------------------------+--------+-------+--------+--------+-------+-------+
| CenterCrop | 0.002 | 0.002 | 100 | 0.008 | 0.008 | 0.008 |
+---------------------------+--------+-------+--------+--------+-------+-------+
| Normalize | 0.002 | 0.002 | 100 | 0.009 | 0.009 | 0.009 |
+---------------------------+--------+-------+--------+--------+-------+-------+
| ImageToTensor | 0.002 | 0.002 | 100 | 0.008 | 0.007 | 0.007 |
+---------------------------+--------+-------+--------+--------+-------+-------+
| Collect | 0.001 | 0.001 | 100 | 0.005 | 0.005 | 0.005 |
+---------------------------+--------+-------+--------+--------+-------+-------+
| resnet | 0.968 | 0.968 | 100 | 4.678 | 1.767 | 1.774 |
+---------------------------+--------+-------+--------+--------+-------+-------+
| postprocess | 0.003 | 0.003 | 100 | 0.015 | 0.015 | 0.017 |
+---------------------------+--------+-------+--------+--------+-------+-------+
```
# Quick Start
In terms of model deployment, most ML models require some preprocessing steps on the input data and postprocessing steps on the output to get structured results. The MMDeploy SDK provides many pre-processing and post-processing operations, so when you convert and deploy a model, you can enjoy the convenience brought by the MMDeploy SDK.
## Model Conversion
You can refer to [convert model](../02-how-to-run/convert_model.md) for more details.
After model conversion with `--dump-info`, the structure of model directory (tensorrt model) is as follows. If you convert to other backend, the structure will be slightly different. The two images are for quick conversion validation.
```bash
├── deploy.json
├── detail.json
├── pipeline.json
├── end2end.onnx
├── end2end.engine
├── output_pytorch.jpg
└── output_tensorrt.jpg
```
The files related to sdk are:
- deploy.json // model information.
- pipeline.json // inference information.
- end2end.engine // model file for TensorRT; will be different for other backends.
The SDK can read the model directory directly, or you can pack the related files into a zip archive for better distribution or encryption. To read the zip file, the SDK should be built with `-DMMDEPLOY_ZIP_MODEL=ON`.
## SDK Inference
Generally speaking, there are three steps to run inference with a model.
- Create a pipeline
- Load the data
- Model inference
We use `classifier` as an example to show these three steps.
### Create a pipeline
#### Load model from disk
```cpp
std::string model_path = "/data/resnet"; // or "/data/resnet.zip" if build with `-DMMDEPLOY_ZIP_MODEL=ON`
mmdeploy_model_t model;
mmdeploy_model_create_by_path(model_path, &model);
mmdeploy_classifier_t classifier{};
mmdeploy_classifier_create(model, "cpu", 0, &classifier);
```
#### Load model from memory
```cpp
std::string model_path = "/data/resnet.zip";
std::ifstream ifs(model_path, std::ios::binary); // /path/to/zipmodel
ifs.seekg(0, std::ios::end);
auto size = ifs.tellg();
ifs.seekg(0, std::ios::beg);
std::string str(size, '\0'); // binary data, should decrypt if it's encrypted
ifs.read(str.data(), size);
mmdeploy_model_t model;
mmdeploy_model_create(str.data(), size, &model);
mmdeploy_classifier_t classifier{};
mmdeploy_classifier_create(model, "cpu", 0, &classifier);
```
### Load the data
```cpp
cv::Mat img = cv::imread(image_path);
```
### Model inference
```cpp
mmdeploy_mat_t mat{
    img.data, img.rows, img.cols, 3, MMDEPLOY_PIXEL_FORMAT_BGR, MMDEPLOY_DATA_TYPE_UINT8};
mmdeploy_classification_t* res{};
int* res_count{};
mmdeploy_classifier_apply(classifier, &mat, 1, &res, &res_count);
```
## <a href='https://mmdeploy.readthedocs.io/en/latest/'>English</a>
## <a href='https://mmdeploy.readthedocs.io/zh_CN/latest/'>简体中文</a>
# Android 下构建方式
- [Android 下构建方式](#android-下构建方式)
- [源码安装](#源码安装)
- [安装构建和编译工具链](#安装构建和编译工具链)
- [安装依赖包](#安装依赖包)
- [安装 MMDeploy SDK 依赖](#安装-mmdeploy-sdk-依赖)
- [编译 MMDeploy](#编译-mmdeploy)
- [编译 SDK 和 Demos](#编译-sdk-和-demos)
______________________________________________________________________
MMDeploy 为 android 平台提供交叉编译的构建方式.
MMDeploy converter 部分在 linux 平台上执行,SDK 部分在 android 平台上执行.
MMDeploy 的交叉编译分为两步:
1. 在 linux 平台上构建 MMDeploy converter. 请根据 [How to build linux](linux-x86_64.md) 进行构建.
2. 使用 android 工具链构建 MMDeploy SDK.
本文档仅提供在 linux 平台上使用 android 工具链进行交叉编译构建 MMDeploy SDK 的方法.
## 源码安装
### 安装构建和编译工具链
- cmake
**保证 cmake的版本 >= 3.14.0**. 如果不是,可以参考以下命令安装 3.20.0 版本. 如要获取其他版本,请参考 [这里](https://cmake.org/install)
```bash
wget https://github.com/Kitware/CMake/releases/download/v3.20.0/cmake-3.20.0-linux-x86_64.tar.gz
tar -xzvf cmake-3.20.0-linux-x86_64.tar.gz
sudo ln -sf $(pwd)/cmake-3.20.0-linux-x86_64/bin/* /usr/bin/
```
- ANDROID NDK 19+
**保证 android ndk 的版本 >= 19.0**. 如果不是,可以参考以下命令安装 r23c 版本. 如要获取其他版本,请参考 [这里](https://developer.android.com/ndk/downloads)
```bash
wget https://dl.google.com/android/repository/android-ndk-r23c-linux.zip
unzip android-ndk-r23c-linux.zip
cd android-ndk-r23c
export NDK_PATH=${PWD}
```
### 安装依赖包
#### 安装 MMDeploy SDK 依赖
如果您只对模型转换感兴趣,那么可以跳过本章节.
<table>
<thead>
<tr>
<th>名称 </th>
<th>安装方式 </th>
</tr>
</thead>
<tbody>
<tr>
<td>OpenCV<br>(>=3.0) </td>
<td>
<pre><code>
export OPENCV_VERSION=4.6.0
wget https://github.com/opencv/opencv/releases/download/${OPENCV_VERSION}/opencv-${OPENCV_VERSION}-android-sdk.zip
unzip opencv-${OPENCV_VERSION}-android-sdk.zip
export OPENCV_ANDROID_SDK_DIR=${PWD}/OpenCV-android-sdk
</code></pre>
</td>
</tr>
<tr>
<td>ncnn </td>
<td>ncnn 是支持 android 平台的高效神经网络推理计算框架</br>
<b> 目前, MMDeploy 支持 ncnn 的 20220721 版本, 且必须使用<code>git clone</code> 下载源码的方式安装。请到 <a href='https://github.com/Tencent/ncnn/releases'> 这里 </a> 查询 ncnn 支持的 android ABI。</b><br>
<pre><code>
git clone -b 20220721 https://github.com/Tencent/ncnn.git
cd ncnn
git submodule update --init
export NCNN_DIR=${PWD}
export ANDROID_ABI=arm64-v8a
mkdir -p build_${ANDROID_ABI}
cd build_${ANDROID_ABI}
cmake -DCMAKE_TOOLCHAIN_FILE=${NDK_PATH}/build/cmake/android.toolchain.cmake -DANDROID_ABI="${ANDROID_ABI}" -DANDROID_PLATFORM=android-30 -DNCNN_VULKAN=ON -DNCNN_DISABLE_EXCEPTION=OFF -DNCNN_DISABLE_RTTI=OFF -DANDROID_USE_LEGACY_TOOLCHAIN_FILE=False ..
make -j$(nproc) install
</code></pre>
</td>
</tr>
<tr>
<td>OpenJDK </td>
<td>编译Java API之前需要先准备OpenJDK开发环境</br>
请参考 <a href='https://github.com/open-mmlab/mmdeploy/tree/main/csrc/mmdeploy/apis/java/README.md'> Java API 编译 </a> 进行构建.
</td>
</tr>
</tbody>
</table>
### 编译 MMDeploy
#### 编译 SDK 和 Demos
下文展示构建 SDK 的样例,用 ncnn 作为推理引擎。
- cpu + ncnn
```Bash
export ANDROID_ABI=arm64-v8a
cd ${MMDEPLOY_DIR}
mkdir -p build_${ANDROID_ABI} && cd build_${ANDROID_ABI}
cmake .. \
-DMMDEPLOY_BUILD_SDK=ON \
-DMMDEPLOY_BUILD_EXAMPLES=ON \
-DMMDEPLOY_BUILD_SDK_JAVA_API=ON \
-DOpenCV_DIR=${OPENCV_ANDROID_SDK_DIR}/sdk/native/jni/abi-${ANDROID_ABI} \
-Dncnn_DIR=${NCNN_DIR}/build_${ANDROID_ABI}/install/lib/cmake/ncnn \
-DMMDEPLOY_TARGET_BACKENDS=ncnn \
-DMMDEPLOY_SHARED_LIBS=OFF \
-DCMAKE_TOOLCHAIN_FILE=${NDK_PATH}/build/cmake/android.toolchain.cmake \
-DANDROID_USE_LEGACY_TOOLCHAIN_FILE=False \
-DANDROID_ABI=${ANDROID_ABI} \
-DANDROID_PLATFORM=android-30 \
-DANDROID_CPP_FEATURES="rtti exceptions"
make -j$(nproc) && make install
```
参考 [cmake 选项说明](cmake_option.md)
# 使用 Docker 镜像
本文简述如何使用[Docker](https://docs.docker.com/get-docker/)安装mmdeploy
## 获取镜像
为了方便用户,mmdeploy在[Docker Hub](https://hub.docker.com/r/openmmlab/mmdeploy)上提供了多个版本的镜像,例如对于`mmdeploy==1.2.0`
其镜像标签为`openmmlab/mmdeploy:ubuntu20.04-cuda11.8-mmdeploy1.2.0`,而最新的镜像标签为`openmmlab/mmdeploy:ubuntu20.04-cuda11.8-mmdeploy`
镜像相关规格信息如下表所示:
| Item | Version |
| :---------: | :---------: |
| OS | Ubuntu20.04 |
| CUDA | 11.8 |
| CUDNN | 8.9 |
| Python | 3.8.10 |
| Torch | 2.0.0 |
| TorchVision | 0.15.0 |
| TorchScript | 2.0.0 |
| TensorRT | 8.6.1.6 |
| ONNXRuntime | 1.15.1 |
| OpenVINO | 2022.3.0 |
| ncnn | 20230816 |
| openppl | 0.8.1 |
用户可选择一个[镜像](https://hub.docker.com/r/openmmlab/mmdeploy/tags)并运行`docker pull`拉取镜像到本地:
```shell
export TAG=openmmlab/mmdeploy:ubuntu20.04-cuda11.8-mmdeploy
docker pull $TAG
```
## 构建镜像(可选)
如果已提供的镜像无法满足要求,用户可修改`docker/Release/Dockerfile`并在本地构建镜像。其中,构建参数`MMDEPLOY_VERSION`可以是[mmdeploy](https://github.com/open-mmlab/mmdeploy)项目的一个[标签](https://github.com/open-mmlab/mmdeploy/tags)或者分支。
```shell
export MMDEPLOY_VERSION=main
export TAG=mmdeploy-${MMDEPLOY_VERSION}
docker build docker/Release/ -t ${TAG} --build-arg MMDEPLOY_VERSION=${MMDEPLOY_VERSION}
```
## 运行 docker 容器
当拉取或构建 docker 镜像后,用户可使用 `docker run` 启动 docker 服务:
```shell
export TAG=openmmlab/mmdeploy:ubuntu20.04-cuda11.8-mmdeploy
docker run --gpus=all -it --rm $TAG
```
## 常见问答
1. CUDA error: the provided PTX was compiled with an unsupported toolchain:
如[这里](https://forums.developer.nvidia.com/t/cuda-error-the-provided-ptx-was-compiled-with-an-unsupported-toolchain/185754)所说,更新 GPU 的驱动到您的 GPU 能使用的最新版本。
2. docker: Error response from daemon: could not select device driver "" with capabilities: [gpu].
```shell
# Add the package repositories
distribution=$(. /etc/os-release;echo $ID$VERSION_ID)
curl -s -L https://nvidia.github.io/nvidia-docker/gpgkey | sudo apt-key add -
curl -s -L https://nvidia.github.io/nvidia-docker/$distribution/nvidia-docker.list | sudo tee /etc/apt/sources.list.d/nvidia-docker.list
sudo apt-get update && sudo apt-get install -y nvidia-container-toolkit
sudo systemctl restart docker
```
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment