diff --git a/csrc/mmdeploy/CMakeLists.txt b/csrc/mmdeploy/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..6bfbd3a95a80c8654c58194d6d7a18e6eff175da --- /dev/null +++ b/csrc/mmdeploy/CMakeLists.txt @@ -0,0 +1,21 @@ +# Copyright (c) OpenMMLab. All rights reserved. + +add_subdirectory(backend_ops) + +if (MMDEPLOY_BUILD_SDK) + # include OpenCV for SDK modules since many of them depends on it + include(${CMAKE_SOURCE_DIR}/cmake/opencv.cmake) + + add_subdirectory(core) + add_subdirectory(execution) + add_subdirectory(utils) + add_subdirectory(archive) + add_subdirectory(device) + add_subdirectory(graph) + add_subdirectory(model) + add_subdirectory(operation) + add_subdirectory(preprocess) + add_subdirectory(net) + add_subdirectory(codebase) + add_subdirectory(apis) +endif () diff --git a/csrc/mmdeploy/apis/CMakeLists.txt b/csrc/mmdeploy/apis/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..1ab877be901709e018d84b8df1df5fd6c1ce137c --- /dev/null +++ b/csrc/mmdeploy/apis/CMakeLists.txt @@ -0,0 +1,11 @@ +# Copyright (c) OpenMMLab. All rights reserved. + +add_subdirectory(c) +add_subdirectory(cxx) +add_subdirectory(java) + +# add python subdir conditionally since it's designed to work as +# a standalone project also +if (MMDEPLOY_BUILD_SDK_PYTHON_API) + add_subdirectory(python) +endif () diff --git a/csrc/mmdeploy/apis/c/CMakeLists.txt b/csrc/mmdeploy/apis/c/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..f08fa8cf86843c7b486307d606e01ebc72dba93c --- /dev/null +++ b/csrc/mmdeploy/apis/c/CMakeLists.txt @@ -0,0 +1,86 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+ +project(capis) +include(${CMAKE_SOURCE_DIR}/cmake/MMDeploy.cmake) + +set(CAPI_OBJS) + +macro(add_object name) + add_library(${name} OBJECT ${ARGN}) + set_target_properties(${name} PROPERTIES POSITION_INDEPENDENT_CODE 1) + target_compile_definitions(${name} PRIVATE -DMMDEPLOY_API_EXPORTS=1) + if (NOT MSVC) + target_compile_options(${name} PRIVATE $<$:-fvisibility=hidden>) + endif () + target_link_libraries(${name} PRIVATE mmdeploy::core) + target_include_directories(${name} PUBLIC + $ + $) + set(CAPI_OBJS ${CAPI_OBJS} ${name}) + mmdeploy_export(${name}) +endmacro() + +set(COMMON_LIST + common + model + executor + pipeline) + +set(TASK_LIST ${MMDEPLOY_TASKS}) + +foreach (TASK ${COMMON_LIST}) + set(TARGET_NAME mmdeploy_${TASK}) + set(OBJECT_NAME mmdeploy_${TASK}_obj) + add_object(${OBJECT_NAME} ${CMAKE_CURRENT_SOURCE_DIR}/mmdeploy/${TASK}.cpp) + mmdeploy_add_library(${TARGET_NAME}) + target_link_libraries(${TARGET_NAME} PRIVATE ${OBJECT_NAME}) + target_include_directories(${TARGET_NAME} PUBLIC + $ + $) + install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/mmdeploy/${TASK}.h + DESTINATION include/mmdeploy) +endforeach () + +target_link_libraries(mmdeploy_executor PUBLIC + mmdeploy_execution mmdeploy_common) +target_link_libraries(mmdeploy_pipeline PUBLIC + mmdeploy_executor mmdeploy_model mmdeploy_common) + +foreach (TASK ${TASK_LIST}) + set(TARGET_NAME mmdeploy_${TASK}) + set(OBJECT_NAME mmdeploy_${TASK}_obj) + add_object(${OBJECT_NAME} ${CMAKE_CURRENT_SOURCE_DIR}/mmdeploy/${TASK}.cpp) + mmdeploy_add_library(${TARGET_NAME}) + target_link_libraries(${TARGET_NAME} PRIVATE ${OBJECT_NAME} + mmdeploy_pipeline) + target_include_directories(${TARGET_NAME} PUBLIC + $ + $) + install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/mmdeploy/${TASK}.h + DESTINATION include/mmdeploy) +endforeach () + +install(DIRECTORY ${CMAKE_SOURCE_DIR}/demo/csrc/ DESTINATION example/cpp + FILES_MATCHING + PATTERN "*.cpp" + PATTERN "CMakeLists.txt" + ) + +if (MMDEPLOY_BUILD_SDK_CSHARP_API OR 
MMDEPLOY_BUILD_SDK_MONOLITHIC) + add_library(mmdeploy SHARED) + mmdeploy_load_static(mmdeploy MMDeployStaticModules) + mmdeploy_load_dynamic(mmdeploy MMDeployDynamicModules) + target_link_libraries(mmdeploy PRIVATE ${CAPI_OBJS} mmdeploy_execution) + target_include_directories(mmdeploy PUBLIC + $ + $) + set(MMDEPLOY_VERSION ${MMDEPLOY_VERSION_MAJOR} + .${MMDEPLOY_VERSION_MINOR} + .${MMDEPLOY_VERSION_PATCH}) + string(REPLACE ";" "" MMDEPLOY_VERSION ${MMDEPLOY_VERSION}) + set_target_properties(mmdeploy PROPERTIES + VERSION ${MMDEPLOY_VERSION} + SOVERSION ${MMDEPLOY_VERSION_MAJOR}) + mmdeploy_add_rpath(mmdeploy) + mmdeploy_export_impl(mmdeploy) +endif () diff --git a/csrc/mmdeploy/apis/c/mmdeploy/classifier.cpp b/csrc/mmdeploy/apis/c/mmdeploy/classifier.cpp new file mode 100644 index 0000000000000000000000000000000000000000..3eec4ef90b9d31d15c99cf56c1ec47448eceec9a --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/classifier.cpp @@ -0,0 +1,133 @@ +// Copyright (c) OpenMMLab. All rights reserved. 
+ +#include "mmdeploy/classifier.h" + +#include + +#include "mmdeploy/archive/value_archive.h" +#include "mmdeploy/codebase/mmcls/mmcls.h" +#include "mmdeploy/common_internal.h" +#include "mmdeploy/core/device.h" +#include "mmdeploy/core/graph.h" +#include "mmdeploy/core/utils/formatter.h" +#include "mmdeploy/handle.h" +#include "mmdeploy/pipeline.h" + +using namespace mmdeploy; +using namespace std; + +int mmdeploy_classifier_create(mmdeploy_model_t model, const char* device_name, int device_id, + mmdeploy_classifier_t* classifier) { + mmdeploy_context_t context{}; + auto ec = mmdeploy_context_create_by_device(device_name, device_id, &context); + if (ec != MMDEPLOY_SUCCESS) { + return ec; + } + ec = mmdeploy_classifier_create_v2(model, context, classifier); + mmdeploy_context_destroy(context); + return ec; +} + +int mmdeploy_classifier_create_by_path(const char* model_path, const char* device_name, + int device_id, mmdeploy_classifier_t* classifier) { + mmdeploy_model_t model{}; + + if (auto ec = mmdeploy_model_create_by_path(model_path, &model)) { + return ec; + } + auto ec = mmdeploy_classifier_create(model, device_name, device_id, classifier); + mmdeploy_model_destroy(model); + return ec; +} + +int mmdeploy_classifier_create_v2(mmdeploy_model_t model, mmdeploy_context_t context, + mmdeploy_classifier_t* classifier) { + return mmdeploy_pipeline_create_from_model(model, context, (mmdeploy_pipeline_t*)classifier); +} + +int mmdeploy_classifier_create_input(const mmdeploy_mat_t* mats, int mat_count, + mmdeploy_value_t* value) { + return mmdeploy_common_create_input(mats, mat_count, value); +} + +int mmdeploy_classifier_apply(mmdeploy_classifier_t classifier, const mmdeploy_mat_t* mats, + int mat_count, mmdeploy_classification_t** results, + int** result_count) { + wrapped input; + if (auto ec = mmdeploy_classifier_create_input(mats, mat_count, input.ptr())) { + return ec; + } + wrapped output; + if (auto ec = mmdeploy_classifier_apply_v2(classifier, input, 
output.ptr())) { + return ec; + } + if (auto ec = mmdeploy_classifier_get_result(output, results, result_count)) { + return ec; + } + return MMDEPLOY_SUCCESS; +} + +int mmdeploy_classifier_apply_v2(mmdeploy_classifier_t classifier, mmdeploy_value_t input, + mmdeploy_value_t* output) { + return mmdeploy_pipeline_apply((mmdeploy_pipeline_t)classifier, input, output); +} + +int mmdeploy_classifier_apply_async(mmdeploy_classifier_t classifier, mmdeploy_sender_t input, + mmdeploy_sender_t* output) { + return mmdeploy_pipeline_apply_async((mmdeploy_pipeline_t)classifier, input, output); +} + +int mmdeploy_classifier_get_result(mmdeploy_value_t output, mmdeploy_classification_t** results, + int** result_count) { + if (!output || !results || !result_count) { + return MMDEPLOY_E_INVALID_ARG; + } + try { + Value& value = Cast(output)->front(); + + auto classify_outputs = from_value>(value); + + vector _result_count; + _result_count.reserve(classify_outputs.size()); + + for (const auto& cls_output : classify_outputs) { + _result_count.push_back((int)cls_output.size()); + } + + auto total = std::accumulate(begin(_result_count), end(_result_count), 0); + + std::unique_ptr result_count_data(new int[_result_count.size()]{}); + std::copy(_result_count.begin(), _result_count.end(), result_count_data.get()); + + std::unique_ptr result_data( + new mmdeploy_classification_t[total]{}); + auto result_ptr = result_data.get(); + for (const auto& cls_output : classify_outputs) { + for (const auto& label : cls_output) { + result_ptr->label_id = label.label_id; + result_ptr->score = label.score; + ++result_ptr; + } + } + + *result_count = result_count_data.release(); + *results = result_data.release(); + + return MMDEPLOY_SUCCESS; + } catch (const std::exception& e) { + MMDEPLOY_ERROR("unhandled exception: {}", e.what()); + } catch (...) 
{ + MMDEPLOY_ERROR("unknown exception caught"); + } + return MMDEPLOY_E_FAIL; +} + +void mmdeploy_classifier_release_result(mmdeploy_classification_t* results, const int* result_count, + int count) { + delete[] results; + delete[] result_count; +} + +void mmdeploy_classifier_destroy(mmdeploy_classifier_t classifier) { + mmdeploy_pipeline_destroy((mmdeploy_pipeline_t)classifier); +} diff --git a/csrc/mmdeploy/apis/c/mmdeploy/classifier.h b/csrc/mmdeploy/apis/c/mmdeploy/classifier.h new file mode 100644 index 0000000000000000000000000000000000000000..54e9d0215b816520eb83826bdcb73c3c9dcb4de0 --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/classifier.h @@ -0,0 +1,139 @@ +// Copyright (c) OpenMMLab. All rights reserved. + +/** + * @file classifier.h + * @brief Interface to MMClassification task + */ + +#ifndef MMDEPLOY_CLASSIFIER_H +#define MMDEPLOY_CLASSIFIER_H + +#include "mmdeploy/common.h" +#include "mmdeploy/executor.h" +#include "mmdeploy/model.h" + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct mmdeploy_classification_t { + int label_id; + float score; +} mmdeploy_classification_t; + +typedef struct mmdeploy_classifier* mmdeploy_classifier_t; + +/** + * @brief Create classifier's handle + * @param[in] model an instance of mmclassification sdk model created by + * \ref mmdeploy_model_create_by_path or \ref mmdeploy_model_create in \ref model.h + * @param[in] device_name name of device, such as "cpu", "cuda", etc. + * @param[in] device_id id of device. 
+ * @param[out] classifier instance of a classifier, which must be destroyed + * by \ref mmdeploy_classifier_destroy + * @return status of creating classifier's handle + */ +MMDEPLOY_API int mmdeploy_classifier_create(mmdeploy_model_t model, const char* device_name, + int device_id, mmdeploy_classifier_t* classifier); + +/** + * @brief Create classifier's handle + * @param[in] model_path path of mmclassification sdk model exported by mmdeploy model converter + * @param[in] device_name name of device, such as "cpu", "cuda", etc. + * @param[in] device_id id of device. + * @param[out] classifier instance of a classifier, which must be destroyed + * by \ref mmdeploy_classifier_destroy + * @return status of creating classifier's handle + */ +MMDEPLOY_API int mmdeploy_classifier_create_by_path(const char* model_path, const char* device_name, + int device_id, + mmdeploy_classifier_t* classifier); + +/** + * @brief Use classifier created by \ref mmdeploy_classifier_create_by_path to get label + * information of each image in a batch + * @param[in] classifier classifier's handle created by \ref mmdeploy_classifier_create_by_path + * @param[in] mats a batch of images + * @param[in] mat_count number of images in the batch + * @param[out] results a linear buffer to save classification results of each + * image, which must be freed by \ref mmdeploy_classifier_release_result + * @param[out] result_count a linear buffer with length being \p mat_count to save the number of + * classification results of each image. 
It must be released by \ref + * mmdeploy_classifier_release_result + * @return status of inference + */ +MMDEPLOY_API int mmdeploy_classifier_apply(mmdeploy_classifier_t classifier, + const mmdeploy_mat_t* mats, int mat_count, + mmdeploy_classification_t** results, int** result_count); + +/** + * @brief Release the inference result buffer created \ref mmdeploy_classifier_apply + * @param[in] results classification results buffer + * @param[in] result_count \p results size buffer + * @param[in] count length of \p result_count + */ +MMDEPLOY_API void mmdeploy_classifier_release_result(mmdeploy_classification_t* results, + const int* result_count, int count); + +/** + * @brief Destroy classifier's handle + * @param[in] classifier classifier's handle created by \ref mmdeploy_classifier_create_by_path + */ +MMDEPLOY_API void mmdeploy_classifier_destroy(mmdeploy_classifier_t classifier); + +/****************************************************************************** + * Experimental asynchronous APIs */ + +/** + * @brief Same as \ref mmdeploy_classifier_create, but allows to control execution context of tasks + * via context + */ +MMDEPLOY_API int mmdeploy_classifier_create_v2(mmdeploy_model_t model, mmdeploy_context_t context, + mmdeploy_classifier_t* classifier); + +/** + * @brief Pack classifier inputs into mmdeploy_value_t + * @param[in] mats a batch of images + * @param[in] mat_count number of images in the batch + * @param[out] value the packed value + * @return status of the operation + */ +MMDEPLOY_API int mmdeploy_classifier_create_input(const mmdeploy_mat_t* mats, int mat_count, + mmdeploy_value_t* value); + +/** + * @brief Same as \ref mmdeploy_classifier_apply, but input and output are packed in \ref + * mmdeploy_value_t. 
+ */ +MMDEPLOY_API int mmdeploy_classifier_apply_v2(mmdeploy_classifier_t classifier, + mmdeploy_value_t input, mmdeploy_value_t* output); + +/** + * @brief Apply classifier asynchronously + * @param[in] classifier handle of the classifier + * @param[in] input input sender that will be consumed by the operation + * @param[out] output output sender + * @return status of the operation + */ +MMDEPLOY_API int mmdeploy_classifier_apply_async(mmdeploy_classifier_t classifier, + mmdeploy_sender_t input, + mmdeploy_sender_t* output); + +/** + * + * @param[in] output output obtained by applying a classifier + * @param[out] results a linear buffer containing classification results of each image, released by + * \ref mmdeploy_classifier_release_result + * @param[out] result_count a linear buffer containing the number of results for each input image, + * released by \ref mmdeploy_classifier_release_result + * @return status of the operation + */ +MMDEPLOY_API int mmdeploy_classifier_get_result(mmdeploy_value_t output, + mmdeploy_classification_t** results, + int** result_count); + +#ifdef __cplusplus +} +#endif + +#endif // MMDEPLOY_CLASSIFIER_H diff --git a/csrc/mmdeploy/apis/c/mmdeploy/common.cpp b/csrc/mmdeploy/apis/c/mmdeploy/common.cpp new file mode 100644 index 0000000000000000000000000000000000000000..e00cc3f1cfa246f4cf470c195628739d6cd19928 --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/common.cpp @@ -0,0 +1,115 @@ +#include "mmdeploy/common.h" + +#include "mmdeploy/common_internal.h" +#include "mmdeploy/core/mat.h" +#include "mmdeploy/core/profiler.h" +#include "mmdeploy/executor_internal.h" + +mmdeploy_value_t mmdeploy_value_copy(mmdeploy_value_t value) { + if (!value) { + return nullptr; + } + return Guard([&] { return Take(Value(*Cast(value))); }); +} + +void mmdeploy_value_destroy(mmdeploy_value_t value) { delete Cast(value); } + +int mmdeploy_context_create(mmdeploy_context_t* context) { + *context = (mmdeploy_context_t) new Value; + return 0; +} + +int 
mmdeploy_context_create_by_device(const char* device_name, int device_id, + mmdeploy_context_t* context) { + mmdeploy_device_t device{}; + int ec = MMDEPLOY_SUCCESS; + mmdeploy_context_t _context{}; + ec = mmdeploy_context_create(&_context); + if (ec != MMDEPLOY_SUCCESS) { + return ec; + } + ec = mmdeploy_device_create(device_name, device_id, &device); + if (ec != MMDEPLOY_SUCCESS) { + return ec; + } + ec = mmdeploy_context_add(_context, MMDEPLOY_TYPE_DEVICE, nullptr, device); + mmdeploy_device_destroy(device); + if (ec == MMDEPLOY_SUCCESS) { + *context = _context; + } + return ec; +} + +void mmdeploy_context_destroy(mmdeploy_context_t context) { delete Cast(context); } + +int mmdeploy_common_create_input(const mmdeploy_mat_t* mats, int mat_count, + mmdeploy_value_t* value) { + if (mat_count && mats == nullptr) { + return MMDEPLOY_E_INVALID_ARG; + } + try { + auto input = std::make_unique(Value{Value::kArray}); + for (int i = 0; i < mat_count; ++i) { + input->front().push_back({{"ori_img", Cast(mats[i])}}); + } + *value = Cast(input.release()); + } catch (const std::exception& e) { + MMDEPLOY_ERROR("unhandled exception: {}", e.what()); + } catch (...) 
{ + MMDEPLOY_ERROR("unknown exception caught"); + } + return MMDEPLOY_SUCCESS; +} + +int mmdeploy_device_create(const char* device_name, int device_id, mmdeploy_device_t* device) { + Device tmp(device_name, device_id); + if (tmp.platform_id() == -1) { + MMDEPLOY_ERROR("Device \"{}\" not found", device_name); + return MMDEPLOY_E_INVALID_ARG; + } + *device = (mmdeploy_device_t) new Device(tmp); + return MMDEPLOY_SUCCESS; +} + +void mmdeploy_device_destroy(mmdeploy_device_t device) { delete (Device*)device; } + +int mmdeploy_profiler_create(const char* path, mmdeploy_profiler_t* profiler) { + *profiler = (mmdeploy_profiler_t) new profiler::Profiler(path); + return MMDEPLOY_SUCCESS; +} + +void mmdeploy_profiler_destroy(mmdeploy_profiler_t profiler) { + if (profiler) { + auto p = (profiler::Profiler*)profiler; + p->Release(); + delete p; + } +} + +int mmdeploy_context_add(mmdeploy_context_t context, mmdeploy_context_type_t type, const char* name, + const void* object) { + auto& ctx = *Cast(context); + switch (type) { + case MMDEPLOY_TYPE_DEVICE: { + const auto& device = *(Device*)object; + ctx["device"] = device; + ctx["stream"] = Stream(device); + break; + } + case MMDEPLOY_TYPE_SCHEDULER: + ctx["scheduler"][name] = *Cast((const mmdeploy_scheduler_t)object); + break; + case MMDEPLOY_TYPE_MODEL: + ctx["model"][name] = *Cast((const mmdeploy_model_t)object); + break; + case MMDEPLOY_TYPE_PROFILER: { + const auto& profiler = *(profiler::Profiler*)object; + profiler::Scope* root(profiler.scope()); + ctx["scope"] = root; + break; + } + default: + return MMDEPLOY_E_NOT_SUPPORTED; + } + return 0; +} diff --git a/csrc/mmdeploy/apis/c/mmdeploy/common.h b/csrc/mmdeploy/apis/c/mmdeploy/common.h new file mode 100644 index 0000000000000000000000000000000000000000..c665134cbf48256ab3116a4758790bcbf64edf3e --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/common.h @@ -0,0 +1,192 @@ +// Copyright (c) OpenMMLab. All rights reserved. 
+ +#ifndef MMDEPLOY_COMMON_H +#define MMDEPLOY_COMMON_H + +#include // NOLINT + +#ifndef MMDEPLOY_EXPORT +#ifdef _MSC_VER +#define MMDEPLOY_EXPORT __declspec(dllexport) +#else +#define MMDEPLOY_EXPORT __attribute__((visibility("default"))) +#endif +#endif + +#ifndef MMDEPLOY_API +#ifdef MMDEPLOY_API_EXPORTS +#define MMDEPLOY_API MMDEPLOY_EXPORT +#else +#define MMDEPLOY_API +#endif +#endif + +// clang-format off + +typedef enum mmdeploy_pixel_format_t{ + MMDEPLOY_PIXEL_FORMAT_BGR, + MMDEPLOY_PIXEL_FORMAT_RGB, + MMDEPLOY_PIXEL_FORMAT_GRAYSCALE, + MMDEPLOY_PIXEL_FORMAT_NV12, + MMDEPLOY_PIXEL_FORMAT_NV21, + MMDEPLOY_PIXEL_FORMAT_BGRA, + MMDEPLOY_PIXEL_FORMAT_COUNT +} mmdeploy_pixel_format_t; + +typedef enum mmdeploy_data_type_t{ + MMDEPLOY_DATA_TYPE_FLOAT, + MMDEPLOY_DATA_TYPE_HALF, + MMDEPLOY_DATA_TYPE_UINT8, + MMDEPLOY_DATA_TYPE_INT32, + MMDEPLOY_DATA_TYPE_COUNT +} mmdeploy_data_type_t; + +typedef enum mmdeploy_status_t { + MMDEPLOY_SUCCESS = 0, + MMDEPLOY_E_INVALID_ARG = 1, + MMDEPLOY_E_NOT_SUPPORTED = 2, + MMDEPLOY_E_OUT_OF_RANGE = 3, + MMDEPLOY_E_OUT_OF_MEMORY = 4, + MMDEPLOY_E_FILE_NOT_EXIST = 5, + MMDEPLOY_E_FAIL = 6, + MMDEPLOY_STATUS_COUNT = 7 +} mmdeploy_status_t; + +// clang-format on + +typedef struct mmdeploy_device* mmdeploy_device_t; + +typedef struct mmdeploy_profiler* mmdeploy_profiler_t; + +typedef struct mmdeploy_mat_t { + uint8_t* data; + int height; + int width; + int channel; + mmdeploy_pixel_format_t format; + mmdeploy_data_type_t type; + mmdeploy_device_t device; +} mmdeploy_mat_t; + +typedef struct mmdeploy_rect_t { + float left; + float top; + float right; + float bottom; +} mmdeploy_rect_t; + +typedef struct mmdeploy_point_t { + float x; + float y; +} mmdeploy_point_t; + +typedef struct mmdeploy_value* mmdeploy_value_t; + +typedef struct mmdeploy_context* mmdeploy_context_t; + +typedef enum mmdeploy_context_type_t { + MMDEPLOY_TYPE_DEVICE = 0, + MMDEPLOY_TYPE_STREAM = 1, + MMDEPLOY_TYPE_MODEL = 2, + MMDEPLOY_TYPE_SCHEDULER = 3, + 
MMDEPLOY_TYPE_MAT = 4, + MMDEPLOY_TYPE_PROFILER = 5, +} mmdeploy_context_type_t; + +#if __cplusplus +extern "C" { +#endif + +/** + * Copy value + * @param value + * @return + */ +MMDEPLOY_API mmdeploy_value_t mmdeploy_value_copy(mmdeploy_value_t value); + +/** + * Destroy value + * @param value + */ +MMDEPLOY_API void mmdeploy_value_destroy(mmdeploy_value_t value); + +/** + * Create device handle + * @param device_name + * @param device_id + * @param device + * @return + */ +MMDEPLOY_API int mmdeploy_device_create(const char* device_name, int device_id, + mmdeploy_device_t* device); + +/** + * Destroy device handle + * @param device + */ +MMDEPLOY_API void mmdeploy_device_destroy(mmdeploy_device_t device); + +/** + * Create profiler + * @param path path to save the profile data + * @param profiler handle for profiler, should be added to context and deleted by + * mmdeploy_profiler_destroy + * @return status of create + */ +MMDEPLOY_API int mmdeploy_profiler_create(const char* path, mmdeploy_profiler_t* profiler); + +/** + * Destroy profiler handle + * @param profiler handle for profiler, profile data will be written to disk after this call + */ +MMDEPLOY_API void mmdeploy_profiler_destroy(mmdeploy_profiler_t profiler); + +/** + * Create context + * @param context + * @return + */ +MMDEPLOY_API int mmdeploy_context_create(mmdeploy_context_t* context); + +/** + * Create context + * @param device_name + * @param device_id + * @param context + * @return + */ +MMDEPLOY_API int mmdeploy_context_create_by_device(const char* device_name, int device_id, + mmdeploy_context_t* context); + +/** + * Destroy context + * @param context + */ +MMDEPLOY_API void mmdeploy_context_destroy(mmdeploy_context_t context); + +/** + * Add context object + * @param context + * @param type + * @param name + * @param object + * @return + */ +MMDEPLOY_API int mmdeploy_context_add(mmdeploy_context_t context, mmdeploy_context_type_t type, + const char* name, const void* object); + +/** + * Create 
input value from array of mats + * @param mats + * @param mat_count + * @param value + * @return + */ +MMDEPLOY_API int mmdeploy_common_create_input(const mmdeploy_mat_t* mats, int mat_count, + mmdeploy_value_t* value); + +#if __cplusplus +} +#endif + +#endif // MMDEPLOY_COMMON_H diff --git a/csrc/mmdeploy/apis/c/mmdeploy/common_internal.h b/csrc/mmdeploy/apis/c/mmdeploy/common_internal.h new file mode 100644 index 0000000000000000000000000000000000000000..a1ddecb54d17e227d9dcff744c2d408ba693d831 --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/common_internal.h @@ -0,0 +1,105 @@ +// Copyright (c) OpenMMLab. All rights reserved. + +#ifndef MMDEPLOY_CSRC_APIS_C_COMMON_INTERNAL_H_ +#define MMDEPLOY_CSRC_APIS_C_COMMON_INTERNAL_H_ + +#include "mmdeploy/common.h" +#include "mmdeploy/core/mat.h" +#include "mmdeploy/core/value.h" +#include "mmdeploy/handle.h" +#include "mmdeploy/model.h" +#include "mmdeploy/pipeline.h" + +using namespace mmdeploy; + +namespace { + +inline mmdeploy_value_t Cast(Value* s) { return reinterpret_cast(s); } + +inline Value* Cast(mmdeploy_value_t s) { return reinterpret_cast(s); } + +inline Value Take(mmdeploy_value_t v) { + auto value = std::move(*Cast(v)); + mmdeploy_value_destroy(v); + return value; +} + +inline Value* Cast(mmdeploy_context_t c) { return reinterpret_cast(c); } + +inline mmdeploy_value_t Take(Value v) { + return Cast(new Value(std::move(v))); // NOLINT +} + +inline mmdeploy_pipeline_t Cast(AsyncHandle* pipeline) { + return reinterpret_cast(pipeline); +} + +inline AsyncHandle* Cast(mmdeploy_pipeline_t pipeline) { + return reinterpret_cast(pipeline); +} + +inline mmdeploy_model_t Cast(Model* model) { return reinterpret_cast(model); } + +inline Model* Cast(mmdeploy_model_t model) { return reinterpret_cast(model); } + +inline Mat Cast(const mmdeploy_mat_t& mat) { + return Mat{mat.height, mat.width, PixelFormat(mat.format), + DataType(mat.type), mat.data, mat.device ? 
*(const Device*)mat.device : Device{0}}; +} + +template +std::invoke_result_t Guard(F f) { + try { + return f(); + } catch (const std::exception& e) { + MMDEPLOY_ERROR("unhandled exception: {}", e.what()); + } catch (...) { + MMDEPLOY_ERROR("unknown exception caught"); + } + return nullptr; +} + +template +class wrapped {}; + +template +class wrapped> { + public: + wrapped() noexcept : v_(nullptr) {} + explicit wrapped(T v) noexcept : v_(v) {} + + void reset() { + if (v_) { + delete Cast(v_); + v_ = nullptr; + } + } + + ~wrapped() { reset(); } + + wrapped(const wrapped&) = delete; + wrapped& operator=(const wrapped&) = delete; + + wrapped(wrapped&& other) noexcept : v_(other.release()) {} + wrapped& operator=(wrapped&& other) noexcept { + reset(); + v_ = other.release(); + return *this; + } + + T release() noexcept { return std::exchange(v_, nullptr); } + + auto operator*() { return Cast(v_); } + auto operator-> () { return Cast(v_); } + + T* ptr() noexcept { return &v_; } + + operator T() const noexcept { return v_; } // NOLINT + + private: + T v_; +}; + +} // namespace + +#endif // MMDEPLOY_CSRC_APIS_C_COMMON_INTERNAL_H_ diff --git a/csrc/mmdeploy/apis/c/mmdeploy/detector.cpp b/csrc/mmdeploy/apis/c/mmdeploy/detector.cpp new file mode 100644 index 0000000000000000000000000000000000000000..aadf92fb62c42efcd83ef0b8982a60188f14a96d --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/detector.cpp @@ -0,0 +1,149 @@ +// Copyright (c) OpenMMLab. All rights reserved. 
+ +#include "mmdeploy/detector.h" + +#include +#include + +#include "mmdeploy/apis/c/mmdeploy/common_internal.h" +#include "mmdeploy/apis/c/mmdeploy/model.h" +#include "mmdeploy/apis/c/mmdeploy/pipeline.h" +#include "mmdeploy/archive/value_archive.h" +#include "mmdeploy/codebase/mmdet/mmdet.h" +#include "mmdeploy/core/device.h" +#include "mmdeploy/core/model.h" +#include "mmdeploy/core/mpl/structure.h" +#include "mmdeploy/core/utils/formatter.h" +#include "mmdeploy/core/value.h" + +using namespace std; +using namespace mmdeploy; + +using ResultType = mmdeploy::Structure, // + std::deque, // + std::vector>; // + +int mmdeploy_detector_create(mmdeploy_model_t model, const char* device_name, int device_id, + mmdeploy_detector_t* detector) { + mmdeploy_context_t context{}; + auto ec = mmdeploy_context_create_by_device(device_name, device_id, &context); + if (ec != MMDEPLOY_SUCCESS) { + return ec; + } + ec = mmdeploy_detector_create_v2(model, context, detector); + mmdeploy_context_destroy(context); + return ec; +} + +int mmdeploy_detector_create_v2(mmdeploy_model_t model, mmdeploy_context_t context, + mmdeploy_detector_t* detector) { + return mmdeploy_pipeline_create_from_model(model, context, (mmdeploy_pipeline_t*)detector); +} + +int mmdeploy_detector_create_by_path(const char* model_path, const char* device_name, int device_id, + mmdeploy_detector_t* detector) { + mmdeploy_model_t model{}; + + if (auto ec = mmdeploy_model_create_by_path(model_path, &model)) { + return ec; + } + auto ec = mmdeploy_detector_create(model, device_name, device_id, detector); + mmdeploy_model_destroy(model); + return ec; +} + +int mmdeploy_detector_create_input(const mmdeploy_mat_t* mats, int mat_count, + mmdeploy_value_t* input) { + return mmdeploy_common_create_input(mats, mat_count, input); +} + +int mmdeploy_detector_apply(mmdeploy_detector_t detector, const mmdeploy_mat_t* mats, int mat_count, + mmdeploy_detection_t** results, int** result_count) { + wrapped input; + if (auto ec = 
mmdeploy_detector_create_input(mats, mat_count, input.ptr())) { + return ec; + } + wrapped output; + if (auto ec = mmdeploy_detector_apply_v2(detector, input, output.ptr())) { + return ec; + } + if (auto ec = mmdeploy_detector_get_result(output, results, result_count)) { + return ec; + } + return MMDEPLOY_SUCCESS; +} + +int mmdeploy_detector_apply_v2(mmdeploy_detector_t detector, mmdeploy_value_t input, + mmdeploy_value_t* output) { + return mmdeploy_pipeline_apply((mmdeploy_pipeline_t)detector, input, output); +} + +int mmdeploy_detector_apply_async(mmdeploy_detector_t detector, mmdeploy_sender_t input, + mmdeploy_sender_t* output) { + return mmdeploy_pipeline_apply_async((mmdeploy_pipeline_t)detector, input, output); +} + +int mmdeploy_detector_get_result(mmdeploy_value_t output, mmdeploy_detection_t** results, + int** result_count) { + if (!output || !results || !result_count) { + return MMDEPLOY_E_INVALID_ARG; + } + try { + Value& value = Cast(output)->front(); + auto detector_outputs = from_value>(value); + + vector _result_count(detector_outputs.size()); + size_t total = 0; + for (size_t i = 0; i < detector_outputs.size(); ++i) { + _result_count[i] = static_cast(detector_outputs[i].size()); + total += detector_outputs[i].size(); + } + + ResultType r({total, 1, 1, 1}); + auto [result_data, result_count_vec, masks, buffers] = r.pointers(); + + auto result_ptr = result_data; + + for (const auto& det_output : detector_outputs) { + for (const auto& detection : det_output) { + result_ptr->label_id = detection.label_id; + result_ptr->score = detection.score; + const auto& bbox = detection.bbox; + result_ptr->bbox = {bbox[0], bbox[1], bbox[2], bbox[3]}; + auto mask_byte_size = detection.mask.byte_size(); + if (mask_byte_size) { + auto& mask = detection.mask; + result_ptr->mask = &masks->emplace_back(); + buffers->push_back(mask.buffer()); + result_ptr->mask->data = mask.data(); + result_ptr->mask->width = mask.width(); + result_ptr->mask->height = mask.height(); + } 
+ ++result_ptr; + } + } + + *result_count_vec = std::move(_result_count); + *result_count = result_count_vec->data(); + *results = result_data; + r.release(); + + return MMDEPLOY_SUCCESS; + } catch (const std::exception& e) { + MMDEPLOY_ERROR("unhandled exception: {}", e.what()); + } catch (...) { + MMDEPLOY_ERROR("unknown exception caught"); + } + return MMDEPLOY_E_FAIL; +} + +void mmdeploy_detector_release_result(mmdeploy_detection_t* results, const int* result_count, + int count) { + auto num_dets = std::accumulate(result_count, result_count + count, 0); + ResultType deleter({static_cast(num_dets), 1, 1, 1}, results); +} + +void mmdeploy_detector_destroy(mmdeploy_detector_t detector) { + mmdeploy_pipeline_destroy((mmdeploy_pipeline_t)detector); +} diff --git a/csrc/mmdeploy/apis/c/mmdeploy/detector.h b/csrc/mmdeploy/apis/c/mmdeploy/detector.h new file mode 100644 index 0000000000000000000000000000000000000000..5c5ba2f35670d2e1d9216d7cade74e4ab33f400e --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/detector.h @@ -0,0 +1,139 @@ +// Copyright (c) OpenMMLab. All rights reserved. + +/** + * @file detector.h + * @brief Interface to MMDetection task + */ + +#ifndef MMDEPLOY_DETECTOR_H +#define MMDEPLOY_DETECTOR_H + +#include "mmdeploy/common.h" +#include "mmdeploy/executor.h" +#include "mmdeploy/model.h" + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct mmdeploy_instance_mask_t { + char* data; + int height; + int width; +} mmdeploy_instance_mask_t; + +typedef struct mmdeploy_detection_t { + int label_id; + float score; + mmdeploy_rect_t bbox; + mmdeploy_instance_mask_t* mask; +} mmdeploy_detection_t; + +typedef struct mmdeploy_detector* mmdeploy_detector_t; + +/** + * @brief Create detector's handle + * @param[in] model an instance of mmdetection sdk model created by + * \ref mmdeploy_model_create_by_path or \ref mmdeploy_model_create in \ref model.h + * @param[in] device_name name of device, such as "cpu", "cuda", etc. 
+ * @param[in] device_id id of device. + * @param[out] detector instance of a detector + * @return status of creating detector's handle + */ +MMDEPLOY_API int mmdeploy_detector_create(mmdeploy_model_t model, const char* device_name, + int device_id, mmdeploy_detector_t* detector); + +/** + * @brief Create detector's handle + * @param[in] model_path path of mmdetection sdk model exported by mmdeploy model converter + * @param[in] device_name name of device, such as "cpu", "cuda", etc. + * @param[in] device_id id of device. + * @param[out] detector instance of a detector + * @return status of creating detector's handle + */ +MMDEPLOY_API int mmdeploy_detector_create_by_path(const char* model_path, const char* device_name, + int device_id, mmdeploy_detector_t* detector); + +/** + * @brief Apply detector to batch images and get their inference results + * @param[in] detector detector's handle created by \ref mmdeploy_detector_create_by_path + * @param[in] mats a batch of images + * @param[in] mat_count number of images in the batch + * @param[out] results a linear buffer to save detection results of each image. It must be released + * by \ref mmdeploy_detector_release_result + * @param[out] result_count a linear buffer with length being \p mat_count to save the number of + * detection results of each image. 
And it must be released by \ref + * mmdeploy_detector_release_result + * @return status of inference + */ +MMDEPLOY_API int mmdeploy_detector_apply(mmdeploy_detector_t detector, const mmdeploy_mat_t* mats, + int mat_count, mmdeploy_detection_t** results, + int** result_count); + +/** @brief Release the inference result buffer created by \ref mmdeploy_detector_apply + * @param[in] results detection results buffer + * @param[in] result_count \p results size buffer + * @param[in] count length of \p result_count + */ +MMDEPLOY_API void mmdeploy_detector_release_result(mmdeploy_detection_t* results, + const int* result_count, int count); + +/** + * @brief Destroy detector's handle + * @param[in] detector detector's handle created by \ref mmdeploy_detector_create_by_path + */ +MMDEPLOY_API void mmdeploy_detector_destroy(mmdeploy_detector_t detector); + +/****************************************************************************** + * Experimental asynchronous APIs */ + +/** + * @brief Same as \ref mmdeploy_detector_create, but allows to control execution context of tasks + * via context + */ +MMDEPLOY_API int mmdeploy_detector_create_v2(mmdeploy_model_t model, mmdeploy_context_t context, + mmdeploy_detector_t* detector); + +/** + * @brief Pack detector inputs into mmdeploy_value_t + * @param[in] mats a batch of images + * @param[in] mat_count number of images in the batch + * @return the created value + */ +MMDEPLOY_API int mmdeploy_detector_create_input(const mmdeploy_mat_t* mats, int mat_count, + mmdeploy_value_t* input); + +/** + * @brief Same as \ref mmdeploy_detector_apply, but input and output are packed in \ref + * mmdeploy_value_t. 
+ */ +MMDEPLOY_API int mmdeploy_detector_apply_v2(mmdeploy_detector_t detector, mmdeploy_value_t input, + mmdeploy_value_t* output); + +/** + * @brief Apply detector asynchronously + * @param[in] detector handle to the detector + * @param[in] input input sender + * @return output sender + */ +MMDEPLOY_API int mmdeploy_detector_apply_async(mmdeploy_detector_t detector, + mmdeploy_sender_t input, mmdeploy_sender_t* output); + +/** + * @brief Unpack detector output from a mmdeploy_value_t + * @param[in] output output obtained by applying a detector + * @param[out] results a linear buffer to save detection results of each image. It must be released + * by \ref mmdeploy_detector_release_result + * @param[out] result_count a linear buffer with length number of input images to save the number of + * detection results of each image. Must be released by \ref + * mmdeploy_detector_release_result + * @return status of the operation + */ +MMDEPLOY_API int mmdeploy_detector_get_result(mmdeploy_value_t output, + mmdeploy_detection_t** results, int** result_count); + +#ifdef __cplusplus +} +#endif + +#endif // MMDEPLOY_DETECTOR_H diff --git a/csrc/mmdeploy/apis/c/mmdeploy/executor.cpp b/csrc/mmdeploy/apis/c/mmdeploy/executor.cpp new file mode 100644 index 0000000000000000000000000000000000000000..2fdfb9091f1354e6b802c742ef774dca6bdb06c8 --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/executor.cpp @@ -0,0 +1,207 @@ +// Copyright (c) OpenMMLab. All rights reserved. + +#include "mmdeploy/executor.h" + +#include "mmdeploy/common.h" +#include "mmdeploy/common_internal.h" +#include "mmdeploy/execution/when_all_value.h" +#include "mmdeploy/executor_internal.h" + +using namespace mmdeploy; + +namespace { + +mmdeploy_scheduler_t CreateScheduler(const char* type, const Value& config = Value()) { + try { + auto creator = gRegistry().Get(type); + if (!creator) { + MMDEPLOY_ERROR("Creator for {} not found. 
Available schedulers: {}", type, + gRegistry().List()); + return nullptr; + } + return Cast(new SchedulerType(creator->Create(config))); + } catch (const std::exception& e) { + MMDEPLOY_ERROR("failed to create Scheduler: {} ({}), config: {}", type, e.what(), config); + return nullptr; + } +} + +} // namespace + +mmdeploy_sender_t mmdeploy_sender_copy(mmdeploy_sender_t input) { + if (!input) { + return nullptr; + } + return Take(SenderType(*Cast(input))); +} + +int mmdeploy_sender_destroy(mmdeploy_sender_t sender) { + delete Cast(sender); + return 0; +} + +mmdeploy_scheduler_t mmdeploy_executor_inline() { return CreateScheduler("Inline"); } + +mmdeploy_scheduler_t mmdeploy_executor_system_pool() { + // create a thread pool context and hold its shared handle + static auto scheduler = *Cast(CreateScheduler("ThreadPool")); + // return a copy of the handle to the thread pool + return Cast(new SchedulerType(scheduler)); +} + +mmdeploy_scheduler_t mmdeploy_executor_create_thread_pool(int num_threads) { + return CreateScheduler("ThreadPool", {{"num_threads", num_threads}}); +} + +mmdeploy_scheduler_t mmdeploy_executor_create_thread() { return CreateScheduler("SingleThread"); } + +mmdeploy_scheduler_t mmdeploy_executor_dynamic_batch(mmdeploy_scheduler_t scheduler, + int max_batch_size, int timeout) { + if (!scheduler) { + return nullptr; + } + return CreateScheduler( + "DynamicBatch", + {{"scheduler", *Cast(scheduler)}, {"max_batch_size", max_batch_size}, {"timeout", timeout}}); +} + +int mmdeploy_scheduler_destroy(mmdeploy_scheduler_t scheduler) { + delete Cast(scheduler); + return 0; +} + +mmdeploy_sender_t mmdeploy_executor_just(mmdeploy_value_t value) { + if (value) { + return Guard([&] { return Take(Just(*Cast(value))); }); + } else { + return Take(Just(Value())); + } +} + +mmdeploy_sender_t mmdeploy_executor_schedule(mmdeploy_scheduler_t scheduler) { + if (!scheduler) { + return nullptr; + } + return Guard([&] { return Take(Then(Schedule(*Cast(scheduler)), [] { return 
Value(); })); }); +} + +mmdeploy_sender_t mmdeploy_executor_transfer_just(mmdeploy_scheduler_t scheduler, + mmdeploy_value_t value) { + if (!scheduler || !value) { + return nullptr; + } + return Guard([&] { return Take(TransferJust(*Cast(scheduler), *Cast(value))); }); +} + +mmdeploy_sender_t mmdeploy_executor_transfer(mmdeploy_sender_t input, + mmdeploy_scheduler_t scheduler) { + if (!input || !scheduler) { + return nullptr; + } + return Guard([&] { return Take(Transfer(Take(input), *Cast(scheduler))); }); +} + +mmdeploy_sender_t mmdeploy_executor_on(mmdeploy_scheduler_t scheduler, mmdeploy_sender_t input) { + if (!scheduler || !input) { + return nullptr; + } + return Guard([&] { return Take(On(*Cast(scheduler), Take(input))); }); +} + +mmdeploy_sender_t mmdeploy_executor_then(mmdeploy_sender_t input, mmdeploy_then_fn_t fn, + void* context) { + if (!input || !fn) { + return nullptr; + } + return Guard([&] { + return Take(Then(Take(input), [fn, context](Value args) { + auto out = Cast(fn(Take(std::move(args)), context)); + Value ret(std::move(*out)); + delete out; + return ret; + })); + }); +} + +mmdeploy_sender_t mmdeploy_executor_let_value(mmdeploy_sender_t input, mmdeploy_let_value_fn_t fn, + void* context) { + if (!input || !fn) { + return nullptr; + } + return Guard([&] { + return Take(LetValue(Take(input), [fn, context](Value& args) { + auto out = Cast(fn(Cast(&args), context)); + SenderType ret(std::move(*out)); + delete out; + return ret; + })); + }); +} + +mmdeploy_sender_t mmdeploy_executor_split(mmdeploy_sender_t input) { + if (!input) { + return nullptr; + } + return Guard([&] { return Take(Split(Take(input))); }); +} + +mmdeploy_sender_t mmdeploy_executor_when_all(mmdeploy_sender_t inputs[], int32_t n) { + if (!inputs) { + return nullptr; + } + return Guard([&] { + std::vector senders; + senders.reserve(n); + for (int i = 0; i < n; ++i) { + senders.emplace_back(Take(inputs[i])); + } + return Take( + Then(WhenAll(std::move(senders)), [](Value::Array&& 
v) { return Value(std::move(v)); })); + }); +} + +mmdeploy_sender_t mmdeploy_executor_ensure_started(mmdeploy_sender_t input) { + if (!input) { + return nullptr; + } + return Guard([&] { return Take(EnsureStarted(Take(input))); }); +} + +int mmdeploy_executor_start_detached(mmdeploy_sender_t input) { + if (!input) { + return MMDEPLOY_E_INVALID_ARG; + } + try { + StartDetached(Take(input)); + return 0; + } catch (...) { + } + return MMDEPLOY_E_FAIL; +} + +mmdeploy_value_t mmdeploy_executor_sync_wait(mmdeploy_sender_t input) { + if (!input) { + return nullptr; + } + return Guard([&] { return Take(std::get(SyncWait(Take(input)))); }); +} + +int mmdeploy_executor_sync_wait_v2(mmdeploy_sender_t sender, mmdeploy_value_t* value) { + if (!sender) { + return MMDEPLOY_E_INVALID_ARG; + } + auto result = mmdeploy_executor_sync_wait(sender); + if (!result) { + return MMDEPLOY_E_FAIL; + } + if (value) { + *value = result; + } else { + mmdeploy_value_destroy(result); + } + return MMDEPLOY_SUCCESS; +} + +void mmdeploy_executor_execute(mmdeploy_scheduler_t scheduler, void (*fn)(void*), void* context) { + Execute(*Cast(scheduler), [fn, context] { fn(context); }); +} diff --git a/csrc/mmdeploy/apis/c/mmdeploy/executor.h b/csrc/mmdeploy/apis/c/mmdeploy/executor.h new file mode 100644 index 0000000000000000000000000000000000000000..a2c8ffa387a6a90dcc086b71942feafbe43959ad --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/executor.h @@ -0,0 +1,141 @@ +// Copyright (c) OpenMMLab. All rights reserved. 
+ +#ifndef MMDEPLOY_CSRC_APIS_C_EXECUTOR_H_ +#define MMDEPLOY_CSRC_APIS_C_EXECUTOR_H_ + +#include "mmdeploy/common.h" + +#if __cplusplus +extern "C" { +#endif + +/****************************************************************************** + * Experimental asynchronous APIs */ + +typedef mmdeploy_value_t (*mmdeploy_then_fn_t)(mmdeploy_value_t, void*); + +typedef mmdeploy_value_t (*mmdeploy_then_fn_v2_t)(mmdeploy_value_t*, void*); + +typedef int (*mmdeploy_then_fn_v3_t)(mmdeploy_value_t* input, mmdeploy_value_t* output, void*); + +struct mmdeploy_sender; +struct mmdeploy_scheduler; + +typedef struct mmdeploy_sender* mmdeploy_sender_t; +typedef struct mmdeploy_scheduler* mmdeploy_scheduler_t; + +typedef mmdeploy_sender_t (*mmdeploy_let_value_fn_t)(mmdeploy_value_t, void*); + +/////////////////////////////////////////////////////////////////////////////// +// Scheduler +/////////////////////////////////////////////////////////////////////////////// +MMDEPLOY_API mmdeploy_scheduler_t mmdeploy_executor_inline(); + +MMDEPLOY_API mmdeploy_scheduler_t mmdeploy_executor_system_pool(); + +/** + * Create a thread pool with the given number of worker threads + * @param[in] num_threads + * @return the handle to the created thread pool + */ +MMDEPLOY_API mmdeploy_scheduler_t mmdeploy_executor_create_thread_pool(int num_threads); + +MMDEPLOY_API mmdeploy_scheduler_t mmdeploy_executor_create_thread(); + +MMDEPLOY_API mmdeploy_scheduler_t mmdeploy_executor_dynamic_batch(mmdeploy_scheduler_t scheduler, + int max_batch_size, int timeout); + +MMDEPLOY_API int mmdeploy_scheduler_destroy(mmdeploy_scheduler_t scheduler); + +/////////////////////////////////////////////////////////////////////////////// +// Utilities +/////////////////////////////////////////////////////////////////////////////// + +/** + * @brief Create a copy of a copyable sender. Only senders created by \ref mmdeploy_executor_split + * is copyable for now. 
+ * @param[in] input copyable sender, + * @return the sender created, or nullptr if the sender is not copyable + */ +MMDEPLOY_API mmdeploy_sender_t mmdeploy_sender_copy(mmdeploy_sender_t input); + +/** + * @brief Destroy a sender, notice that all sender adapters will consume input senders, only unused + * senders should be destroyed using this function. + * @param[in] input + */ +MMDEPLOY_API int mmdeploy_sender_destroy(mmdeploy_sender_t sender); + +/////////////////////////////////////////////////////////////////////////////// +// Sender factories +/////////////////////////////////////////////////////////////////////////////// + +/** + * @brief Create a sender that sends the provided value + * @param[in] value + * @return created sender + */ +MMDEPLOY_API mmdeploy_sender_t mmdeploy_executor_just(mmdeploy_value_t value); + +/** + * @brief + * @param[in] scheduler + * @return the sender created + */ +MMDEPLOY_API mmdeploy_sender_t mmdeploy_executor_schedule(mmdeploy_scheduler_t scheduler); + +MMDEPLOY_API mmdeploy_sender_t mmdeploy_executor_transfer_just(mmdeploy_scheduler_t scheduler, + mmdeploy_value_t value); + +/////////////////////////////////////////////////////////////////////////////// +// Sender adapters +/////////////////////////////////////////////////////////////////////////////// + +/** + * Transfer the execution to the execution agent of the provided scheduler + * @param[in] input + * @param[in] scheduler + * @return the sender created + */ +MMDEPLOY_API mmdeploy_sender_t mmdeploy_executor_transfer(mmdeploy_sender_t input, + mmdeploy_scheduler_t scheduler); + +MMDEPLOY_API mmdeploy_sender_t mmdeploy_executor_on(mmdeploy_scheduler_t scheduler, + mmdeploy_sender_t input); + +MMDEPLOY_API mmdeploy_sender_t mmdeploy_executor_then(mmdeploy_sender_t input, + mmdeploy_then_fn_t fn, void* context); + +MMDEPLOY_API mmdeploy_sender_t mmdeploy_executor_let_value(mmdeploy_sender_t input, + mmdeploy_let_value_fn_t fn, + void* context); + +/** + * Convert the input 
sender into a sender that is copyable via \ref mmdeploy_sender_copy. Notice + * that this function doesn't make the sender multi-shot, it just return a sender that is copyable. + * @param[in] input + * @return the sender that is copyable + */ +MMDEPLOY_API mmdeploy_sender_t mmdeploy_executor_split(mmdeploy_sender_t input); + +MMDEPLOY_API mmdeploy_sender_t mmdeploy_executor_when_all(mmdeploy_sender_t inputs[], int32_t n); + +MMDEPLOY_API mmdeploy_sender_t mmdeploy_executor_ensure_started(mmdeploy_sender_t input); + +/////////////////////////////////////////////////////////////////////////////// +// Sender consumers +/////////////////////////////////////////////////////////////////////////////// +MMDEPLOY_API int mmdeploy_executor_start_detached(mmdeploy_sender_t input); + +MMDEPLOY_API mmdeploy_value_t mmdeploy_executor_sync_wait(mmdeploy_sender_t input); + +MMDEPLOY_API int mmdeploy_executor_sync_wait_v2(mmdeploy_sender_t input, mmdeploy_value_t* output); + +MMDEPLOY_API void mmdeploy_executor_execute(mmdeploy_scheduler_t scheduler, void (*fn)(void*), + void* context); + +#if __cplusplus +} +#endif + +#endif // MMDEPLOY_CSRC_APIS_C_EXECUTOR_H_ diff --git a/csrc/mmdeploy/apis/c/mmdeploy/executor_internal.h b/csrc/mmdeploy/apis/c/mmdeploy/executor_internal.h new file mode 100644 index 0000000000000000000000000000000000000000..95f39fe009069f3245d161956cdbaffdb455e7c0 --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/executor_internal.h @@ -0,0 +1,41 @@ +// Copyright (c) OpenMMLab. All rights reserved. 
+ +#ifndef MMDEPLOY_CSRC_APIS_C_EXECUTOR_INTERNAL_H_ +#define MMDEPLOY_CSRC_APIS_C_EXECUTOR_INTERNAL_H_ + +#include "mmdeploy/execution/schedulers/registry.h" +#include "mmdeploy/executor.h" + +using namespace mmdeploy; + +using SenderType = TypeErasedSender; +using SchedulerType = TypeErasedScheduler; + +namespace { + +inline SchedulerType* Cast(mmdeploy_scheduler_t s) { return reinterpret_cast(s); } + +inline mmdeploy_scheduler_t Cast(SchedulerType* s) { + return reinterpret_cast(s); +} + +inline SenderType* Cast(mmdeploy_sender_t s) { return reinterpret_cast(s); } + +inline mmdeploy_sender_t Cast(SenderType* s) { return reinterpret_cast(s); } + +inline SenderType Take(mmdeploy_sender_t s) { + auto sender = std::move(*Cast(s)); + mmdeploy_sender_destroy(s); + return sender; +} + +inline mmdeploy_sender_t Take(SenderType s) { return Cast(new SenderType(std::move(s))); } + +template , int> = 0> +inline mmdeploy_sender_t Take(T& s) { + return Take(SenderType(std::move(s))); +} + +} // namespace + +#endif // MMDEPLOY_CSRC_APIS_C_EXECUTOR_INTERNAL_H_ diff --git a/csrc/mmdeploy/apis/c/mmdeploy/handle.h b/csrc/mmdeploy/apis/c/mmdeploy/handle.h new file mode 100644 index 0000000000000000000000000000000000000000..006ddaae3de807cc1374b74dec373ef4c87dbf18 --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/handle.h @@ -0,0 +1,53 @@ +// Copyright (c) OpenMMLab. All rights reserved. 
+ +#ifndef MMDEPLOY_SRC_APIS_C_HANDLE_H_ +#define MMDEPLOY_SRC_APIS_C_HANDLE_H_ + +#include + +#include "mmdeploy/core/device.h" +#include "mmdeploy/core/graph.h" +#include "mmdeploy/core/value.h" +#include "mmdeploy/graph/common.h" +#include "mmdeploy/graph/static_router.h" + +namespace mmdeploy { + +using namespace framework; + +namespace { + +class AsyncHandle { + public: + AsyncHandle(const char* device_name, int device_id, Value config) + : AsyncHandle(SetContext(std::move(config), device_name, device_id)) {} + + explicit AsyncHandle(const Value& config) { + if (auto builder = graph::Builder::CreateFromConfig(config).value()) { + node_ = builder->Build().value(); + } else { + MMDEPLOY_ERROR("failed to find creator for node"); + throw_exception(eEntryNotFound); + } + } + + graph::Sender Process(graph::Sender input) { + return node_->Process(std::move(input)); + } + + private: + static Value SetContext(Value config, const char* device_name, int device_id) { + Device device(device_name, device_id); + Stream stream(device); + config["context"].update({{"device", device}, {"stream", stream}}); + return config; + } + + std::unique_ptr node_; +}; + +} // namespace + +} // namespace mmdeploy + +#endif // MMDEPLOY_SRC_APIS_C_HANDLE_H_ diff --git a/csrc/mmdeploy/apis/c/mmdeploy/model.cpp b/csrc/mmdeploy/apis/c/mmdeploy/model.cpp new file mode 100644 index 0000000000000000000000000000000000000000..6d202bce8142bb1cff723b0760b7f29102681b05 --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/model.cpp @@ -0,0 +1,41 @@ +// Copyright (c) OpenMMLab. All rights reserved. 
+ +// clang-format off +#include "mmdeploy/model.h" + +#include + +#include "mmdeploy/common_internal.h" +#include "mmdeploy/core/logger.h" +#include "mmdeploy/core/model.h" +// clang-format on + +using namespace mmdeploy; + +int mmdeploy_model_create_by_path(const char* path, mmdeploy_model_t* model) { + try { + auto ptr = std::make_unique(path); + *model = reinterpret_cast(ptr.release()); + return MMDEPLOY_SUCCESS; + } catch (const std::exception& e) { + MMDEPLOY_ERROR("failed to create model: {}", e.what()); + } catch (...) { + MMDEPLOY_ERROR("unknown exception caught"); + } + return MMDEPLOY_E_FAIL; +} + +int mmdeploy_model_create(const void* buffer, int size, mmdeploy_model_t* model) { + try { + auto ptr = std::make_unique(buffer, size); + *model = reinterpret_cast(ptr.release()); + return MMDEPLOY_SUCCESS; + } catch (const std::exception& e) { + MMDEPLOY_ERROR("failed to create model: {}", e.what()); + } catch (...) { + MMDEPLOY_ERROR("unknown exception caught"); + } + return MMDEPLOY_E_FAIL; +} + +void mmdeploy_model_destroy(mmdeploy_model_t model) { delete reinterpret_cast(model); } diff --git a/csrc/mmdeploy/apis/c/mmdeploy/model.h b/csrc/mmdeploy/apis/c/mmdeploy/model.h new file mode 100644 index 0000000000000000000000000000000000000000..394d2902c29e39f58dbaa148a84a03f0df44867a --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/model.h @@ -0,0 +1,47 @@ +// Copyright (c) OpenMMLab. All rights reserved. 
+ +/** + * @file model.h + * @brief Interface to MMDeploy SDK Model + */ + +#ifndef MMDEPLOY_SRC_APIS_C_MODEL_H_ +#define MMDEPLOY_SRC_APIS_C_MODEL_H_ + +#include "mmdeploy/common.h" + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct mmdeploy_model* mmdeploy_model_t; + +/** + * @brief Create SDK Model instance from given model path + * @param[in] path model path + * @param[out] model sdk model instance that must be destroyed by \ref mmdeploy_model_destroy + * @return status code of the operation + */ +MMDEPLOY_API int mmdeploy_model_create_by_path(const char* path, mmdeploy_model_t* model); + +/** + * @brief Create SDK Model instance from memory + * @param[in] buffer a linear buffer contains the model information + * @param[in] size size of \p buffer in bytes + * @param[out] model sdk model instance that must be destroyed by \ref mmdeploy_model_destroy + * @return status code of the operation + */ +MMDEPLOY_API int mmdeploy_model_create(const void* buffer, int size, mmdeploy_model_t* model); + +/** + * @brief Destroy model instance + * @param[in] model sdk model instance created by \ref mmdeploy_model_create_by_path or \ref + * mmdeploy_model_create + */ +MMDEPLOY_API void mmdeploy_model_destroy(mmdeploy_model_t model); + +#ifdef __cplusplus +} +#endif + +#endif // MMDEPLOY_SRC_APIS_C_MODEL_H_ diff --git a/csrc/mmdeploy/apis/c/mmdeploy/pipeline.cpp b/csrc/mmdeploy/apis/c/mmdeploy/pipeline.cpp new file mode 100644 index 0000000000000000000000000000000000000000..a9a02807ee0fc5f469a827d861dc49f9fdf17fe3 --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/pipeline.cpp @@ -0,0 +1,78 @@ +// Copyright (c) OpenMMLab. All rights reserved. 
+ +#include "mmdeploy/pipeline.h" + +#include "mmdeploy/common_internal.h" +#include "mmdeploy/executor_internal.h" +#include "mmdeploy/handle.h" + +int mmdeploy_pipeline_create_v3(mmdeploy_value_t config, mmdeploy_context_t context, + mmdeploy_pipeline_t* pipeline) { + try { + auto _config = *Cast(config); + if (context) { + if (!_config.contains("context")) { + _config["context"] = Value::Object(); + } + update(_config["context"].object(), Cast(context)->object(), 2); + } + auto _handle = std::make_unique(std::move(_config)); + *pipeline = Cast(_handle.release()); + return MMDEPLOY_SUCCESS; + } catch (const std::exception& e) { + MMDEPLOY_ERROR("exception caught: {}", e.what()); + } catch (...) { + MMDEPLOY_ERROR("unknown exception caught"); + } + return MMDEPLOY_E_FAIL; +} + +int mmdeploy_pipeline_create_from_model(mmdeploy_model_t model, mmdeploy_context_t context, + mmdeploy_pipeline_t* pipeline) { + auto config = Cast(model)->ReadConfig("pipeline.json"); + auto _context = *Cast(context); + _context["model"] = *Cast(model); + return mmdeploy_pipeline_create_v3(Cast(&config.value()), (mmdeploy_context_t)&_context, + pipeline); +} + +int mmdeploy_pipeline_apply_async(mmdeploy_pipeline_t pipeline, mmdeploy_sender_t input, + mmdeploy_sender_t* output) { + if (!pipeline || !input || !output) { + return MMDEPLOY_E_INVALID_ARG; + } + try { + auto h = Cast(pipeline); + *output = Take(h->Process(Take(input))); + return MMDEPLOY_SUCCESS; + } catch (const std::exception& e) { + MMDEPLOY_ERROR("exception caught: {}", e.what()); + } catch (...) 
{ + MMDEPLOY_ERROR("unknown exception caught"); + } + return MMDEPLOY_E_FAIL; +} + +void mmdeploy_pipeline_destroy(mmdeploy_pipeline_t pipeline) { + if (pipeline != nullptr) { + delete Cast(pipeline); + } +} + +int mmdeploy_pipeline_apply(mmdeploy_pipeline_t pipeline, mmdeploy_value_t input, + mmdeploy_value_t* output) { + auto input_sender = mmdeploy_executor_just(input); + if (!input_sender) { + return MMDEPLOY_E_FAIL; + } + mmdeploy_sender_t output_sender{}; + if (auto ec = mmdeploy_pipeline_apply_async(pipeline, input_sender, &output_sender)) { + return ec; + } + auto _output = mmdeploy_executor_sync_wait(output_sender); + if (!_output) { + return MMDEPLOY_E_FAIL; + } + *output = _output; + return MMDEPLOY_SUCCESS; +} diff --git a/csrc/mmdeploy/apis/c/mmdeploy/pipeline.h b/csrc/mmdeploy/apis/c/mmdeploy/pipeline.h new file mode 100644 index 0000000000000000000000000000000000000000..55ccf1e67c1f4ad16d253675c2a57497d9dac2dc --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/pipeline.h @@ -0,0 +1,69 @@ +// Copyright (c) OpenMMLab. All rights reserved. 
+ +#ifndef MMDEPLOY_CSRC_APIS_C_PIPELINE_H_ +#define MMDEPLOY_CSRC_APIS_C_PIPELINE_H_ + +#include "mmdeploy/common.h" +#include "mmdeploy/executor.h" +#include "mmdeploy/model.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/****************************************************************************** + * Experimental pipeline APIs */ + +typedef struct mmdeploy_pipeline* mmdeploy_pipeline_t; + +/** + * Create pipeline + * @param config + * @param context + * @param pipeline + * @return + */ +MMDEPLOY_API int mmdeploy_pipeline_create_v3(mmdeploy_value_t config, mmdeploy_context_t context, + mmdeploy_pipeline_t* pipeline); +/** + * Create pipeline from internal pipeline config of the model + * @param model + * @param context + * @param pipeline + * @return + */ +MMDEPLOY_API int mmdeploy_pipeline_create_from_model(mmdeploy_model_t model, + mmdeploy_context_t context, + mmdeploy_pipeline_t* pipeline); + +/** + * @brief Apply pipeline + * @param[in] pipeline handle of the pipeline + * @param[in] input input value + * @param[out] output output value + * @return status of the operation + */ +MMDEPLOY_API int mmdeploy_pipeline_apply(mmdeploy_pipeline_t pipeline, mmdeploy_value_t input, + mmdeploy_value_t* output); + +/** + * Apply pipeline asynchronously + * @param pipeline handle of the pipeline + * @param input input sender that will be consumed by the operation + * @param output output sender + * @return status of the operation + */ +MMDEPLOY_API int mmdeploy_pipeline_apply_async(mmdeploy_pipeline_t pipeline, + mmdeploy_sender_t input, mmdeploy_sender_t* output); + +/** + * @brief destroy pipeline + * @param[in] pipeline + */ +MMDEPLOY_API void mmdeploy_pipeline_destroy(mmdeploy_pipeline_t pipeline); + +#ifdef __cplusplus +} +#endif + +#endif // MMDEPLOY_CSRC_APIS_C_PIPELINE_H_ diff --git a/csrc/mmdeploy/apis/c/mmdeploy/pose_detector.cpp b/csrc/mmdeploy/apis/c/mmdeploy/pose_detector.cpp new file mode 100644 index 
0000000000000000000000000000000000000000..46f9921e623ea30fda15cdf6760e9cc0d0d19602 --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/pose_detector.cpp @@ -0,0 +1,179 @@ +// Copyright (c) OpenMMLab. All rights reserved. + +#include "mmdeploy/pose_detector.h" + +#include + +#include "mmdeploy/codebase/mmpose/mmpose.h" +#include "mmdeploy/common_internal.h" +#include "mmdeploy/core/device.h" +#include "mmdeploy/core/graph.h" +#include "mmdeploy/core/mat.h" +#include "mmdeploy/core/utils/formatter.h" +#include "mmdeploy/handle.h" +#include "mmdeploy/pipeline.h" + +using namespace std; +using namespace mmdeploy; + +int mmdeploy_pose_detector_create(mmdeploy_model_t model, const char* device_name, int device_id, + mmdeploy_pose_detector_t* detector) { + mmdeploy_context_t context{}; + auto ec = mmdeploy_context_create_by_device(device_name, device_id, &context); + if (ec != MMDEPLOY_SUCCESS) { + return ec; + } + ec = mmdeploy_pose_detector_create_v2(model, context, detector); + mmdeploy_context_destroy(context); + return ec; +} + +int mmdeploy_pose_detector_create_by_path(const char* model_path, const char* device_name, + int device_id, mmdeploy_pose_detector_t* detector) { + mmdeploy_model_t model{}; + if (auto ec = mmdeploy_model_create_by_path(model_path, &model)) { + return ec; + } + auto ec = mmdeploy_pose_detector_create(model, device_name, device_id, detector); + mmdeploy_model_destroy(model); + return ec; +} + +int mmdeploy_pose_detector_apply(mmdeploy_pose_detector_t detector, const mmdeploy_mat_t* mats, + int mat_count, mmdeploy_pose_detection_t** results) { + return mmdeploy_pose_detector_apply_bbox(detector, mats, mat_count, nullptr, nullptr, results); +} + +int mmdeploy_pose_detector_apply_bbox(mmdeploy_pose_detector_t detector, const mmdeploy_mat_t* mats, + int mat_count, const mmdeploy_rect_t* bboxes, + const int* bbox_count, mmdeploy_pose_detection_t** results) { + wrapped input; + if (auto ec = + mmdeploy_pose_detector_create_input(mats, mat_count, 
bboxes, bbox_count, input.ptr())) { + return ec; + } + wrapped output; + if (auto ec = mmdeploy_pose_detector_apply_v2(detector, input, output.ptr())) { + return ec; + } + if (auto ec = mmdeploy_pose_detector_get_result(output, results)) { + return ec; + } + return MMDEPLOY_SUCCESS; +} + +void mmdeploy_pose_detector_release_result(mmdeploy_pose_detection_t* results, int count) { + if (results == nullptr) { + return; + } + for (int i = 0; i < count; ++i) { + delete[] results[i].point; + delete[] results[i].score; + } + delete[] results; +} + +void mmdeploy_pose_detector_destroy(mmdeploy_pose_detector_t detector) { + mmdeploy_pipeline_destroy((mmdeploy_pipeline_t)detector); +} + +int mmdeploy_pose_detector_create_v2(mmdeploy_model_t model, mmdeploy_context_t context, + mmdeploy_pose_detector_t* detector) { + return mmdeploy_pipeline_create_from_model(model, context, (mmdeploy_pipeline_t*)detector); +} + +int mmdeploy_pose_detector_create_input(const mmdeploy_mat_t* mats, int mat_count, + const mmdeploy_rect_t* bboxes, const int* bbox_count, + mmdeploy_value_t* value) { + if (mat_count && mats == nullptr) { + return MMDEPLOY_E_INVALID_ARG; + } + try { + Value::Array input_images; + + auto add_bbox = [&](const Mat& img, const mmdeploy_rect_t* bbox) { + Value::Array b; + if (bbox) { + float width = bbox->right - bbox->left + 1; + float height = bbox->bottom - bbox->top + 1; + b = {bbox->left, bbox->top, width, height, 1.0}; + } else { + b = {0, 0, img.width(), img.height(), 1.0}; + } + input_images.push_back({{"ori_img", img}, {"bbox", std::move(b)}}); + }; + + for (int i = 0; i < mat_count; ++i) { + auto _mat = Cast(mats[i]); + if (bboxes && bbox_count) { + for (int j = 0; j < bbox_count[i]; ++j) { + add_bbox(_mat, bboxes++); + } + } else { // inference whole image + add_bbox(_mat, nullptr); + } + } + + *value = Take(Value{std::move(input_images)}); + return MMDEPLOY_SUCCESS; + } catch (const std::exception& e) { + MMDEPLOY_ERROR("unhandled exception: {}", e.what()); + 
} catch (...) { + MMDEPLOY_ERROR("unknown exception caught"); + } + return MMDEPLOY_E_FAIL; +} + +int mmdeploy_pose_detector_apply_v2(mmdeploy_pose_detector_t detector, mmdeploy_value_t input, + mmdeploy_value_t* output) { + return mmdeploy_pipeline_apply((mmdeploy_pipeline_t)detector, input, output); +} + +int mmdeploy_pose_detector_apply_async(mmdeploy_pose_detector_t detector, mmdeploy_sender_t input, + mmdeploy_sender_t* output) { + return mmdeploy_pipeline_apply_async((mmdeploy_pipeline_t)detector, input, output); +} + +int mmdeploy_pose_detector_get_result(mmdeploy_value_t output, + mmdeploy_pose_detection_t** results) { + if (!output || !results) { + return MMDEPLOY_E_INVALID_ARG; + } + try { + std::vector detections; + from_value(Cast(output)->front(), detections); + + size_t count = detections.size(); + + auto deleter = [&](mmdeploy_pose_detection_t* p) { + mmdeploy_pose_detector_release_result(p, static_cast(count)); + }; + + std::unique_ptr _results( + new mmdeploy_pose_detection_t[count]{}, deleter); + + size_t result_idx = 0; + for (const auto& bbox_result : detections) { + auto& res = _results[result_idx++]; + auto size = bbox_result.key_points.size(); + + res.point = new mmdeploy_point_t[size]; + res.score = new float[size]; + res.length = static_cast(size); + + for (int k = 0; k < size; k++) { + res.point[k].x = bbox_result.key_points[k].bbox[0]; + res.point[k].y = bbox_result.key_points[k].bbox[1]; + res.score[k] = bbox_result.key_points[k].score; + } + } + + *results = _results.release(); + return MMDEPLOY_SUCCESS; + } catch (const std::exception& e) { + MMDEPLOY_ERROR("unhandled exception: {}", e.what()); + } catch (...) 
{ + MMDEPLOY_ERROR("unknown exception caught"); + } + return MMDEPLOY_E_FAIL; +} diff --git a/csrc/mmdeploy/apis/c/mmdeploy/pose_detector.h b/csrc/mmdeploy/apis/c/mmdeploy/pose_detector.h new file mode 100644 index 0000000000000000000000000000000000000000..ff0987cee45fb8c308639fa2abdff812c31b6eb6 --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/pose_detector.h @@ -0,0 +1,126 @@ +// Copyright (c) OpenMMLab. All rights reserved. + +/** + * @file pose_detector.h + * @brief Interface to MMPose task + */ + +#ifndef MMDEPLOY_SRC_APIS_C_POSE_DETECTOR_H_ +#define MMDEPLOY_SRC_APIS_C_POSE_DETECTOR_H_ + +#include "mmdeploy/common.h" +#include "mmdeploy/executor.h" +#include "mmdeploy/model.h" + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct mmdeploy_pose_detection_t { + mmdeploy_point_t* point; ///< keypoint + float* score; ///< keypoint score + int length; ///< number of keypoint +} mmdeploy_pose_detection_t; + +typedef struct mmdeploy_pose_detector* mmdeploy_pose_detector_t; + +/** + * @brief Create a pose detector instance + * @param[in] model an instance of mmpose model created by + * \ref mmdeploy_model_create_by_path or \ref mmdeploy_model_create in \ref model.h + * @param[in] device_name name of device, such as "cpu", "cuda", etc. + * @param[in] device_id id of device. + * @param[out] detector handle of the created pose detector, which must be destroyed + * by \ref mmdeploy_pose_detector_destroy + * @return status code of the operation + */ +MMDEPLOY_API int mmdeploy_pose_detector_create(mmdeploy_model_t model, const char* device_name, + int device_id, mmdeploy_pose_detector_t* detector); + +/** + * @brief Create a pose detector instance + * @param[in] model_path path to pose detection model + * @param[in] device_name name of device, such as "cpu", "cuda", etc. + * @param[in] device_id id of device. 
+ * @param[out] detector handle of the created pose detector, which must be destroyed + * by \ref mmdeploy_pose_detector_destroy + * @return status code of the operation + */ +MMDEPLOY_API int mmdeploy_pose_detector_create_by_path(const char* model_path, + const char* device_name, int device_id, + mmdeploy_pose_detector_t* detector); + +/** + * @brief Apply pose detector to a batch of images with full image roi + * @param[in] detector pose detector's handle created by \ref + * mmdeploy_pose_detector_create_by_path + * @param[in] mats a batch of images + * @param[in] mat_count number of images in the batch + * @param[out] results a linear buffer contains the pose result, must be released + * by \ref mmdeploy_pose_detector_release_result + * @return status code of the operation + */ +MMDEPLOY_API int mmdeploy_pose_detector_apply(mmdeploy_pose_detector_t detector, + const mmdeploy_mat_t* mats, int mat_count, + mmdeploy_pose_detection_t** results); + +/** + * @brief Apply pose detector to a batch of images supplied with bboxes(roi) + * @param[in] detector pose detector's handle created by \ref + * mmdeploy_pose_detector_create_by_path + * @param[in] mats a batch of images + * @param[in] mat_count number of images in the batch + * @param[in] bboxes bounding boxes(roi) detected by mmdet + * @param[in] bbox_count number of bboxes of each \p mats, must be same length as \p mats + * @param[out] results a linear buffer contains the pose result, which has the same length as \p + * bboxes, must be released by \ref mmdeploy_pose_detector_release_result + * @return status code of the operation + */ +MMDEPLOY_API int mmdeploy_pose_detector_apply_bbox(mmdeploy_pose_detector_t detector, + const mmdeploy_mat_t* mats, int mat_count, + const mmdeploy_rect_t* bboxes, + const int* bbox_count, + mmdeploy_pose_detection_t** results); + +/** @brief Release result buffer returned by \ref mmdeploy_pose_detector_apply or \ref + * mmdeploy_pose_detector_apply_bbox + * @param[in] results
result buffer by pose detector + * @param[in] count length of \p result + */ +MMDEPLOY_API void mmdeploy_pose_detector_release_result(mmdeploy_pose_detection_t* results, + int count); + +/** + * @brief destroy pose_detector + * @param[in] detector handle of pose_detector created by \ref + * mmdeploy_pose_detector_create_by_path or \ref mmdeploy_pose_detector_create + */ +MMDEPLOY_API void mmdeploy_pose_detector_destroy(mmdeploy_pose_detector_t detector); + +/****************************************************************************** + * Experimental asynchronous APIs */ + +MMDEPLOY_API int mmdeploy_pose_detector_create_v2(mmdeploy_model_t model, + mmdeploy_context_t context, + mmdeploy_pose_detector_t* detector); + +MMDEPLOY_API int mmdeploy_pose_detector_create_input(const mmdeploy_mat_t* mats, int mat_count, + const mmdeploy_rect_t* bboxes, + const int* bbox_count, + mmdeploy_value_t* value); + +MMDEPLOY_API int mmdeploy_pose_detector_apply_v2(mmdeploy_pose_detector_t detector, + mmdeploy_value_t input, mmdeploy_value_t* output); + +MMDEPLOY_API int mmdeploy_pose_detector_apply_async(mmdeploy_pose_detector_t detector, + mmdeploy_sender_t input, + mmdeploy_sender_t* output); + +MMDEPLOY_API int mmdeploy_pose_detector_get_result(mmdeploy_value_t output, + mmdeploy_pose_detection_t** results); + +#ifdef __cplusplus +} +#endif + +#endif // MMDEPLOY_SRC_APIS_C_POSE_DETECTOR_H_ diff --git a/csrc/mmdeploy/apis/c/mmdeploy/pose_tracker.cpp b/csrc/mmdeploy/apis/c/mmdeploy/pose_tracker.cpp new file mode 100644 index 0000000000000000000000000000000000000000..113b520c39b39144930509a92f5922eba8681239 --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/pose_tracker.cpp @@ -0,0 +1,225 @@ +// Copyright (c) OpenMMLab. All rights reserved. 
+ +#include "mmdeploy/pose_tracker.h" + +#include "mmdeploy/archive/json_archive.h" +#include "mmdeploy/archive/value_archive.h" +#include "mmdeploy/codebase/mmpose/pose_tracker/common.h" +#include "mmdeploy/common_internal.h" +#include "mmdeploy/core/mpl/structure.h" +#include "mmdeploy/pipeline.h" + +namespace mmdeploy { + +using namespace framework; + +} // namespace mmdeploy + +using namespace mmdeploy; + +namespace { + +Value config_template() { + static const auto json = R"( +{ + "type": "Pipeline", + "input": ["img", "force_det", "state"], + "output": "targets", + "tasks": [ + { + "type": "Task", + "name": "prepare", + "module": "pose_tracker::Prepare", + "input": ["img", "force_det", "state"], + "output": "use_det" + }, + { + "type": "Task", + "module": "Transform", + "name": "preload", + "input": "img", + "output": "data", + "transforms": [ { "type": "LoadImageFromFile" } ] + }, + { + "type": "Cond", + "name": "cond", + "input": ["use_det", "data"], + "output": "dets", + "body": { + "name": "detection", + "type": "Inference", + "params": { "model": "detection" } + } + }, + { + "type": "Task", + "name": "process_bboxes", + "module": "pose_tracker::ProcessBboxes", + "input": ["dets", "data", "state"], + "output": ["rois", "track_ids"] + }, + { + "input": "*rois", + "output": "*keypoints", + "name": "pose", + "type": "Inference", + "params": { "model": "pose" } + }, + { + "type": "Task", + "name": "track_step", + "module": "pose_tracker::TrackStep", + "scheduler": "pool", + "input": ["keypoints", "track_ids", "state"], + "output": "targets" + } + ] +} +)"_json; + static const auto config = from_json(json); + return config; +} + +} // namespace + +int mmdeploy_pose_tracker_default_params(mmdeploy_pose_tracker_param_t* params) { + mmpose::_pose_tracker::SetDefaultParams(*params); + return 0; +} + +int mmdeploy_pose_tracker_create(mmdeploy_model_t det_model, mmdeploy_model_t pose_model, + mmdeploy_context_t context, mmdeploy_pose_tracker_t* pipeline) { + 
mmdeploy_context_add(context, MMDEPLOY_TYPE_MODEL, "detection", det_model); + mmdeploy_context_add(context, MMDEPLOY_TYPE_MODEL, "pose", pose_model); + auto config = config_template(); + return mmdeploy_pipeline_create_v3(Cast(&config), context, (mmdeploy_pipeline_t*)pipeline); +} + +void mmdeploy_pose_tracker_destroy(mmdeploy_pose_tracker_t pipeline) { + mmdeploy_pipeline_destroy((mmdeploy_pipeline_t)pipeline); +} + +int mmdeploy_pose_tracker_create_state(mmdeploy_pose_tracker_t pipeline, + const mmdeploy_pose_tracker_param_t* params, + mmdeploy_pose_tracker_state_t* state) { + try { + auto create_fn = gRegistry().Create("pose_tracker::Create", Value()).value(); + *state = reinterpret_cast(new Value( + create_fn->Process({const_cast(params)}).value()[0])); + return MMDEPLOY_SUCCESS; + } catch (const std::exception& e) { + MMDEPLOY_ERROR("unhandled exception: {}", e.what()); + } catch (...) { + MMDEPLOY_ERROR("unknown exception caught"); + } + return MMDEPLOY_E_FAIL; +} + +void mmdeploy_pose_tracker_destroy_state(mmdeploy_pose_tracker_state_t state) { + delete reinterpret_cast(state); +} + +int mmdeploy_pose_tracker_create_input(mmdeploy_pose_tracker_state_t* states, + const mmdeploy_mat_t* frames, const int32_t* use_detect, + int batch_size, mmdeploy_value_t* value) { + try { + Value::Array images; + Value::Array use_dets; + Value::Array trackers; + for (int i = 0; i < batch_size; ++i) { + images.push_back({{"ori_img", Cast(frames[i])}}); + use_dets.emplace_back(use_detect ? use_detect[i] : -1); + trackers.push_back(*reinterpret_cast(states[i])); + } + *value = Take(Value{std::move(images), std::move(use_dets), std::move(trackers)}); + return MMDEPLOY_SUCCESS; + } catch (const std::exception& e) { + MMDEPLOY_ERROR("unhandled exception: {}", e.what()); + } catch (...) 
{ + MMDEPLOY_ERROR("unknown exception caught"); + } + return MMDEPLOY_E_FAIL; +} + +using ResultType = mmdeploy::Structure, + std::vector>; + +int mmdeploy_pose_tracker_get_result(mmdeploy_value_t output, + mmdeploy_pose_tracker_target_t** results, + int32_t** result_count) { + if (!output || !results) { + return MMDEPLOY_E_INVALID_ARG; + } + try { + // convert result from Values + std::vector res; + from_value(Cast(output)->front(), res); + + size_t total = 0; + for (const auto& r : res) { + total += r.bboxes.size(); + } + + // preserve space for the output structure + ResultType result_type({total, 1, 1}); + auto [result_data, result_cnt, result_holder] = result_type.pointers(); + + auto result_ptr = result_data; + + result_holder->swap(res); + + // build output structure + for (auto& r : *result_holder) { + for (int j = 0; j < r.bboxes.size(); ++j) { + auto& p = *result_ptr++; + p.keypoint_count = static_cast(r.keypoints[j].size()); + p.keypoints = r.keypoints[j].data(); + p.scores = r.scores[j].data(); + p.bbox = r.bboxes[j]; + p.target_id = r.track_ids[j]; + } + result_cnt->push_back(r.bboxes.size()); + // debug info + // p.reserved0 = new std::vector(r.pose_input_bboxes); + // p.reserved1 = new std::vector(r.pose_output_bboxes); + } + + *results = result_data; + *result_count = result_cnt->data(); + result_type.release(); + + return MMDEPLOY_SUCCESS; + + } catch (const std::exception& e) { + MMDEPLOY_ERROR("unhandled exception: {}", e.what()); + } catch (...) 
{ + MMDEPLOY_ERROR("unknown exception caught"); + } + return MMDEPLOY_E_FAIL; +} + +int mmdeploy_pose_tracker_apply(mmdeploy_pose_tracker_t pipeline, + mmdeploy_pose_tracker_state_t* states, const mmdeploy_mat_t* frames, + const int32_t* use_detect, int32_t count, + mmdeploy_pose_tracker_target_t** results, int32_t** result_count) { + wrapped input; + if (auto ec = + mmdeploy_pose_tracker_create_input(states, frames, use_detect, count, input.ptr())) { + return ec; + } + wrapped output; + if (auto ec = mmdeploy_pipeline_apply((mmdeploy_pipeline_t)pipeline, input, output.ptr())) { + return ec; + } + if (auto ec = mmdeploy_pose_tracker_get_result(output, results, result_count)) { + return ec; + } + return MMDEPLOY_SUCCESS; +} + +void mmdeploy_pose_tracker_release_result(mmdeploy_pose_tracker_target_t* results, + const int32_t* result_count, int count) { + auto total = std::accumulate(result_count, result_count + count, 0); + ResultType deleter({static_cast(total), 1, 1}, results); +} diff --git a/csrc/mmdeploy/apis/c/mmdeploy/pose_tracker.h b/csrc/mmdeploy/apis/c/mmdeploy/pose_tracker.h new file mode 100644 index 0000000000000000000000000000000000000000..4b27fbab8a6add9a8047a9ef79222f74d09bbaf2 --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/pose_tracker.h @@ -0,0 +1,158 @@ +// Copyright (c) OpenMMLab. All rights reserved. 
+ +/** + * @file pose_tracker.h + * @brief Pose tracker C API + */ + +#ifndef MMDEPLOY_POSE_TRACKER_H +#define MMDEPLOY_POSE_TRACKER_H + +#include "mmdeploy/common.h" +#include "mmdeploy/detector.h" +#include "mmdeploy/model.h" +#include "mmdeploy/pose_detector.h" + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct mmdeploy_pose_tracker* mmdeploy_pose_tracker_t; +typedef struct mmdeploy_pose_tracker_state* mmdeploy_pose_tracker_state_t; + +typedef struct mmdeploy_pose_tracker_param_t { + // detection interval, default = 1 + int32_t det_interval; + // detection label use for pose estimation, default = 0 + int32_t det_label; + // detection score threshold, default = 0.5 + float det_thr; + // detection minimum bbox size (compute as sqrt(area)), default = -1 + float det_min_bbox_size; + // nms iou threshold for merging detected bboxes and bboxes from tracked targets, default = 0.7 + float det_nms_thr; + + // max number of bboxes used for pose estimation per frame, default = -1 + int32_t pose_max_num_bboxes; + // threshold for visible key-points, default = 0.5 + float pose_kpt_thr; + // min number of key-points for valid poses (-1 indicates ceil(n_kpts/2)), default = -1 + int32_t pose_min_keypoints; + // scale for expanding key-points to bbox, default = 1.25 + float pose_bbox_scale; + // min pose bbox size, tracks with bbox size smaller than the threshold will be dropped, + // default = -1 + float pose_min_bbox_size; + // nms oks/iou threshold for suppressing overlapped poses, useful when multiple pose estimations + // collapse to the same target, default = 0.5 + float pose_nms_thr; + // keypoint sigmas for computing OKS, will use IOU if not set, default = nullptr + float* keypoint_sigmas; + // size of keypoint sigma array, must be consistent with the number of key-points, default = 0 + int32_t keypoint_sigmas_size; + + // iou threshold for associating missing tracks, default = 0.4 + float track_iou_thr; + // max number of missing frames before a missing 
track is removed, default = 10 + int32_t track_max_missing; + // track history size, default = 1 + int32_t track_history_size; + + // weight of position for setting covariance matrices of kalman filters, default = 0.05 + float std_weight_position; + // weight of velocity for setting covariance matrices of kalman filters, default = 0.00625 + float std_weight_velocity; + + // params for the one-euro filter for smoothing the outputs - (beta, fc_min, fc_derivative) + // default = (0.007, 1, 1) + float smooth_params[3]; +} mmdeploy_pose_tracker_param_t; + +typedef struct mmdeploy_pose_tracker_target_t { + mmdeploy_point_t* keypoints; // key-points of the target + int32_t keypoint_count; // size of `keypoints` array + float* scores; // scores of each key-point + mmdeploy_rect_t bbox; // estimated bbox from key-points + uint32_t target_id; // target id from internal tracker +} mmdeploy_pose_tracker_target_t; + +/** + * @brief Fill params with default parameters + * @param[in,out] params + * @return status of the operation + */ +MMDEPLOY_API int mmdeploy_pose_tracker_default_params(mmdeploy_pose_tracker_param_t* params); + +/** + * @brief Create pose tracker pipeline + * @param[in] det_model detection model object, created by \ref mmdeploy_model_create + * @param[in] pose_model pose model object + * @param[in] context context object describing execution environment (device, profiler, etc...), + * created by \ref mmdeploy_context_create + * @param[out] pipeline handle of the created pipeline + * @return status of the operation + */ +MMDEPLOY_API int mmdeploy_pose_tracker_create(mmdeploy_model_t det_model, + mmdeploy_model_t pose_model, + mmdeploy_context_t context, + mmdeploy_pose_tracker_t* pipeline); + +/** + * @brief Destroy pose tracker pipeline + * @param[in] pipeline + */ +MMDEPLOY_API void mmdeploy_pose_tracker_destroy(mmdeploy_pose_tracker_t pipeline); + +/** + * @brief Create a tracker state handle that corresponds to a video stream + * @param[in] pipeline handle of a
pose tracker pipeline + * @param[in] params params for creating the tracker state + * @param[out] state handle of the created tracker state + * @return status of the operation + */ +MMDEPLOY_API int mmdeploy_pose_tracker_create_state(mmdeploy_pose_tracker_t pipeline, + const mmdeploy_pose_tracker_param_t* params, + mmdeploy_pose_tracker_state_t* state); + +/** + * @brief Destroy tracker state + * @param[in] state handle of the tracker state + */ +MMDEPLOY_API void mmdeploy_pose_tracker_destroy_state(mmdeploy_pose_tracker_state_t state); + +/** + * @brief Apply pose tracker pipeline, notice that this function supports batch operation by feeding + * arrays of size \p count to \p states, \p frames and \p use_detect + * @param[in] pipeline handle of a pose tracker pipeline + * @param[in] states tracker states handles, array of size \p count + * @param[in] frames input frames of size \p count + * @param[in] use_detect control the use of detector, array of size \p count + * -1: use params.det_interval, 0: don't use detector, 1: force use detector + * @param[in] count batch size + * @param[out] results a linear buffer contains the tracked targets of input frames. Should be + * released by \ref mmdeploy_pose_tracker_release_result + * @param[out] result_count a linear buffer of size \p count contains the number of tracked + * targets of the frames. 
Should be released by \ref mmdeploy_pose_tracker_release_result + * @return status of the operation + */ +MMDEPLOY_API int mmdeploy_pose_tracker_apply(mmdeploy_pose_tracker_t pipeline, + mmdeploy_pose_tracker_state_t* states, + const mmdeploy_mat_t* frames, + const int32_t* use_detect, int32_t count, + mmdeploy_pose_tracker_target_t** results, + int32_t** result_count); + +/** + * @brief Release result objects + * @param[in] results + * @param[in] result_count + * @param[in] count + */ +MMDEPLOY_API void mmdeploy_pose_tracker_release_result(mmdeploy_pose_tracker_target_t* results, + const int32_t* result_count, int count); + +#ifdef __cplusplus +} +#endif + +#endif // MMDEPLOY_POSE_TRACKER_H diff --git a/csrc/mmdeploy/apis/c/mmdeploy/restorer.cpp b/csrc/mmdeploy/apis/c/mmdeploy/restorer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..9ca2ca65f7886f357fb5a8b79cc5278c9f025152 --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/restorer.cpp @@ -0,0 +1,121 @@ +// Copyright (c) OpenMMLab. All rights reserved. 
+ +#include "mmdeploy/restorer.h" + +#include "mmdeploy/codebase/mmedit/mmedit.h" +#include "mmdeploy/common_internal.h" +#include "mmdeploy/core/device.h" +#include "mmdeploy/core/graph.h" +#include "mmdeploy/core/mpl/structure.h" +#include "mmdeploy/core/utils/formatter.h" +#include "mmdeploy/executor_internal.h" +#include "mmdeploy/handle.h" +#include "mmdeploy/pipeline.h" + +using namespace mmdeploy; + +using ResultType = mmdeploy::Structure; + +int mmdeploy_restorer_create(mmdeploy_model_t model, const char* device_name, int device_id, + mmdeploy_restorer_t* restorer) { + mmdeploy_context_t context{}; + auto ec = mmdeploy_context_create_by_device(device_name, device_id, &context); + if (ec != MMDEPLOY_SUCCESS) { + return ec; + } + ec = mmdeploy_restorer_create_v2(model, context, restorer); + mmdeploy_context_destroy(context); + return ec; +} + +int mmdeploy_restorer_create_by_path(const char* model_path, const char* device_name, int device_id, + mmdeploy_restorer_t* restorer) { + mmdeploy_model_t model{}; + if (auto ec = mmdeploy_model_create_by_path(model_path, &model)) { + return ec; + } + auto ec = mmdeploy_restorer_create(model, device_name, device_id, restorer); + mmdeploy_model_destroy(model); + return ec; +} + +int mmdeploy_restorer_apply(mmdeploy_restorer_t restorer, const mmdeploy_mat_t* images, int count, + mmdeploy_mat_t** results) { + wrapped input; + if (auto ec = mmdeploy_restorer_create_input(images, count, input.ptr())) { + return ec; + } + wrapped output; + if (auto ec = mmdeploy_restorer_apply_v2(restorer, input, output.ptr())) { + return ec; + } + if (auto ec = mmdeploy_restorer_get_result(output, results)) { + return ec; + } + return MMDEPLOY_SUCCESS; +} + +void mmdeploy_restorer_release_result(mmdeploy_mat_t* results, int count) { + ResultType deleter{static_cast(count), results}; +} + +void mmdeploy_restorer_destroy(mmdeploy_restorer_t restorer) { + mmdeploy_pipeline_destroy((mmdeploy_pipeline_t)restorer); +} + +int 
mmdeploy_restorer_create_v2(mmdeploy_model_t model, mmdeploy_context_t context, + mmdeploy_restorer_t* restorer) { + return mmdeploy_pipeline_create_from_model(model, context, (mmdeploy_pipeline_t*)restorer); +} + +int mmdeploy_restorer_create_input(const mmdeploy_mat_t* mats, int mat_count, + mmdeploy_value_t* value) { + return mmdeploy_common_create_input(mats, mat_count, value); +} + +int mmdeploy_restorer_apply_v2(mmdeploy_restorer_t restorer, mmdeploy_value_t input, + mmdeploy_value_t* output) { + return mmdeploy_pipeline_apply((mmdeploy_pipeline_t)restorer, input, output); +} + +int mmdeploy_restorer_apply_async(mmdeploy_restorer_t restorer, mmdeploy_sender_t input, + mmdeploy_sender_t* output) { + return mmdeploy_pipeline_apply_async((mmdeploy_pipeline_t)restorer, input, output); +} + +int mmdeploy_restorer_get_result(mmdeploy_value_t output, mmdeploy_mat_t** results) { + if (!output || !results) { + return MMDEPLOY_E_INVALID_ARG; + } + try { + const Value& value = Cast(output)->front(); + + auto restorer_output = from_value>(value); + auto count = restorer_output.size(); + + ResultType r(count); + auto [_results, buffers] = r.pointers(); + + for (int i = 0; i < count; ++i) { + auto upscale = restorer_output[i]; + auto& res = _results[i]; + res.data = upscale.data(); + buffers[i] = upscale.buffer(); + res.format = (mmdeploy_pixel_format_t)upscale.pixel_format(); + res.height = upscale.height(); + res.width = upscale.width(); + res.channel = upscale.channel(); + res.type = (mmdeploy_data_type_t)upscale.type(); + } + + *results = _results; + r.release(); + + return MMDEPLOY_SUCCESS; + } catch (const std::exception& e) { + MMDEPLOY_ERROR("unhandled exception: {}", e.what()); + } catch (...) 
{ + MMDEPLOY_ERROR("unknown exception caught"); + } + return MMDEPLOY_E_FAIL; +} diff --git a/csrc/mmdeploy/apis/c/mmdeploy/restorer.h b/csrc/mmdeploy/apis/c/mmdeploy/restorer.h new file mode 100644 index 0000000000000000000000000000000000000000..9ab529850f4b33935710ef85aec1ba057e1073c3 --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/restorer.h @@ -0,0 +1,91 @@ +// Copyright (c) OpenMMLab. All rights reserved. + +/** + * @file restorer.h + * @brief Interface to MMEditing image restoration task + */ + +#ifndef MMDEPLOY_SRC_APIS_C_RESTORER_H_ +#define MMDEPLOY_SRC_APIS_C_RESTORER_H_ + +#include "mmdeploy/common.h" +#include "mmdeploy/executor.h" +#include "mmdeploy/model.h" + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct mmdeploy_restorer* mmdeploy_restorer_t; + +/** + * @brief Create a restorer instance + * @param[in] model an instance of image restoration model created by + * \ref mmdeploy_model_create_by_path or \ref mmdeploy_model_create in \ref model.h + * @param[in] device_name name of device, such as "cpu", "cuda", etc. + * @param[in] device_id id of device. + * @param[out] restorer handle of the created restorer, which must be destroyed + * by \ref mmdeploy_restorer_destroy + * @return status code of the operation + */ +MMDEPLOY_API int mmdeploy_restorer_create(mmdeploy_model_t model, const char* device_name, + int device_id, mmdeploy_restorer_t* restorer); + +/** + * @brief Create a restorer instance + * @param[in] model_path path to image restoration model + * @param[in] device_name name of device, such as "cpu", "cuda", etc. + * @param[in] device_id id of device. 
+ * @param[out] restorer handle of the created restorer, which must be destroyed + * by \ref mmdeploy_restorer_destroy + * @return status code of the operation + */ +MMDEPLOY_API int mmdeploy_restorer_create_by_path(const char* model_path, const char* device_name, + int device_id, mmdeploy_restorer_t* restorer); + +/** + * @brief Apply restorer to a batch of images + * @param[in] restorer restorer's handle created by \ref mmdeploy_restorer_create_by_path + * @param[in] images a batch of images + * @param[in] count number of images in the batch + * @param[out] results a linear buffer contains the restored images, must be released + * by \ref mmdeploy_restorer_release_result + * @return status code of the operation + */ +MMDEPLOY_API int mmdeploy_restorer_apply(mmdeploy_restorer_t restorer, const mmdeploy_mat_t* images, + int count, mmdeploy_mat_t** results); + +/** @brief Release result buffer returned by \ref mmdeploy_restorer_apply + * @param[in] results result buffer returned by restorer + * @param[in] count length of \p results + */ +MMDEPLOY_API void mmdeploy_restorer_release_result(mmdeploy_mat_t* results, int count); + +/** + * @brief destroy restorer + * @param[in] restorer handle of restorer created by \ref mmdeploy_restorer_create_by_path + */ +MMDEPLOY_API void mmdeploy_restorer_destroy(mmdeploy_restorer_t restorer); + +/****************************************************************************** + * Experimental asynchronous APIs */ + +MMDEPLOY_API int mmdeploy_restorer_create_v2(mmdeploy_model_t model, mmdeploy_context_t context, + mmdeploy_restorer_t* restorer); + +MMDEPLOY_API int mmdeploy_restorer_create_input(const mmdeploy_mat_t* mats, int mat_count, + mmdeploy_value_t* value); + +MMDEPLOY_API int mmdeploy_restorer_apply_v2(mmdeploy_restorer_t restorer, mmdeploy_value_t input, + mmdeploy_value_t* output); + +MMDEPLOY_API int mmdeploy_restorer_apply_async(mmdeploy_restorer_t restorer, + mmdeploy_sender_t input, mmdeploy_sender_t* output); + +MMDEPLOY_API int
mmdeploy_restorer_get_result(mmdeploy_value_t output, mmdeploy_mat_t** results); + +#ifdef __cplusplus +} +#endif + +#endif // MMDEPLOY_SRC_APIS_C_RESTORER_H_ diff --git a/csrc/mmdeploy/apis/c/mmdeploy/rotated_detector.cpp b/csrc/mmdeploy/apis/c/mmdeploy/rotated_detector.cpp new file mode 100644 index 0000000000000000000000000000000000000000..d2172c54b8586e126a6ec8b6b78bd2360c88607d --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/rotated_detector.cpp @@ -0,0 +1,138 @@ +// Copyright (c) OpenMMLab. All rights reserved. + +#include "mmdeploy/rotated_detector.h" + +#include + +#include "mmdeploy/codebase/mmrotate/mmrotate.h" +#include "mmdeploy/common_internal.h" +#include "mmdeploy/core/graph.h" +#include "mmdeploy/core/mat.h" +#include "mmdeploy/core/utils/formatter.h" +#include "mmdeploy/handle.h" +#include "mmdeploy/pipeline.h" + +using namespace std; +using namespace mmdeploy; + +int mmdeploy_rotated_detector_create(mmdeploy_model_t model, const char* device_name, int device_id, + mmdeploy_rotated_detector_t* detector) { + mmdeploy_context_t context{}; + auto ec = mmdeploy_context_create_by_device(device_name, device_id, &context); + if (ec != MMDEPLOY_SUCCESS) { + return ec; + } + ec = mmdeploy_rotated_detector_create_v2(model, context, detector); + mmdeploy_context_destroy(context); + return ec; +} + +int mmdeploy_rotated_detector_create_by_path(const char* model_path, const char* device_name, + int device_id, mmdeploy_rotated_detector_t* detector) { + mmdeploy_model_t model{}; + + if (auto ec = mmdeploy_model_create_by_path(model_path, &model)) { + return ec; + } + auto ec = mmdeploy_rotated_detector_create(model, device_name, device_id, detector); + mmdeploy_model_destroy(model); + return ec; +} + +int mmdeploy_rotated_detector_apply(mmdeploy_rotated_detector_t detector, + const mmdeploy_mat_t* mats, int mat_count, + mmdeploy_rotated_detection_t** results, int** result_count) { + wrapped input; + if (auto ec = mmdeploy_rotated_detector_create_input(mats, 
mat_count, input.ptr())) { + return ec; + } + wrapped output; + if (auto ec = mmdeploy_rotated_detector_apply_v2(detector, input, output.ptr())) { + return ec; + } + if (auto ec = mmdeploy_rotated_detector_get_result(output, results, result_count)) { + return ec; + } + return MMDEPLOY_SUCCESS; +} + +void mmdeploy_rotated_detector_release_result(mmdeploy_rotated_detection_t* results, + const int* result_count) { + delete[] results; + delete[] result_count; +} + +void mmdeploy_rotated_detector_destroy(mmdeploy_rotated_detector_t detector) { + mmdeploy_pipeline_destroy((mmdeploy_pipeline_t)detector); +} + +int mmdeploy_rotated_detector_create_v2(mmdeploy_model_t model, mmdeploy_context_t context, + mmdeploy_rotated_detector_t* detector) { + return mmdeploy_pipeline_create_from_model(model, context, (mmdeploy_pipeline_t*)detector); +} + +int mmdeploy_rotated_detector_create_input(const mmdeploy_mat_t* mats, int mat_count, + mmdeploy_value_t* input) { + return mmdeploy_common_create_input(mats, mat_count, input); +} + +int mmdeploy_rotated_detector_apply_v2(mmdeploy_rotated_detector_t detector, mmdeploy_value_t input, + mmdeploy_value_t* output) { + return mmdeploy_pipeline_apply((mmdeploy_pipeline_t)detector, input, output); +} + +int mmdeploy_rotated_detector_apply_async(mmdeploy_rotated_detector_t detector, + mmdeploy_sender_t input, mmdeploy_sender_t* output) { + return mmdeploy_pipeline_apply_async((mmdeploy_pipeline_t)detector, input, output); +} + +int mmdeploy_rotated_detector_get_result(mmdeploy_value_t output, + mmdeploy_rotated_detection_t** results, + int** result_count) { + if (!output || !results || !result_count) { + return MMDEPLOY_E_INVALID_ARG; + } + + try { + Value& value = Cast(output)->front(); + auto detector_outputs = from_value>(value); + + vector _result_count; + _result_count.reserve(detector_outputs.size()); + for (const auto& det_output : detector_outputs) { + _result_count.push_back((int)det_output.detections.size()); + } + + auto total = 
std::accumulate(_result_count.begin(), _result_count.end(), 0); + + std::unique_ptr result_count_data(new int[_result_count.size()]{}); + std::copy(_result_count.begin(), _result_count.end(), result_count_data.get()); + + std::unique_ptr result_data( + new mmdeploy_rotated_detection_t[total]{}); + auto result_ptr = result_data.get(); + + for (const auto& det_output : detector_outputs) { + for (const auto& detection : det_output.detections) { + result_ptr->label_id = detection.label_id; + result_ptr->score = detection.score; + const auto& rbbox = detection.rbbox; + for (int i = 0; i < 5; i++) { + result_ptr->rbbox[i] = rbbox[i]; + } + ++result_ptr; + } + } + + *result_count = result_count_data.release(); + *results = result_data.release(); + + return MMDEPLOY_SUCCESS; + + } catch (const std::exception& e) { + MMDEPLOY_ERROR("unhandled exception: {}", e.what()); + } catch (...) { + MMDEPLOY_ERROR("unknown exception caught"); + } + return MMDEPLOY_E_FAIL; +} diff --git a/csrc/mmdeploy/apis/c/mmdeploy/rotated_detector.h b/csrc/mmdeploy/apis/c/mmdeploy/rotated_detector.h new file mode 100644 index 0000000000000000000000000000000000000000..35125a74fff6e3c0960760ac02ab8a976e9c2f34 --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/rotated_detector.h @@ -0,0 +1,140 @@ +// Copyright (c) OpenMMLab. All rights reserved. 
/**
 * @file rotated_detector.h
 * @brief Interface to MMRotate task
 */

#ifndef MMDEPLOY_SRC_APIS_C_ROTATED_DETECTOR_H_
#define MMDEPLOY_SRC_APIS_C_ROTATED_DETECTOR_H_

#include "mmdeploy/common.h"
#include "mmdeploy/executor.h"
#include "mmdeploy/model.h"

#ifdef __cplusplus
extern "C" {
#endif

typedef struct mmdeploy_rotated_detection_t {
  int label_id;    // index of the predicted class
  float score;     // confidence of this detection
  float rbbox[5];  // cx, cy, w, h, angle
                   // NOTE(review): the angle unit (radian vs. degree) is not established by this
                   // header — confirm against the mmrotate codebase output before relying on it
} mmdeploy_rotated_detection_t;

typedef struct mmdeploy_rotated_detector* mmdeploy_rotated_detector_t;

/**
 * @brief Create rotated detector's handle
 * @param[in] model an instance of mmrotate sdk model created by
 * \ref mmdeploy_model_create_by_path or \ref mmdeploy_model_create in \ref model.h
 * @param[in] device_name name of device, such as "cpu", "cuda", etc.
 * @param[in] device_id id of device.
 * @param[out] detector instance of a rotated detector, which must be destroyed
 * by \ref mmdeploy_rotated_detector_destroy
 * @return status of creating rotated detector's handle
 */
MMDEPLOY_API int mmdeploy_rotated_detector_create(mmdeploy_model_t model, const char* device_name,
                                                  int device_id,
                                                  mmdeploy_rotated_detector_t* detector);

/**
 * @brief Create rotated detector's handle
 * @param[in] model_path path of mmrotate sdk model exported by mmdeploy model converter
 * @param[in] device_name name of device, such as "cpu", "cuda", etc.
 * @param[in] device_id id of device.
 * @param[out] detector instance of a rotated detector, which must be destroyed
 * by \ref mmdeploy_rotated_detector_destroy
 * @return status of creating rotated detector's handle
 */
MMDEPLOY_API int mmdeploy_rotated_detector_create_by_path(const char* model_path,
                                                          const char* device_name, int device_id,
                                                          mmdeploy_rotated_detector_t* detector);

/**
 * @brief Apply rotated detector to batch images and get their inference results
 * @param[in] detector rotated detector's handle created by \ref
 * mmdeploy_rotated_detector_create_by_path
 * @param[in] mats a batch of images
 * @param[in] mat_count number of images in the batch
 * @param[out] results a linear buffer to save detection results of each image. It must be released
 * by \ref mmdeploy_rotated_detector_release_result
 * @param[out] result_count a linear buffer with length being \p mat_count to save the number of
 * detection results of each image. And it must be released by \ref
 * mmdeploy_rotated_detector_release_result
 * @return status of inference
 */
MMDEPLOY_API int mmdeploy_rotated_detector_apply(mmdeploy_rotated_detector_t detector,
                                                 const mmdeploy_mat_t* mats, int mat_count,
                                                 mmdeploy_rotated_detection_t** results,
                                                 int** result_count);

/** @brief Release the inference result buffer created by \ref mmdeploy_rotated_detector_apply
 * @param[in] results rotated detection results buffer
 * @param[in] result_count \p results size buffer
 */
MMDEPLOY_API void mmdeploy_rotated_detector_release_result(mmdeploy_rotated_detection_t* results,
                                                           const int* result_count);

/**
 * @brief Destroy rotated detector's handle
 * @param[in] detector rotated detector's handle created by \ref
 * mmdeploy_rotated_detector_create_by_path or by \ref mmdeploy_rotated_detector_create
 */
MMDEPLOY_API void mmdeploy_rotated_detector_destroy(mmdeploy_rotated_detector_t detector);

/******************************************************************************
 * Experimental asynchronous APIs */

/**
 * @brief Same as \ref mmdeploy_rotated_detector_create, but allows to control execution context
 * of tasks via context
 */
MMDEPLOY_API int mmdeploy_rotated_detector_create_v2(mmdeploy_model_t model,
                                                     mmdeploy_context_t context,
                                                     mmdeploy_rotated_detector_t* detector);

/**
 * @brief Pack rotated detector inputs into mmdeploy_value_t
 * @param[in] mats a batch of images
 * @param[in] mat_count number of images in the batch
 * @param[out] input the packed value
 * @return status of the operation
 */
MMDEPLOY_API int mmdeploy_rotated_detector_create_input(const mmdeploy_mat_t* mats, int mat_count,
                                                        mmdeploy_value_t* input);

/**
 * @brief Same as \ref mmdeploy_rotated_detector_apply, but input and output are packed in \ref
 * mmdeploy_value_t.
 */
MMDEPLOY_API int mmdeploy_rotated_detector_apply_v2(mmdeploy_rotated_detector_t detector,
                                                    mmdeploy_value_t input,
                                                    mmdeploy_value_t* output);

/**
 * @brief Apply rotated detector asynchronously
 * @param[in] detector handle to the detector
 * @param[in] input input sender
 * @return output sender
 */
MMDEPLOY_API int mmdeploy_rotated_detector_apply_async(mmdeploy_rotated_detector_t detector,
                                                       mmdeploy_sender_t input,
                                                       mmdeploy_sender_t* output);

/**
 * @brief Unpack rotated detector output from a mmdeploy_value_t
 * @param[in] output output obtained by applying a detector
 * @param[out] results a linear buffer to save detection results of each image. It must be released
 * by \ref mmdeploy_rotated_detector_release_result
 * @param[out] result_count a linear buffer with length number of input images to save the number of
 * detection results of each image.
Must be released by \ref + * mmdeploy_detector_release_result + * @return status of the operation + */ +MMDEPLOY_API int mmdeploy_rotated_detector_get_result(mmdeploy_value_t output, + mmdeploy_rotated_detection_t** results, + int** result_count); + +#ifdef __cplusplus +} +#endif + +#endif // MMDEPLOY_SRC_APIS_C_ROTATED_DETECTOR_H_ diff --git a/csrc/mmdeploy/apis/c/mmdeploy/segmentor.cpp b/csrc/mmdeploy/apis/c/mmdeploy/segmentor.cpp new file mode 100644 index 0000000000000000000000000000000000000000..c982df39e58a40cc6140d4e5f9b1252b07636ad0 --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/segmentor.cpp @@ -0,0 +1,128 @@ +// Copyright (c) OpenMMLab. All rights reserved. + +#include "mmdeploy/segmentor.h" + +#include "mmdeploy/codebase/mmseg/mmseg.h" +#include "mmdeploy/common_internal.h" +#include "mmdeploy/core/device.h" +#include "mmdeploy/core/graph.h" +#include "mmdeploy/core/mat.h" +#include "mmdeploy/core/mpl/structure.h" +#include "mmdeploy/core/tensor.h" +#include "mmdeploy/core/utils/formatter.h" +#include "mmdeploy/handle.h" +#include "mmdeploy/pipeline.h" + +using namespace std; +using namespace mmdeploy; + +using ResultType = mmdeploy::Structure; + +int mmdeploy_segmentor_create(mmdeploy_model_t model, const char* device_name, int device_id, + mmdeploy_segmentor_t* segmentor) { + mmdeploy_context_t context{}; + auto ec = mmdeploy_context_create_by_device(device_name, device_id, &context); + if (ec != MMDEPLOY_SUCCESS) { + return ec; + } + ec = mmdeploy_segmentor_create_v2(model, context, segmentor); + mmdeploy_context_destroy(context); + return ec; +} + +int mmdeploy_segmentor_create_by_path(const char* model_path, const char* device_name, + int device_id, mmdeploy_segmentor_t* segmentor) { + mmdeploy_model_t model{}; + if (auto ec = mmdeploy_model_create_by_path(model_path, &model)) { + return ec; + } + auto ec = mmdeploy_segmentor_create(model, device_name, device_id, segmentor); + mmdeploy_model_destroy(model); + return ec; +} + +int 
mmdeploy_segmentor_apply(mmdeploy_segmentor_t segmentor, const mmdeploy_mat_t* mats, + int mat_count, mmdeploy_segmentation_t** results) { + wrapped input; + if (auto ec = mmdeploy_segmentor_create_input(mats, mat_count, input.ptr())) { + return ec; + } + wrapped output; + if (auto ec = mmdeploy_segmentor_apply_v2(segmentor, input, output.ptr())) { + return ec; + } + if (auto ec = mmdeploy_segmentor_get_result(output, results)) { + return ec; + } + return MMDEPLOY_SUCCESS; +} + +void mmdeploy_segmentor_release_result(mmdeploy_segmentation_t* results, int count) { + ResultType deleter(static_cast(count), results); +} + +void mmdeploy_segmentor_destroy(mmdeploy_segmentor_t segmentor) { + mmdeploy_pipeline_destroy((mmdeploy_pipeline_t)segmentor); +} + +int mmdeploy_segmentor_create_v2(mmdeploy_model_t model, mmdeploy_context_t context, + mmdeploy_segmentor_t* segmentor) { + return mmdeploy_pipeline_create_from_model(model, context, (mmdeploy_pipeline_t*)segmentor); +} + +int mmdeploy_segmentor_create_input(const mmdeploy_mat_t* mats, int mat_count, + mmdeploy_value_t* value) { + return mmdeploy_common_create_input(mats, mat_count, value); +} + +int mmdeploy_segmentor_apply_v2(mmdeploy_segmentor_t segmentor, mmdeploy_value_t input, + mmdeploy_value_t* output) { + return mmdeploy_pipeline_apply((mmdeploy_pipeline_t)segmentor, input, output); +} + +int mmdeploy_segmentor_apply_async(mmdeploy_segmentor_t segmentor, mmdeploy_sender_t input, + mmdeploy_sender_t* output) { + return mmdeploy_pipeline_apply_async((mmdeploy_pipeline_t)segmentor, input, output); +} + +int mmdeploy_segmentor_get_result(mmdeploy_value_t output, mmdeploy_segmentation_t** results) { + try { + const auto& value = Cast(output)->front(); + size_t image_count = value.size(); + + ResultType r(image_count); + auto [results_data, buffers] = r.pointers(); + + auto results_ptr = results_data; + + for (auto i = 0; i < image_count; ++i, ++results_ptr) { + auto& output_item = value[i]; + MMDEPLOY_DEBUG("the 
{}-th item in output: {}", i, output_item); + auto segmentor_output = from_value(output_item); + results_ptr->height = segmentor_output.height; + results_ptr->width = segmentor_output.width; + results_ptr->classes = segmentor_output.classes; + auto& mask = segmentor_output.mask; + auto& score = segmentor_output.score; + results_ptr->mask = nullptr; + results_ptr->score = nullptr; + if (mask.shape().size()) { + results_ptr->mask = mask.data(); + buffers[i] = mask.buffer(); + } else { + results_ptr->score = score.data(); + buffers[i] = score.buffer(); + } + } + + *results = results_data; + r.release(); + + return MMDEPLOY_SUCCESS; + } catch (const std::exception& e) { + MMDEPLOY_ERROR("exception caught: {}", e.what()); + } catch (...) { + MMDEPLOY_ERROR("unknown exception caught"); + } + return MMDEPLOY_E_FAIL; +} diff --git a/csrc/mmdeploy/apis/c/mmdeploy/segmentor.h b/csrc/mmdeploy/apis/c/mmdeploy/segmentor.h new file mode 100644 index 0000000000000000000000000000000000000000..65bcfd03f305948de562c2324d6b1539cc6de9ce --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/segmentor.h @@ -0,0 +1,106 @@ +// Copyright (c) OpenMMLab. All rights reserved. 
/**
 * @file segmentor.h
 * @brief Interface to MMSegmentation task
 */

#ifndef MMDEPLOY_SEGMENTOR_H
#define MMDEPLOY_SEGMENTOR_H

#include "mmdeploy/common.h"
#include "mmdeploy/executor.h"
#include "mmdeploy/model.h"

#ifdef __cplusplus
extern "C" {
#endif

typedef struct mmdeploy_segmentation_t {
  int height;    ///< height of \p mask that equals to the input image's height
  int width;     ///< width of \p mask that equals to the input image's width
  int classes;   ///< the number of labels in \p mask
  int* mask;     ///< segmentation mask of the input image, in which mask[i * width + j] indicates
                 ///< the label id of pixel at (i, j), this field might be null
  float* score;  ///< segmentation score map of the input image in CHW format, in which
                 ///< score[height * width * k + i * width + j] indicates the score
                 ///< of class k at pixel (i, j), this field might be null
} mmdeploy_segmentation_t;

typedef struct mmdeploy_segmentor* mmdeploy_segmentor_t;

/**
 * @brief Create segmentor's handle
 * @param[in] model an instance of mmsegmentation sdk model created by
 * \ref mmdeploy_model_create_by_path or \ref mmdeploy_model_create in \ref model.h
 * @param[in] device_name name of device, such as "cpu", "cuda", etc.
 * @param[in] device_id id of device.
 * @param[out] segmentor instance of a segmentor, which must be destroyed
 * by \ref mmdeploy_segmentor_destroy
 * @return status of creating segmentor's handle
 */
MMDEPLOY_API int mmdeploy_segmentor_create(mmdeploy_model_t model, const char* device_name,
                                           int device_id, mmdeploy_segmentor_t* segmentor);

/**
 * @brief Create segmentor's handle
 * @param[in] model_path path of mmsegmentation sdk model exported by mmdeploy model converter
 * @param[in] device_name name of device, such as "cpu", "cuda", etc.
 * @param[in] device_id id of device.
 * @param[out] segmentor instance of a segmentor, which must be destroyed
 * by \ref mmdeploy_segmentor_destroy
 * @return status of creating segmentor's handle
 */
MMDEPLOY_API int mmdeploy_segmentor_create_by_path(const char* model_path, const char* device_name,
                                                   int device_id, mmdeploy_segmentor_t* segmentor);

/**
 * @brief Apply segmentor to batch images and get their inference results
 * @param[in] segmentor segmentor's handle created by \ref mmdeploy_segmentor_create_by_path or \ref
 * mmdeploy_segmentor_create
 * @param[in] mats a batch of images
 * @param[in] mat_count number of images in the batch
 * @param[out] results a linear buffer of length \p mat_count to save segmentation result of each
 * image. It must be released by \ref mmdeploy_segmentor_release_result
 * @return status of inference
 */
MMDEPLOY_API int mmdeploy_segmentor_apply(mmdeploy_segmentor_t segmentor,
                                          const mmdeploy_mat_t* mats, int mat_count,
                                          mmdeploy_segmentation_t** results);

/**
 * @brief Release result buffer returned by \ref mmdeploy_segmentor_apply
 * @param[in] results result buffer
 * @param[in] count length of \p results
 */
MMDEPLOY_API void mmdeploy_segmentor_release_result(mmdeploy_segmentation_t* results, int count);

/**
 * @brief Destroy segmentor's handle
 * @param[in] segmentor segmentor's handle created by \ref mmdeploy_segmentor_create_by_path
 * or by \ref mmdeploy_segmentor_create
 */
MMDEPLOY_API void mmdeploy_segmentor_destroy(mmdeploy_segmentor_t segmentor);

/******************************************************************************
 * Experimental asynchronous APIs */

/** @brief Same as \ref mmdeploy_segmentor_create, but takes an execution context */
MMDEPLOY_API int mmdeploy_segmentor_create_v2(mmdeploy_model_t model, mmdeploy_context_t context,
                                              mmdeploy_segmentor_t* segmentor);

/** @brief Pack segmentor inputs into a \ref mmdeploy_value_t */
MMDEPLOY_API int mmdeploy_segmentor_create_input(const mmdeploy_mat_t* mats, int mat_count,
                                                 mmdeploy_value_t* value);

/** @brief Same as \ref mmdeploy_segmentor_apply, but input and output are packed values */
MMDEPLOY_API int mmdeploy_segmentor_apply_v2(mmdeploy_segmentor_t segmentor, mmdeploy_value_t input,
                                             mmdeploy_value_t* output);

/** @brief Apply the segmentor asynchronously via senders */
MMDEPLOY_API int mmdeploy_segmentor_apply_async(mmdeploy_segmentor_t segmentor,
                                                mmdeploy_sender_t input, mmdeploy_sender_t* output);

/** @brief Unpack segmentation results from a pipeline output value */
MMDEPLOY_API int mmdeploy_segmentor_get_result(mmdeploy_value_t output,
                                               mmdeploy_segmentation_t** results);

#ifdef __cplusplus
}
#endif

#endif  // MMDEPLOY_SEGMENTOR_H

// ==== patch file boundary: csrc/mmdeploy/apis/c/mmdeploy/text_detector.cpp ====

// Copyright (c) OpenMMLab. All rights reserved.

#include "mmdeploy/text_detector.h"

#include  // NOTE(review): the include target was stripped from the patch; std::accumulate
           // is used in this file, so this was most likely <numeric> — confirm upstream

#include "mmdeploy/codebase/mmocr/mmocr.h"
#include "mmdeploy/common_internal.h"
#include "mmdeploy/core/model.h"
#include "mmdeploy/core/status_code.h"
#include "mmdeploy/core/utils/formatter.h"
#include "mmdeploy/executor_internal.h"
#include "mmdeploy/model.h"
#include "mmdeploy/pipeline.h"

using namespace std;
using namespace mmdeploy;

// Create a text detector on a named device; wraps the context-based v2 API and
// always destroys the temporary context before returning.
int mmdeploy_text_detector_create(mmdeploy_model_t model, const char* device_name, int device_id,
                                  mmdeploy_text_detector_t* detector) {
  mmdeploy_context_t context{};
  auto ec = mmdeploy_context_create_by_device(device_name, device_id, &context);
  if (ec != MMDEPLOY_SUCCESS) {
    return ec;
  }
  ec = mmdeploy_text_detector_create_v2(model, context, detector);
  mmdeploy_context_destroy(context);
  return ec;
}

int mmdeploy_text_detector_create_v2(mmdeploy_model_t model, mmdeploy_context_t context,
                                     mmdeploy_text_detector_t* detector) {
  return mmdeploy_pipeline_create_from_model(model, context, (mmdeploy_pipeline_t*)detector);
}

// Load the SDK model from disk, then delegate to mmdeploy_text_detector_create.
int mmdeploy_text_detector_create_by_path(const char* model_path, const char* device_name,
                                          int device_id, mmdeploy_text_detector_t* detector) {
  mmdeploy_model_t model{};
  if (auto ec = mmdeploy_model_create_by_path(model_path, &model)) {
    return ec;
  }
auto ec = mmdeploy_text_detector_create(model, device_name, device_id, detector); + mmdeploy_model_destroy(model); + return ec; +} + +int mmdeploy_text_detector_create_input(const mmdeploy_mat_t* mats, int mat_count, + mmdeploy_value_t* input) { + return mmdeploy_common_create_input(mats, mat_count, input); +} + +int mmdeploy_text_detector_apply(mmdeploy_text_detector_t detector, const mmdeploy_mat_t* mats, + int mat_count, mmdeploy_text_detection_t** results, + int** result_count) { + wrapped input; + if (auto ec = mmdeploy_text_detector_create_input(mats, mat_count, input.ptr())) { + return ec; + } + wrapped output; + if (auto ec = mmdeploy_text_detector_apply_v2(detector, input, output.ptr())) { + return ec; + } + if (auto ec = mmdeploy_text_detector_get_result(output, results, result_count)) { + return ec; + } + return MMDEPLOY_SUCCESS; +} + +int mmdeploy_text_detector_apply_v2(mmdeploy_text_detector_t detector, mmdeploy_value_t input, + mmdeploy_value_t* output) { + return mmdeploy_pipeline_apply((mmdeploy_pipeline_t)detector, input, output); +} + +int mmdeploy_text_detector_apply_async(mmdeploy_text_detector_t detector, mmdeploy_sender_t input, + mmdeploy_sender_t* output) { + return mmdeploy_pipeline_apply_async((mmdeploy_pipeline_t)detector, input, output); +} + +int mmdeploy_text_detector_get_result(mmdeploy_value_t output, mmdeploy_text_detection_t** results, + int** result_count) { + if (!output || !results || !result_count) { + return MMDEPLOY_E_INVALID_ARG; + } + try { + Value& value = reinterpret_cast(output)->front(); + auto detector_outputs = from_value>(value); + + vector _result_count; + _result_count.reserve(detector_outputs.size()); + for (const auto& det_output : detector_outputs) { + _result_count.push_back((int)det_output.size()); + } + + auto total = std::accumulate(_result_count.begin(), _result_count.end(), 0); + + std::unique_ptr result_count_data(new int[_result_count.size()]{}); + std::copy(_result_count.begin(), _result_count.end(), 
result_count_data.get()); + + std::unique_ptr result_data( + new mmdeploy_text_detection_t[total]{}); + auto result_ptr = result_data.get(); + + for (const auto& det_output : detector_outputs) { + for (auto i = 0; i < det_output.size(); ++i, ++result_ptr) { + result_ptr->score = det_output[i].score; + auto& bbox = det_output[i].bbox; + for (auto j = 0; j < bbox.size(); j += 2) { + result_ptr->bbox[j / 2].x = bbox[j]; + result_ptr->bbox[j / 2].y = bbox[j + 1]; + } + } + } + + *result_count = result_count_data.release(); + *results = result_data.release(); + + return MMDEPLOY_SUCCESS; + + } catch (const std::exception& e) { + MMDEPLOY_ERROR("unhandled exception: {}", e.what()); + } catch (...) { + MMDEPLOY_ERROR("unknown exception caught"); + } + return 0; +} + +void mmdeploy_text_detector_release_result(mmdeploy_text_detection_t* results, + const int* result_count, int count) { + delete[] results; + delete[] result_count; +} + +void mmdeploy_text_detector_destroy(mmdeploy_text_detector_t detector) { + mmdeploy_pipeline_destroy((mmdeploy_pipeline_t)detector); +} + +int mmdeploy_text_detector_apply_async_v2(mmdeploy_text_detector_t detector, + const mmdeploy_mat_t* imgs, int img_count, + mmdeploy_text_detector_continue_t cont, void* context, + mmdeploy_sender_t* output) { + mmdeploy_sender_t result_sender{}; + if (auto ec = mmdeploy_text_detector_apply_async_v3(detector, imgs, img_count, &result_sender)) { + return ec; + } + if (auto ec = mmdeploy_text_detector_continue_async(result_sender, cont, context, output)) { + return ec; + } + return MMDEPLOY_SUCCESS; +} + +int mmdeploy_text_detector_apply_async_v3(mmdeploy_text_detector_t detector, + const mmdeploy_mat_t* imgs, int img_count, + mmdeploy_sender_t* output) { + wrapped input_val; + if (auto ec = mmdeploy_text_detector_create_input(imgs, img_count, input_val.ptr())) { + return ec; + } + mmdeploy_sender_t input_sndr = mmdeploy_executor_just(input_val); + if (auto ec = mmdeploy_text_detector_apply_async(detector, 
input_sndr, output)) {
    return ec;
  }
  return MMDEPLOY_SUCCESS;
}

// Chains a user continuation onto a sender produced by apply_async_v3. The continuation
// receives the unpacked result buffers and reports a follow-up sender in `output`.
int mmdeploy_text_detector_continue_async(mmdeploy_sender_t input,
                                          mmdeploy_text_detector_continue_t cont, void* context,
                                          mmdeploy_sender_t* output) {
  auto sender = Guard([&] {
    return Take(
        LetValue(Take(input), [fn = cont, context](Value& value) -> TypeErasedSender {
          mmdeploy_text_detection_t* results{};
          int* result_count{};
          if (auto ec = mmdeploy_text_detector_get_result(Cast(&value), &results, &result_count)) {
            return Just(Value());
          }
          value = nullptr;
          mmdeploy_sender_t output{};
          // NOTE(review): ownership of `results`/`result_count` passes to `fn` here; if `fn`
          // fails they appear to leak — confirm the intended continuation contract.
          if (auto ec = fn(results, result_count, context, &output); ec || !output) {
            return Just(Value());
          }
          return Take(output);
        }));
  });
  if (sender) {
    *output = sender;
    return MMDEPLOY_SUCCESS;
  }
  return MMDEPLOY_E_FAIL;
}

// ==== patch file boundary: csrc/mmdeploy/apis/c/mmdeploy/text_detector.h ====

// Copyright (c) OpenMMLab. All rights reserved.

/**
 * @file text_detector.h
 * @brief Interface to MMOCR text detection task
 */

#ifndef MMDEPLOY_TEXT_DETECTOR_H
#define MMDEPLOY_TEXT_DETECTOR_H

#include "mmdeploy/common.h"
#include "mmdeploy/executor.h"
#include "mmdeploy/model.h"

#ifdef __cplusplus
extern "C" {
#endif

typedef struct mmdeploy_text_detection_t {
  mmdeploy_point_t bbox[4];  ///< a text bounding box of which the vertex are in clock-wise
  float score;
} mmdeploy_text_detection_t;

typedef struct mmdeploy_text_detector* mmdeploy_text_detector_t;

/**
 * @brief Create text-detector's handle
 * @param[in] model an instance of mmocr text detection model created by
 * \ref mmdeploy_model_create_by_path or \ref mmdeploy_model_create in \ref model.h
 * @param[in] device_name name of device, such as "cpu", "cuda", etc.
 * @param[in] device_id id of device.
 * @param[out] detector instance of a text-detector, which must be destroyed
 * by \ref mmdeploy_text_detector_destroy
 * @return status of creating text-detector's handle
 */
MMDEPLOY_API int mmdeploy_text_detector_create(mmdeploy_model_t model, const char* device_name,
                                               int device_id, mmdeploy_text_detector_t* detector);

/**
 * @brief Create text-detector's handle
 * @param[in] model_path path to text detection model
 * @param[in] device_name name of device, such as "cpu", "cuda", etc.
 * @param[in] device_id id of device
 * @param[out] detector instance of a text-detector, which must be destroyed
 * by \ref mmdeploy_text_detector_destroy
 * @return status of creating text-detector's handle
 */
MMDEPLOY_API int mmdeploy_text_detector_create_by_path(const char* model_path,
                                                       const char* device_name, int device_id,
                                                       mmdeploy_text_detector_t* detector);

/**
 * @brief Apply text-detector to batch images and get their inference results
 * @param[in] detector text-detector's handle created by \ref mmdeploy_text_detector_create_by_path
 * @param[in] mats a batch of images
 * @param[in] mat_count number of images in the batch
 * @param[out] results a linear buffer to save text detection results of each
 * image. It must be released by calling \ref mmdeploy_text_detector_release_result
 * @param[out] result_count a linear buffer of length \p mat_count to save the number of detection
 * results of each image. It must be released by \ref mmdeploy_text_detector_release_result
 * @return status of inference
 */
MMDEPLOY_API int mmdeploy_text_detector_apply(mmdeploy_text_detector_t detector,
                                              const mmdeploy_mat_t* mats, int mat_count,
                                              mmdeploy_text_detection_t** results,
                                              int** result_count);

/** @brief Release the inference result buffer returned by \ref mmdeploy_text_detector_apply
 * @param[in] results text detection result buffer
 * @param[in] result_count \p results size buffer
 * @param[in] count the length of buffer \p result_count
 */
MMDEPLOY_API void mmdeploy_text_detector_release_result(mmdeploy_text_detection_t* results,
                                                        const int* result_count, int count);

/**
 * @brief Destroy text-detector's handle
 * @param[in] detector text-detector's handle created by \ref mmdeploy_text_detector_create_by_path
 * or \ref mmdeploy_text_detector_create
 */
MMDEPLOY_API void mmdeploy_text_detector_destroy(mmdeploy_text_detector_t detector);

/******************************************************************************
 * Experimental asynchronous APIs */

/**
 * @brief Same as \ref mmdeploy_text_detector_create, but allows to control execution context of
 * tasks via context
 */
MMDEPLOY_API int mmdeploy_text_detector_create_v2(mmdeploy_model_t model,
                                                  mmdeploy_context_t context,
                                                  mmdeploy_text_detector_t* detector);

/**
 * @brief Pack text-detector inputs into mmdeploy_value_t
 * @param[in] mats a batch of images
 * @param[in] mat_count number of images in the batch
 * @param[out] input the packed value
 * @return status of the operation
 */
MMDEPLOY_API int mmdeploy_text_detector_create_input(const mmdeploy_mat_t* mats, int mat_count,
                                                     mmdeploy_value_t* input);

/**
 * @brief Same as \ref mmdeploy_text_detector_apply, but input and output are packed in \ref
 * mmdeploy_value_t.
 */
MMDEPLOY_API int mmdeploy_text_detector_apply_v2(mmdeploy_text_detector_t detector,
                                                 mmdeploy_value_t input, mmdeploy_value_t* output);

/**
 * @brief Apply text-detector asynchronously
 * @param[in] detector handle to the detector
 * @param[in] input input sender that will be consumed by the operation
 * @return output sender
 */
MMDEPLOY_API int mmdeploy_text_detector_apply_async(mmdeploy_text_detector_t detector,
                                                    mmdeploy_sender_t input,
                                                    mmdeploy_sender_t* output);

/**
 * @brief Unpack detector output from a mmdeploy_value_t
 * @param[in] output output sender returned by applying a detector
 * @param[out] results a linear buffer to save detection results of each image. It must be
 * released by \ref mmdeploy_text_detector_release_result
 * @param[out] result_count a linear buffer with length number of input images to save the
 * number of detection results of each image. Must be released by \ref
 * mmdeploy_text_detector_release_result
 * @return status of the operation
 */
MMDEPLOY_API
int mmdeploy_text_detector_get_result(mmdeploy_value_t output, mmdeploy_text_detection_t** results,
                                      int** result_count);

/// Continuation invoked with the unpacked detections. NOTE(review): it appears to take
/// ownership of \p results / \p result_count and produce a follow-up sender in \p output —
/// confirm the ownership contract against callers.
typedef int (*mmdeploy_text_detector_continue_t)(mmdeploy_text_detection_t* results,
                                                 int* result_count, void* context,
                                                 mmdeploy_sender_t* output);

// MMDEPLOY_API int mmdeploy_text_detector_apply_async_v2(mm_handle_t handle, const mm_mat_t* imgs,
//                                                        int img_count,
//                                                        mmdeploy_text_detector_continuation_t
//                                                        cont, void* context, mmdeploy_sender_t*
//                                                        output);

MMDEPLOY_API int mmdeploy_text_detector_apply_async_v3(mmdeploy_text_detector_t detector,
                                                       const mmdeploy_mat_t* imgs, int img_count,
                                                       mmdeploy_sender_t* output);

MMDEPLOY_API int mmdeploy_text_detector_continue_async(mmdeploy_sender_t input,
                                                       mmdeploy_text_detector_continue_t cont,
                                                       void* context, mmdeploy_sender_t* output);

#ifdef __cplusplus
}
#endif

#endif  // MMDEPLOY_TEXT_DETECTOR_H
a/csrc/mmdeploy/apis/c/mmdeploy/text_recognizer.cpp b/csrc/mmdeploy/apis/c/mmdeploy/text_recognizer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..3c8cfbb5c6caaf9a3da4b873b91e28bb7da13985 --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/text_recognizer.cpp @@ -0,0 +1,252 @@ +// Copyright (c) OpenMMLab. All rights reserved. + +#include "mmdeploy/text_recognizer.h" + +#include + +#include "mmdeploy/archive/value_archive.h" +#include "mmdeploy/codebase/mmocr/mmocr.h" +#include "mmdeploy/common_internal.h" +#include "mmdeploy/core/device.h" +#include "mmdeploy/core/mat.h" +#include "mmdeploy/core/model.h" +#include "mmdeploy/core/status_code.h" +#include "mmdeploy/core/utils/formatter.h" +#include "mmdeploy/core/value.h" +#include "mmdeploy/executor_internal.h" +#include "mmdeploy/model.h" +#include "mmdeploy/pipeline.h" + +using namespace mmdeploy; + +namespace { + +Value config_template(const Model& model) { + // clang-format off + return { + {"type", "Pipeline"}, + {"input", {"imgs", "bboxes"}}, + { + "tasks", { + { + {"type", "Task"}, + {"module", "WarpBbox"}, + {"input", {"imgs", "bboxes"}}, + {"output", "patches"}, + }, + { + {"type", "Inference"}, + {"input", "patches"}, + {"output", "texts"}, + {"params", {{"model", model}}}, + } + } + }, + {"output", "texts"}, + }; + // clang-format on +} + +} // namespace + +int mmdeploy_text_recognizer_create(mmdeploy_model_t model, const char* device_name, int device_id, + mmdeploy_text_recognizer_t* recognizer) { + mmdeploy_context_t context{}; + auto ec = mmdeploy_context_create_by_device(device_name, device_id, &context); + if (ec != MMDEPLOY_SUCCESS) { + return ec; + } + ec = mmdeploy_text_recognizer_create_v2(model, context, recognizer); + mmdeploy_context_destroy(context); + return ec; +} + +int mmdeploy_text_recognizer_create_v2(mmdeploy_model_t model, mmdeploy_context_t context, + mmdeploy_text_recognizer_t* recognizer) { + auto config = config_template(*Cast(model)); + return 
mmdeploy_pipeline_create_v3(Cast(&config), context, (mmdeploy_pipeline_t*)recognizer); +} + +int mmdeploy_text_recognizer_create_by_path(const char* model_path, const char* device_name, + int device_id, mmdeploy_text_recognizer_t* recognizer) { + mmdeploy_model_t model{}; + if (auto ec = mmdeploy_model_create_by_path(model_path, &model)) { + return ec; + } + auto ec = mmdeploy_text_recognizer_create(model, device_name, device_id, recognizer); + mmdeploy_model_destroy(model); + return ec; +} + +int mmdeploy_text_recognizer_apply(mmdeploy_text_recognizer_t recognizer, + const mmdeploy_mat_t* images, int count, + mmdeploy_text_recognition_t** results) { + return mmdeploy_text_recognizer_apply_bbox(recognizer, images, count, nullptr, nullptr, results); +} + +int mmdeploy_text_recognizer_create_input(const mmdeploy_mat_t* images, int image_count, + const mmdeploy_text_detection_t* bboxes, + const int* bbox_count, mmdeploy_value_t* output) { + if (image_count && images == nullptr) { + return MMDEPLOY_E_INVALID_ARG; + } + try { + Value::Array input_images; + Value::Array input_bboxes; + + auto add_bbox = [&](Mat img, const mmdeploy_text_detection_t* det) { + if (det) { + const auto& b = det->bbox; + Value::Array bbox{b[0].x, b[0].y, b[1].x, b[1].y, b[2].x, b[2].y, b[3].x, b[3].y}; + input_bboxes.push_back({{"bbox", std::move(bbox)}}); + } else { + input_bboxes.push_back(nullptr); + } + input_images.push_back({{"ori_img", img}}); + }; + + for (int i = 0; i < image_count; ++i) { + auto _mat = Cast(images[i]); + if (bboxes && bbox_count) { + for (int j = 0; j < bbox_count[i]; ++j) { + add_bbox(_mat, bboxes++); + } + } else { // inference with whole image + add_bbox(_mat, nullptr); + } + } + + *output = Take(Value{std::move(input_images), std::move(input_bboxes)}); + return MMDEPLOY_SUCCESS; + } catch (const std::exception& e) { + MMDEPLOY_ERROR("exception caught: {}", e.what()); + } catch (...) 
{ + MMDEPLOY_ERROR("unknown exception caught"); + } + return MMDEPLOY_E_FAIL; +} + +int mmdeploy_text_recognizer_apply_bbox(mmdeploy_text_recognizer_t recognizer, + const mmdeploy_mat_t* images, int image_count, + const mmdeploy_text_detection_t* bboxes, + const int* bbox_count, + mmdeploy_text_recognition_t** results) { + wrapped input; + if (auto ec = mmdeploy_text_recognizer_create_input(images, image_count, bboxes, bbox_count, + input.ptr())) { + return ec; + } + wrapped output; + if (auto ec = mmdeploy_text_recognizer_apply_v2(recognizer, input, output.ptr())) { + return ec; + } + if (auto ec = mmdeploy_text_recognizer_get_result(output, results)) { + return ec; + } + return MMDEPLOY_SUCCESS; +} + +int mmdeploy_text_recognizer_apply_v2(mmdeploy_text_recognizer_t recognizer, mmdeploy_value_t input, + mmdeploy_value_t* output) { + return mmdeploy_pipeline_apply((mmdeploy_pipeline_t)recognizer, input, output); +} + +int mmdeploy_text_recognizer_apply_async(mmdeploy_text_recognizer_t recognizer, + mmdeploy_sender_t input, mmdeploy_sender_t* output) { + return mmdeploy_pipeline_apply_async((mmdeploy_pipeline_t)recognizer, input, output); +} + +MMDEPLOY_API int mmdeploy_text_recognizer_get_result(mmdeploy_value_t output, + mmdeploy_text_recognition_t** results) { + if (!output || !results) { + return MMDEPLOY_E_INVALID_ARG; + } + try { + std::vector recognitions; + from_value(Cast(output)->front(), recognitions); + + size_t count = recognitions.size(); + + auto deleter = [&](mmdeploy_text_recognition_t* p) { + mmdeploy_text_recognizer_release_result(p, static_cast(count)); + }; + + std::unique_ptr _results( + new mmdeploy_text_recognition_t[count]{}, deleter); + + size_t result_idx = 0; + for (const auto& bbox_result : recognitions) { + auto& res = _results[result_idx++]; + + auto& score = bbox_result.score; + res.length = static_cast(score.size()); + + res.score = new float[score.size()]; + std::copy_n(score.data(), score.size(), res.score); + + auto text = 
bbox_result.text; + res.text = new char[text.length() + 1]; + std::copy_n(text.data(), text.length() + 1, res.text); + } + + *results = _results.release(); + } catch (const std::exception& e) { + MMDEPLOY_ERROR("exception caught: {}", e.what()); + } catch (...) { + MMDEPLOY_ERROR("unknown exception caught"); + } + return MMDEPLOY_SUCCESS; +} + +void mmdeploy_text_recognizer_release_result(mmdeploy_text_recognition_t* results, int count) { + for (int i = 0; i < count; ++i) { + delete[] results[i].score; + delete[] results[i].text; + } + delete[] results; +} + +void mmdeploy_text_recognizer_destroy(mmdeploy_text_recognizer_t recognizer) { + mmdeploy_pipeline_destroy((mmdeploy_pipeline_t)recognizer); +} + +int mmdeploy_text_recognizer_apply_async_v3(mmdeploy_text_recognizer_t recognizer, + const mmdeploy_mat_t* imgs, int img_count, + const mmdeploy_text_detection_t* bboxes, + const int* bbox_count, mmdeploy_sender_t* output) { + wrapped input_val; + if (auto ec = mmdeploy_text_recognizer_create_input(imgs, img_count, bboxes, bbox_count, + input_val.ptr())) { + return ec; + } + mmdeploy_sender_t input_sndr = mmdeploy_executor_just(input_val); + if (auto ec = mmdeploy_text_recognizer_apply_async(recognizer, input_sndr, output)) { + return ec; + } + return MMDEPLOY_SUCCESS; +} + +int mmdeploy_text_recognizer_continue_async(mmdeploy_sender_t input, + mmdeploy_text_recognizer_continue_t cont, void* context, + mmdeploy_sender_t* output) { + auto sender = Guard([&] { + return Take( + LetValue(Take(input), [fn = cont, context](Value& value) -> TypeErasedSender { + mmdeploy_text_recognition_t* results{}; + if (auto ec = mmdeploy_text_recognizer_get_result(Cast(&value), &results)) { + return Just(Value()); + } + value = nullptr; + mmdeploy_sender_t output{}; + if (auto ec = fn(results, context, &output); ec || !output) { + return Just(Value()); + } + return Take(output); + })); + }); + if (sender) { + *output = sender; + return MMDEPLOY_SUCCESS; + } + return MMDEPLOY_E_FAIL; +} 
diff --git a/csrc/mmdeploy/apis/c/mmdeploy/text_recognizer.h b/csrc/mmdeploy/apis/c/mmdeploy/text_recognizer.h new file mode 100644 index 0000000000000000000000000000000000000000..6c18928242222d7a84786e7826eb0de22e2c6ed5 --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/text_recognizer.h @@ -0,0 +1,164 @@ +// Copyright (c) OpenMMLab. All rights reserved. + +/** + * @file text_recognizer.h + * @brief Interface to MMOCR text recognition task + */ + +#ifndef MMDEPLOY_SRC_APIS_C_TEXT_RECOGNIZER_H_ +#define MMDEPLOY_SRC_APIS_C_TEXT_RECOGNIZER_H_ + +#include "mmdeploy/common.h" +#include "mmdeploy/executor.h" +#include "mmdeploy/text_detector.h" + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct mmdeploy_text_recognition_t { + char* text; + float* score; + int length; +} mmdeploy_text_recognition_t; + +typedef struct mmdeploy_text_recognizer* mmdeploy_text_recognizer_t; + +/** + * @brief Create a text recognizer instance + * @param[in] model an instance of mmocr text recognition model created by + * \ref mmdeploy_model_create_by_path or \ref mmdeploy_model_create in \ref model.h + * @param[in] device_name name of device, such as "cpu", "cuda", etc. + * @param[in] device_id id of device. + * @param[out] recognizer handle of the created text recognizer, which must be destroyed + * by \ref mmdeploy_text_recognizer_destroy + * @return status code of the operation + */ +MMDEPLOY_API int mmdeploy_text_recognizer_create(mmdeploy_model_t model, const char* device_name, + int device_id, + mmdeploy_text_recognizer_t* recognizer); + +/** + * @brief Create a text recognizer instance + * @param[in] model_path path to text recognition model + * @param[in] device_name name of device, such as "cpu", "cuda", etc. + * @param[in] device_id id of device. 
+ * @param[out] recognizer handle of the created text recognizer, which must be destroyed + * by \ref mmdeploy_text_recognizer_destroy + * @return status code of the operation + */ +MMDEPLOY_API int mmdeploy_text_recognizer_create_by_path(const char* model_path, + const char* device_name, int device_id, + mmdeploy_text_recognizer_t* recognizer); + +/** + * @brief Apply text recognizer to a batch of text images + * @param[in] recognizer text recognizer's handle created by \ref + * mmdeploy_text_recognizer_create_by_path + * @param[in] images a batch of text images + * @param[in] count number of images in the batch + * @param[out] results a linear buffer contains the recognized text, must be release + * by \ref mmdeploy_text_recognizer_release_result + * @return status code of the operation + */ +MMDEPLOY_API int mmdeploy_text_recognizer_apply(mmdeploy_text_recognizer_t recognizer, + const mmdeploy_mat_t* images, int count, + mmdeploy_text_recognition_t** results); + +/** + * @brief Apply text recognizer to a batch of images supplied with text bboxes + * @param[in] recognizer text recognizer's handle created by \ref + * mmdeploy_text_recognizer_create_by_path + * @param[in] images a batch of text images + * @param[in] image_count number of images in the batch + * @param[in] bboxes bounding boxes detected by text detector + * @param[in] bbox_count number of bboxes of each \p images, must be same length as \p images + * @param[out] results a linear buffer contains the recognized text, which has the same length as \p + * bboxes, must be release by \ref mmdeploy_text_recognizer_release_result + * @return status code of the operation + */ +MMDEPLOY_API int mmdeploy_text_recognizer_apply_bbox(mmdeploy_text_recognizer_t recognizer, + const mmdeploy_mat_t* images, int image_count, + const mmdeploy_text_detection_t* bboxes, + const int* bbox_count, + mmdeploy_text_recognition_t** results); + +/** @brief Release result buffer returned by \ref mmdeploy_text_recognizer_apply or 
\ref + * mmdeploy_text_recognizer_apply_bbox + * @param[in] results result buffer by text recognizer + * @param[in] count length of \p result + */ +MMDEPLOY_API void mmdeploy_text_recognizer_release_result(mmdeploy_text_recognition_t* results, + int count); + +/** + * @brief destroy text recognizer + * @param[in] recognizer handle of text recognizer created by \ref + * mmdeploy_text_recognizer_create_by_path or \ref mmdeploy_text_recognizer_create + */ +MMDEPLOY_API void mmdeploy_text_recognizer_destroy(mmdeploy_text_recognizer_t recognizer); + +/****************************************************************************** + * Experimental asynchronous APIs */ + +/** + * @brief Same as \ref mmdeploy_text_recognizer_create, but allows to control execution context of + * tasks via context + */ +MMDEPLOY_API int mmdeploy_text_recognizer_create_v2(mmdeploy_model_t model, + mmdeploy_context_t context, + mmdeploy_text_recognizer_t* recognizer); + +/** + * @brief Pack text-recognizer inputs into mmdeploy_value_t + * @param[in] images a batch of images + * @param[in] image_count number of images in the batch + * @param[in] bboxes bounding boxes detected by text detector + * @param[in] bbox_count number of bboxes of each \p images, must be same length as \p images + * @return value created + */ +MMDEPLOY_API int mmdeploy_text_recognizer_create_input(const mmdeploy_mat_t* images, + int image_count, + const mmdeploy_text_detection_t* bboxes, + const int* bbox_count, + mmdeploy_value_t* output); + +MMDEPLOY_API int mmdeploy_text_recognizer_apply_v2(mmdeploy_text_recognizer_t recognizer, + mmdeploy_value_t input, + mmdeploy_value_t* output); + +/** + * @brief Same as \ref mmdeploy_text_recognizer_apply_bbox, but input and output are packed in \ref + * mmdeploy_value_t. 
+ */ +MMDEPLOY_API int mmdeploy_text_recognizer_apply_async(mmdeploy_text_recognizer_t recognizer, + mmdeploy_sender_t input, + mmdeploy_sender_t* output); + +typedef int (*mmdeploy_text_recognizer_continue_t)(mmdeploy_text_recognition_t* results, + void* context, mmdeploy_sender_t* output); + +MMDEPLOY_API int mmdeploy_text_recognizer_apply_async_v3(mmdeploy_text_recognizer_t recognizer, + const mmdeploy_mat_t* imgs, int img_count, + const mmdeploy_text_detection_t* bboxes, + const int* bbox_count, + mmdeploy_sender_t* output); + +MMDEPLOY_API int mmdeploy_text_recognizer_continue_async(mmdeploy_sender_t input, + mmdeploy_text_recognizer_continue_t cont, + void* context, mmdeploy_sender_t* output); + +/** + * @brief Unpack text-recognizer output from a mmdeploy_value_t + * @param[in] output + * @param[out] results + * @return status of the operation + */ +MMDEPLOY_API int mmdeploy_text_recognizer_get_result(mmdeploy_value_t output, + mmdeploy_text_recognition_t** results); + +#ifdef __cplusplus +} +#endif + +#endif // MMDEPLOY_SRC_APIS_C_TEXT_RECOGNIZER_H_ diff --git a/csrc/mmdeploy/apis/c/mmdeploy/video_recognizer.cpp b/csrc/mmdeploy/apis/c/mmdeploy/video_recognizer.cpp new file mode 100644 index 0000000000000000000000000000000000000000..de71e5784269c934e4aa480ec04717c85a999ffd --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/video_recognizer.cpp @@ -0,0 +1,165 @@ +// Copyright (c) OpenMMLab. All rights reserved. 
+ +#include "mmdeploy/video_recognizer.h" + +#include +#include + +#include "mmdeploy/archive/value_archive.h" +#include "mmdeploy/codebase/mmaction/mmaction.h" +#include "mmdeploy/common_internal.h" +#include "mmdeploy/core/device.h" +#include "mmdeploy/core/mat.h" +#include "mmdeploy/core/model.h" +#include "mmdeploy/core/status_code.h" +#include "mmdeploy/core/utils/formatter.h" +#include "mmdeploy/core/value.h" +#include "mmdeploy/executor_internal.h" +#include "mmdeploy/model.h" +#include "mmdeploy/pipeline.h" + +using namespace mmdeploy; + +int mmdeploy_video_recognizer_create(mmdeploy_model_t model, const char* device_name, int device_id, + mmdeploy_video_recognizer_t* recognizer) { + mmdeploy_context_t context{}; + auto ec = mmdeploy_context_create_by_device(device_name, device_id, &context); + if (ec != MMDEPLOY_SUCCESS) { + return ec; + } + ec = mmdeploy_video_recognizer_create_v2(model, context, recognizer); + mmdeploy_context_destroy(context); + return ec; +} + +int mmdeploy_video_recognizer_create_by_path(const char* model_path, const char* device_name, + int device_id, + mmdeploy_video_recognizer_t* recognizer) { + mmdeploy_model_t model{}; + + if (auto ec = mmdeploy_model_create_by_path(model_path, &model)) { + return ec; + } + auto ec = mmdeploy_video_recognizer_create(model, device_name, device_id, recognizer); + mmdeploy_model_destroy(model); + return ec; +} +int mmdeploy_video_recognizer_apply(mmdeploy_video_recognizer_t recognizer, + const mmdeploy_mat_t* images, + const mmdeploy_video_sample_info_t* video_info, int video_count, + mmdeploy_video_recognition_t** results, int** result_count) { + wrapped input; + if (auto ec = + mmdeploy_video_recognizer_create_input(images, video_info, video_count, input.ptr())) { + return ec; + } + + wrapped output; + if (auto ec = mmdeploy_video_recognizer_apply_v2(recognizer, input, output.ptr())) { + return ec; + } + + if (auto ec = mmdeploy_video_recognizer_get_result(output, results, result_count)) { + 
return ec; + } + return MMDEPLOY_SUCCESS; +} + +void mmdeploy_video_recognizer_release_result(mmdeploy_video_recognition_t* results, + int* result_count, int video_count) { + delete[] results; + delete[] result_count; +} + +void mmdeploy_video_recognizer_destroy(mmdeploy_video_recognizer_t recognizer) { + mmdeploy_pipeline_destroy((mmdeploy_pipeline_t)recognizer); +} + +int mmdeploy_video_recognizer_create_v2(mmdeploy_model_t model, mmdeploy_context_t context, + mmdeploy_video_recognizer_t* recognizer) { + return mmdeploy_pipeline_create_from_model(model, context, (mmdeploy_pipeline_t*)recognizer); +} + +int mmdeploy_video_recognizer_create_input(const mmdeploy_mat_t* images, + const mmdeploy_video_sample_info_t* video_info, + int video_count, mmdeploy_value_t* value) { + if (video_count && (images == nullptr || video_info == nullptr)) { + return MMDEPLOY_E_INVALID_ARG; + } + try { + auto input = std::make_unique(Value{Value::kArray}); + auto sample = std::make_unique(Value::kArray); + for (int i = 0; i < video_count; ++i) { + int clip_len = video_info[i].clip_len; + int num_clips = video_info[i].num_clips; + int n_mat = clip_len * num_clips; + for (int j = 0; j < n_mat; j++) { + mmdeploy::Mat _mat{images[j].height, + images[j].width, + PixelFormat(images[j].format), + DataType(images[j].type), + images[j].data, + images[j].device ? *(const Device*)(images[j].device) : Device{0}}; + sample->push_back({{"ori_img", _mat}, {"clip_len", clip_len}, {"num_clips", num_clips}}); + } + input->front().push_back(std::move(*sample.release())); + } + *value = Cast(input.release()); + } catch (const std::exception& e) { + MMDEPLOY_ERROR("unhandled exception: {}", e.what()); + } catch (...) 
{ + MMDEPLOY_ERROR("unknown exception caught"); + } + return MMDEPLOY_SUCCESS; +} + +int mmdeploy_video_recognizer_apply_v2(mmdeploy_video_recognizer_t recognizer, + mmdeploy_value_t input, mmdeploy_value_t* output) { + return mmdeploy_pipeline_apply((mmdeploy_pipeline_t)recognizer, input, output); +} + +int mmdeploy_video_recognizer_get_result(mmdeploy_value_t output, + mmdeploy_video_recognition_t** results, + int** result_count) { + if (!output || !results || !result_count) { + return MMDEPLOY_E_INVALID_ARG; + } + try { + Value& value = Cast(output)->front(); + + auto classify_outputs = from_value>(value); + + std::vector _result_count; + _result_count.reserve(classify_outputs.size()); + + for (const auto& cls_output : classify_outputs) { + _result_count.push_back((int)cls_output.size()); + } + + auto total = std::accumulate(begin(_result_count), end(_result_count), 0); + + std::unique_ptr result_count_data(new int[_result_count.size()]{}); + std::copy(_result_count.begin(), _result_count.end(), result_count_data.get()); + + std::unique_ptr result_data( + new mmdeploy_video_recognition_t[total]{}); + auto result_ptr = result_data.get(); + for (const auto& cls_output : classify_outputs) { + for (const auto& label : cls_output) { + result_ptr->label_id = label.label_id; + result_ptr->score = label.score; + ++result_ptr; + } + } + + *result_count = result_count_data.release(); + *results = result_data.release(); + + return MMDEPLOY_SUCCESS; + } catch (const std::exception& e) { + MMDEPLOY_ERROR("unhandled exception: {}", e.what()); + } catch (...) { + MMDEPLOY_ERROR("unknown exception caught"); + } + return MMDEPLOY_E_FAIL; +} diff --git a/csrc/mmdeploy/apis/c/mmdeploy/video_recognizer.h b/csrc/mmdeploy/apis/c/mmdeploy/video_recognizer.h new file mode 100644 index 0000000000000000000000000000000000000000..e98b2bd07e197e8f436608b5d6089d31575a3dfe --- /dev/null +++ b/csrc/mmdeploy/apis/c/mmdeploy/video_recognizer.h @@ -0,0 +1,139 @@ +// Copyright (c) OpenMMLab. 
All rights reserved. + +/** + * @file video_recognizer.h + * @brief Interface to MMACTION video recognition task + */ + +#ifndef MMDEPLOY_VIDEO_RECOGNIZER_H +#define MMDEPLOY_VIDEO_RECOGNIZER_H + +#include "mmdeploy/common.h" +#include "mmdeploy/executor.h" +#include "mmdeploy/model.h" + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct mmdeploy_video_recognition_t { + int label_id; + float score; +} mmdeploy_video_recognition_t; + +typedef struct mmdeploy_video_sample_info_t { + int clip_len; + int num_clips; +} mmdeploy_video_sample_info_t; + +typedef struct mmdeploy_video_recognizer* mmdeploy_video_recognizer_t; + +/** + * @brief Create video recognizer's handle + * @param[in] model an instance of mmaction sdk model created by + * \ref mmdeploy_model_create_by_path or \ref mmdeploy_model_create in \ref model.h + * @param[in] device_name name of device, such as "cpu", "cuda", etc. + * @param[in] device_id id of device. + * @param[out] recognizer handle of the created video recognizer, which must be destroyed + * by \ref mmdeploy_video_recognizer_destroy + * @return status of creating video recognizer's handle + */ +MMDEPLOY_API int mmdeploy_video_recognizer_create(mmdeploy_model_t model, const char* device_name, + int device_id, + mmdeploy_video_recognizer_t* recognizer); + +/** + * @brief Create a video recognizer instance + * @param[in] model_path path to video recognition model + * @param[in] device_name name of device, such as "cpu", "cuda", etc. + * @param[in] device_id id of device. 
+ * @param[out] recognizer handle of the created video recognizer, which must be destroyed + * by \ref mmdeploy_video_recognizer_destroy + * @return status code of the operation + */ +MMDEPLOY_API int mmdeploy_video_recognizer_create_by_path(const char* model_path, + const char* device_name, int device_id, + mmdeploy_video_recognizer_t* recognizer); + +/** + * @brief Apply video recognizer to a batch of videos + * @param[in] recognizer video recognizer's handle created by \ref + * mmdeploy_video_recognizer_create_by_path + * @param[in] images a batch of videos + * @param[in] video_info video information of each video + * @param[in] video_count number of videos + * @param[out] results a linear buffer contains the recognized video, must be release + * by \ref mmdeploy_video_recognizer_release_result + * @param[out] result_count a linear buffer with length being \p video_count to save the number of + * recognition results of each video. It must be released by \ref + * mmdeploy_video_recognizer_release_result + * @return status code of the operation + */ +MMDEPLOY_API int mmdeploy_video_recognizer_apply(mmdeploy_video_recognizer_t recognizer, + const mmdeploy_mat_t* images, + const mmdeploy_video_sample_info_t* video_info, + int video_count, + mmdeploy_video_recognition_t** results, + int** result_count); + +/** @brief Release result buffer returned by \ref mmdeploy_video_recognizer_apply + * @param[in] results result buffer by video recognizer + * @param[in] result_count \p results size buffer + * @param[in] video_count length of \p result_count + */ +MMDEPLOY_API void mmdeploy_video_recognizer_release_result(mmdeploy_video_recognition_t* results, + int* result_count, int video_count); + +/** + * @brief destroy video recognizer + * @param[in] recognizer handle of video recognizer created by \ref + * mmdeploy_video_recognizer_create_by_path or \ref mmdeploy_video_recognizer_create + */ +MMDEPLOY_API void mmdeploy_video_recognizer_destroy(mmdeploy_video_recognizer_t 
recognizer); + +/** + * @brief Same as \ref mmdeploy_video_recognizer_create, but allows to control execution context of + * tasks via context + */ +MMDEPLOY_API int mmdeploy_video_recognizer_create_v2(mmdeploy_model_t model, + mmdeploy_context_t context, + mmdeploy_video_recognizer_t* recognizer); + +/** + * @brief Pack video recognizer inputs into mmdeploy_value_t + * @param[in] images a batch of videos + * @param[in] video_info video information of each video + * @param[in] video_count number of videos in the batch + * @param[out] value created value + * @return status code of the operation + */ +MMDEPLOY_API int mmdeploy_video_recognizer_create_input( + const mmdeploy_mat_t* images, const mmdeploy_video_sample_info_t* video_info, int video_count, + mmdeploy_value_t* value); + +/** + * @brief Apply video recognizer to a batch of videos + * @param[in] input packed input + * @param[out] output inference output + * @return status code of the operation + */ +MMDEPLOY_API int mmdeploy_video_recognizer_apply_v2(mmdeploy_video_recognizer_t recognizer, + mmdeploy_value_t input, + mmdeploy_value_t* output); + +/** + * @brief Apply video recognizer to a batch of videos + * @param[in] output inference output + * @param[out] results structured output + * @param[out] result_count number of each videos + * @return status code of the operation + */ +MMDEPLOY_API int mmdeploy_video_recognizer_get_result(mmdeploy_value_t output, + mmdeploy_video_recognition_t** results, + int** result_count); + +#ifdef __cplusplus +} +#endif + +#endif // MMDEPLOY_VIDEO_RECOGNIZER_H diff --git a/csrc/mmdeploy/apis/csharp/MMDeploy.sln b/csrc/mmdeploy/apis/csharp/MMDeploy.sln new file mode 100644 index 0000000000000000000000000000000000000000..72cbead791feac01bdd0e248e4bb49b3c4a3db2b --- /dev/null +++ b/csrc/mmdeploy/apis/csharp/MMDeploy.sln @@ -0,0 +1,50 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 16 +VisualStudioVersion = 16.0.31729.503 
+MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MMDeploy", "MMDeploy\MMDeployCSharp.csproj", "{3DC914EB-A8FB-4A89-A7CF-7DF9CC5284A6}" +EndProject + +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {3DC914EB-A8FB-4A89-A7CF-7DF9CC5284A6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {3DC914EB-A8FB-4A89-A7CF-7DF9CC5284A6}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3DC914EB-A8FB-4A89-A7CF-7DF9CC5284A6}.Debug|x64.ActiveCfg = Debug|Any CPU + {3DC914EB-A8FB-4A89-A7CF-7DF9CC5284A6}.Debug|x64.Build.0 = Debug|Any CPU + {3DC914EB-A8FB-4A89-A7CF-7DF9CC5284A6}.Debug|x86.ActiveCfg = Debug|Any CPU + {3DC914EB-A8FB-4A89-A7CF-7DF9CC5284A6}.Debug|x86.Build.0 = Debug|Any CPU + {3DC914EB-A8FB-4A89-A7CF-7DF9CC5284A6}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3DC914EB-A8FB-4A89-A7CF-7DF9CC5284A6}.Release|Any CPU.Build.0 = Release|Any CPU + {3DC914EB-A8FB-4A89-A7CF-7DF9CC5284A6}.Release|x64.ActiveCfg = Release|Any CPU + {3DC914EB-A8FB-4A89-A7CF-7DF9CC5284A6}.Release|x64.Build.0 = Release|Any CPU + {3DC914EB-A8FB-4A89-A7CF-7DF9CC5284A6}.Release|x86.ActiveCfg = Release|Any CPU + {3DC914EB-A8FB-4A89-A7CF-7DF9CC5284A6}.Release|x86.Build.0 = Release|Any CPU + {661A4295-68CF-41C6-85B8-B11748113A6F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {661A4295-68CF-41C6-85B8-B11748113A6F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {661A4295-68CF-41C6-85B8-B11748113A6F}.Debug|x64.ActiveCfg = Debug|Any CPU + {661A4295-68CF-41C6-85B8-B11748113A6F}.Debug|x64.Build.0 = Debug|Any CPU + {661A4295-68CF-41C6-85B8-B11748113A6F}.Debug|x86.ActiveCfg = Debug|Any CPU + {661A4295-68CF-41C6-85B8-B11748113A6F}.Debug|x86.Build.0 = Debug|Any CPU + {661A4295-68CF-41C6-85B8-B11748113A6F}.Release|Any 
CPU.ActiveCfg = Release|Any CPU + {661A4295-68CF-41C6-85B8-B11748113A6F}.Release|Any CPU.Build.0 = Release|Any CPU + {661A4295-68CF-41C6-85B8-B11748113A6F}.Release|x64.ActiveCfg = Release|Any CPU + {661A4295-68CF-41C6-85B8-B11748113A6F}.Release|x64.Build.0 = Release|Any CPU + {661A4295-68CF-41C6-85B8-B11748113A6F}.Release|x86.ActiveCfg = Release|Any CPU + {661A4295-68CF-41C6-85B8-B11748113A6F}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {5E63FEFB-A55D-4BEB-83B8-7D0E5A59EBF7} + EndGlobalSection +EndGlobal diff --git a/csrc/mmdeploy/apis/csharp/MMDeploy/.editorconfig b/csrc/mmdeploy/apis/csharp/MMDeploy/.editorconfig new file mode 100644 index 0000000000000000000000000000000000000000..2a769013fdf4f769475de9ee987fe5268485dfb5 --- /dev/null +++ b/csrc/mmdeploy/apis/csharp/MMDeploy/.editorconfig @@ -0,0 +1,110 @@ +[*.cs] +csharp_style_var_for_built_in_types=true:silent +csharp_style_var_when_type_is_apparent=true:silent +csharp_style_var_elsewhere=true:silent + +## +## StyleCop.Analyzers +## + +# Using directive should appear within a namespace declaration +dotnet_diagnostic.SA1200.severity = None + +# XML comment analysis is disabled due to project configuration +dotnet_diagnostic.SA0001.severity = None + +# The file header is missing or not located at the Top of the file +dotnet_diagnostic.SA1633.severity = None + +# Use string.Empty for empty strings +dotnet_diagnostic.SA1122.severity = None + +# Variable '_' should begin with lower-case letter +dotnet_diagnostic.SA1312.severity = None + +# Parameter '_' should begin with lower-case letter +dotnet_diagnostic.SA1313.severity = None + +# Elements should be documented +dotnet_diagnostic.SA1600.severity = None + +# Prefix local calls with this +dotnet_diagnostic.SA1101.severity = None + +# 'public' members should come before 'private' members 
+dotnet_diagnostic.SA1202.severity = None + +# Comments should contain text +dotnet_diagnostic.SA1120.severity = None + +# Constant fields should appear before non-constant fields +dotnet_diagnostic.SA1203.severity = None + +# Field '_blah' should not begin with an underscore +dotnet_diagnostic.SA1309.severity = None + +# Use trailing comma in multi-line initializers +dotnet_diagnostic.SA1413.severity = None + +# A method should not follow a class +dotnet_diagnostic.SA1201.severity = None + +# Elements should be separated by blank line +dotnet_diagnostic.SA1516.severity = None + +# The parameter spans multiple lines +dotnet_diagnostic.SA1118.severity = None + +# Static members should appear before non-static members +dotnet_diagnostic.SA1204.severity = None + +# Put constructor initializers on their own line +dotnet_diagnostic.SA1128.severity = None + +# Opening braces should not be preceded by blank line +dotnet_diagnostic.SA1509.severity = None + +# The parameter should begin on the line after the previous parameter +dotnet_diagnostic.SA1115.severity = None + +# File name should match first type name +dotnet_diagnostic.SA1649.severity = None + +# File may only contain a single type +dotnet_diagnostic.SA1402.severity = None + +# Enumeration items should be documented +dotnet_diagnostic.SA1602.severity = None + +# Element should not be on a single line +dotnet_diagnostic.SA1502.severity = None + +# Closing parenthesis should not be preceded by a space +dotnet_diagnostic.SA1009.severity = None + +# Closing parenthesis should be on line of last parameter +dotnet_diagnostic.SA1111.severity = None + +# Braces should not be ommitted +dotnet_diagnostic.SA1503.severity = None + +# The name of a C# element does not begin with an upper-case letter +# dotnet_diagnostic.SA1300.severity = None + +# The name of a public or internal field in C# does not begin with an upper-case letter +# dotnet_diagnostic.SA1307.severity = None + +# The code uses one of the basic C# types, but 
does not use the built-in alias for the type +# dotnet_diagnostic.SA1121.severity = None + +# Two or more attributes appeared within the same set of square brackets. +dotnet_diagnostic.SA1133.severity = None + +# The C# code contains a region. +dotnet_diagnostic.SA1124.severity = None + +# The parameters to a C# method or indexer call or declaration span across multiple lines, but the first parameter does not start on the line after the opening bracket. +dotnet_diagnostic.SA1116.severity = None + +# The parameters to a C# method or indexer call or declaration are not all on the same line or each on a separate line. +dotnet_diagnostic.SA1117.severity = None diff --git a/csrc/mmdeploy/apis/csharp/MMDeploy/APIs/Classifier.cs b/csrc/mmdeploy/apis/csharp/MMDeploy/APIs/Classifier.cs new file mode 100644 index 0000000000000000000000000000000000000000..9a2e4d1c058a2051dbbb92f14aea804ce8b5c107 --- /dev/null +++ b/csrc/mmdeploy/apis/csharp/MMDeploy/APIs/Classifier.cs @@ -0,0 +1,134 @@ +using System.Collections.Generic; + +namespace MMDeploy +{ + /// + /// Single classification result of a picture. + /// A picture may contains multiple reuslts. + /// + public struct Label + { + /// + /// Id. + /// + public int Id; + + /// + /// Score. + /// + public float Score; + + /// + /// Initializes a new instance of the struct. + /// + /// id. + /// score. + public Label(int id, float score) + { + Id = id; + Score = score; + } + } + + /// + /// Output of Classifier. + /// + public struct ClassifierOutput + { + /// + /// Classification results for single image. + /// + public List