// Copyright (c) OpenMMLab. All rights reserved.

#include "text_detector.h"

#include <numeric>

#include "apis/c/common_internal.h"
#include "apis/c/executor_internal.h"
#include "apis/c/model.h"
#include "apis/c/pipeline.h"
#include "codebase/mmocr/mmocr.h"
#include "core/model.h"
#include "core/status_code.h"
#include "core/utils/formatter.h"

using namespace std;
using namespace mmdeploy;

namespace {

const Value& config_template() {
  // clang-format off
  static Value v{
    {
      "pipeline", {
        {"input", {"img"}},
        {"output", {"dets"}},
        {
          "tasks", {
            {
              {"name", "text-detector"},
              {"type", "Inference"},
              {"params", {{"model", "TBD"}}},
              {"input", {"img"}},
              {"output", {"dets"}}
            }
          }
        }
      }
    }
  };
  // clang-format on
  return v;
}
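
// For reference, the Value literal above corresponds to a pipeline config of roughly the
// following JSON shape; the "model" field is a placeholder that is filled in by
// mmdeploy_text_detector_create_impl below:
//
//   {
//     "pipeline": {
//       "input": ["img"],
//       "output": ["dets"],
//       "tasks": [{
//         "name": "text-detector",
//         "type": "Inference",
//         "params": {"model": "TBD"},
//         "input": ["img"],
//         "output": ["dets"]
//       }]
//     }
//   }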

int mmdeploy_text_detector_create_impl(mm_model_t model, const char* device_name, int device_id,
                                       mmdeploy_exec_info_t exec_info, mm_handle_t* handle) {
  auto config = config_template();
  config["pipeline"]["tasks"][0]["params"]["model"] = *static_cast<Model*>(model);

  return mmdeploy_pipeline_create(Cast(&config), device_name, device_id, exec_info, handle);
}

}  // namespace
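
// The public creation entry points below all delegate to mmdeploy_text_detector_create_impl:
// the _v2 variant additionally forwards an mmdeploy_exec_info_t for executor configuration of
// the pipeline, and _by_path loads the model from disk before creating the handle.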

int mmdeploy_text_detector_create(mm_model_t model, const char* device_name, int device_id,
                                  mm_handle_t* handle) {
  return mmdeploy_text_detector_create_impl(model, device_name, device_id, nullptr, handle);
}

int mmdeploy_text_detector_create_v2(mm_model_t model, const char* device_name, int device_id,
                                     mmdeploy_exec_info_t exec_info, mm_handle_t* handle) {
  return mmdeploy_text_detector_create_impl(model, device_name, device_id, exec_info, handle);
}

int mmdeploy_text_detector_create_by_path(const char* model_path, const char* device_name,
                                          int device_id, mm_handle_t* handle) {
  mm_model_t model{};
  if (auto ec = mmdeploy_model_create_by_path(model_path, &model)) {
    return ec;
  }
  auto ec = mmdeploy_text_detector_create_impl(model, device_name, device_id, nullptr, handle);
  mmdeploy_model_destroy(model);
  return ec;
}

int mmdeploy_text_detector_create_input(const mm_mat_t* mats, int mat_count,
                                        mmdeploy_value_t* input) {
  return mmdeploy_common_create_input(mats, mat_count, input);
}

int mmdeploy_text_detector_apply(mm_handle_t handle, const mm_mat_t* mats, int mat_count,
                                 mm_text_detect_t** results, int** result_count) {
  wrapped<mmdeploy_value_t> input;
  if (auto ec = mmdeploy_text_detector_create_input(mats, mat_count, input.ptr())) {
    return ec;
  }
  wrapped<mmdeploy_value_t> output;
  if (auto ec = mmdeploy_text_detector_apply_v2(handle, input, output.ptr())) {
    return ec;
  }
  if (auto ec = mmdeploy_text_detector_get_result(output, results, result_count)) {
    return ec;
  }
  return MM_SUCCESS;
}
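
// A minimal synchronous usage sketch (assumptions: OpenCV loads the image, and the mm_mat_t
// field order plus the MM_BGR / MM_INT8 constants follow the demo code of this API generation;
// adjust to the actual headers if they differ):
//
//   cv::Mat img = cv::imread("demo.jpg");
//   mm_mat_t mat{img.data, img.rows, img.cols, 3, MM_BGR, MM_INT8};
//
//   mm_handle_t detector{};
//   if (mmdeploy_text_detector_create_by_path("model_dir", "cpu", 0, &detector)) { /* error */ }
//
//   mm_text_detect_t* dets{};
//   int* det_count{};
//   if (!mmdeploy_text_detector_apply(detector, &mat, 1, &dets, &det_count)) {
//     // det_count[0] detections for the single input image, stored contiguously in dets
//     mmdeploy_text_detector_release_result(dets, det_count, 1);
//   }
//   mmdeploy_text_detector_destroy(detector);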

int mmdeploy_text_detector_apply_v2(mm_handle_t handle, mmdeploy_value_t input,
                                    mmdeploy_value_t* output) {
  return mmdeploy_pipeline_apply(handle, input, output);
}

int mmdeploy_text_detector_apply_async(mm_handle_t handle, mmdeploy_sender_t input,
                                       mmdeploy_sender_t* output) {
  return mmdeploy_pipeline_apply_async(handle, input, output);
}
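
// Converts the pipeline output Value into the C result arrays: detections for all images are
// flattened into a single contiguous array, and (*result_count)[i] holds the number of
// detections belonging to image i. Both arrays must be freed with
// mmdeploy_text_detector_release_result.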

int mmdeploy_text_detector_get_result(mmdeploy_value_t output, mm_text_detect_t** results,
                                      int** result_count) {
  if (!output || !results || !result_count) {
    return MM_E_INVALID_ARG;
  }
  try {
    Value& value = reinterpret_cast<Value*>(output)->front();

    auto detector_outputs = from_value<std::vector<mmocr::TextDetectorOutput>>(value);

    vector<int> _result_count;
    _result_count.reserve(detector_outputs.size());
    for (const auto& det_output : detector_outputs) {
      _result_count.push_back((int)det_output.scores.size());
    }

    auto total = std::accumulate(_result_count.begin(), _result_count.end(), 0);

    std::unique_ptr<int[]> result_count_data(new int[_result_count.size()]{});
    std::copy(_result_count.begin(), _result_count.end(), result_count_data.get());

    std::unique_ptr<mm_text_detect_t[]> result_data(new mm_text_detect_t[total]{});
    auto result_ptr = result_data.get();

    for (const auto& det_output : detector_outputs) {
      for (auto i = 0; i < det_output.scores.size(); ++i, ++result_ptr) {
        result_ptr->score = det_output.scores[i];
        auto& bbox = det_output.boxes[i];
        // boxes are stored as flat [x0, y0, x1, y1, ...] coordinates; unpack into point pairs
        for (auto j = 0; j < bbox.size(); j += 2) {
          result_ptr->bbox[j / 2].x = bbox[j];
          result_ptr->bbox[j / 2].y = bbox[j + 1];
        }
      }
    }

    *result_count = result_count_data.release();
    *results = result_data.release();

    return MM_SUCCESS;

  } catch (const std::exception& e) {
    MMDEPLOY_ERROR("unhandled exception: {}", e.what());
  } catch (...) {
    MMDEPLOY_ERROR("unknown exception caught");
  }
  // an exception indicates failure; do not report success here
  return MM_E_FAIL;
}

void mmdeploy_text_detector_release_result(mm_text_detect_t* results, const int* result_count,
                                           int count) {
  delete[] results;
  delete[] result_count;
}

void mmdeploy_text_detector_destroy(mm_handle_t handle) { mmdeploy_pipeline_destroy(handle); }

int mmdeploy_text_detector_apply_async_v2(mm_handle_t handle, const mm_mat_t* imgs, int img_count,
                                          mmdeploy_text_detector_continue_t cont, void* context,
                                          mmdeploy_sender_t* output) {
  mmdeploy_sender_t result_sender{};
  if (auto ec = mmdeploy_text_detector_apply_async_v3(handle, imgs, img_count, &result_sender)) {
    return ec;
  }
  if (auto ec = mmdeploy_text_detector_continue_async(result_sender, cont, context, output)) {
    return ec;
  }
  return MM_SUCCESS;
}

int mmdeploy_text_detector_apply_async_v3(mm_handle_t handle, const mm_mat_t* imgs, int img_count,
                                          mmdeploy_sender_t* output) {
  wrapped<mmdeploy_value_t> input_val;
  if (auto ec = mmdeploy_text_detector_create_input(imgs, img_count, input_val.ptr())) {
    return ec;
  }
  mmdeploy_sender_t input_sndr = mmdeploy_executor_just(input_val);
  if (auto ec = mmdeploy_text_detector_apply_async(handle, input_sndr, output)) {
    return ec;
  }
  return MM_SUCCESS;
}

int mmdeploy_text_detector_continue_async(mmdeploy_sender_t input,
                                          mmdeploy_text_detector_continue_t cont, void* context,
                                          mmdeploy_sender_t* output) {
  // chain the user continuation onto the pipeline's result sender; if result extraction or the
  // continuation fails, or the continuation produces no sender, complete with an empty Value
  auto sender = Guard([&] {
    return Take(
        LetValue(Take(input), [fn = cont, context](Value& value) -> TypeErasedSender<Value> {
          mm_text_detect_t* results{};
          int* result_count{};
          if (auto ec = mmdeploy_text_detector_get_result(Cast(&value), &results, &result_count)) {
            return Just(Value());
          }
          value = nullptr;
          mmdeploy_sender_t output{};
          if (auto ec = fn(results, result_count, context, &output); ec || !output) {
            return Just(Value());
          }
          return Take(output);
        }));
  });
  if (sender) {
    *output = sender;
    return MM_SUCCESS;
  }
  return MM_E_FAIL;
}
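
// A callback-based async usage sketch, reusing `detector` and `mat` from the synchronous sketch
// above. The continuation signature is inferred from the call site in
// mmdeploy_text_detector_continue_async; mmdeploy_executor_sync_wait and mmdeploy_value_destroy
// are assumed to be the executor/common C API calls for blocking on a sender and freeing the
// resulting value — check the executor headers for the exact names.
//
//   int on_results(mm_text_detect_t* results, int* result_count, void* context,
//                  mmdeploy_sender_t* output) {
//     // consume the detections, then free them; leaving *output empty makes the wrapper
//     // complete the returned sender with an empty Value
//     mmdeploy_text_detector_release_result(results, result_count, 1);
//     return 0;
//   }
//
//   mmdeploy_sender_t done{};
//   if (!mmdeploy_text_detector_apply_async_v2(detector, &mat, 1, on_results, nullptr, &done)) {
//     mmdeploy_value_destroy(mmdeploy_executor_sync_wait(done));  // block until the chain finishes
//   }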