PaddleClas/deploy/lite_shitu/include/feature_extractor.h

// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
#include "paddle_api.h" // NOLINT
#include "json/json.h"
#include <arm_neon.h>
#include <chrono>
#include <fstream>
#include <iostream>
#include <math.h>
#include <opencv2/opencv.hpp>
#include <stdlib.h>
#include <sys/time.h>
#include <vector>
using namespace paddle::lite_api; // NOLINT
using namespace std;
namespace PPShiTu {
struct RESULT {
std::string class_name;
int class_id;
float score;
};
class FeatureExtract {
public:
  explicit FeatureExtract(const Json::Value &config_file) {
    // Load the Paddle-Lite recognition model given in Global.rec_model_path.
    MobileConfig config;
    if (config_file["Global"]["rec_model_path"].as<std::string>().empty()) {
      std::cerr << "Please set [rec_model_path] in config file" << std::endl;
      exit(-1);
    }
    config.set_model_from_file(
        config_file["Global"]["rec_model_path"].as<std::string>());
    this->predictor = CreatePaddlePredictor<MobileConfig>(config);

    if (config_file["Global"]["rec_label_path"].as<std::string>().empty()) {
      std::cerr << "Please set [rec_label_path] in config file" << std::endl;
      exit(-1);
    }
    // Read the preprocessing parameters (resize size, mean/std, scale).
    SetPreProcessParam(config_file["RecPreProcess"]["transform_ops"]);
    printf("feature extraction model created!\n");
  }
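  // A sketch of the JSON fields this class reads, with key names taken from the
  // constructor above and SetPreProcessParam below; the concrete values shown
  // here are illustrative placeholders only:
  //
  //   {
  //     "Global": {
  //       "rec_model_path": "models/rec.nb",
  //       "rec_label_path": "labels.txt"
  //     },
  //     "RecPreProcess": {
  //       "transform_ops": [
  //         { "type": "ResizeImage", "size": 224 },
  //         { "type": "NormalizeImage",
  //           "mean": [0.485, 0.456, 0.406],
  //           "std": [0.229, 0.224, 0.225],
  //           "scale": 0.00392157 }
  //       ]
  //     }
  //   }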
  // Parse the RecPreProcess.transform_ops list and cache the preprocessing
  // parameters used before inference.
  void SetPreProcessParam(const Json::Value &config_file) {
    for (const auto &item : config_file) {
      auto op_name = item["type"].as<std::string>();
      if (op_name == "ResizeImage") {
        this->size = item["size"].as<int>();
      } else if (op_name == "NormalizeImage") {
        this->mean.clear();
        this->std.clear();
        for (auto tmp : item["mean"]) {
          this->mean.emplace_back(tmp.as<float>());
        }
        // Store reciprocals so normalization can multiply instead of divide.
        for (auto tmp : item["std"]) {
          this->std.emplace_back(1 / tmp.as<float>());
        }
        this->scale = item["scale"].as<double>();
      }
    }
  }
  // Run the feature-extraction model on an image, writing the embedding into
  // `feature` and the inference time into `cost_time`.
  void RunRecModel(const cv::Mat &img, double &cost_time,
                   std::vector<float> &feature);
  //void PostProcess(std::vector<float> &feature);
  // Resize the input image to `size` x `size`.
  cv::Mat ResizeImage(const cv::Mat &img);
  // Per-channel mean/std normalization accelerated with ARM NEON intrinsics.
  void NeonMeanScale(const float *din, float *dout, int size);
private:
  std::shared_ptr<PaddlePredictor> predictor;
  //std::vector<std::string> label_list;
  // Normalization parameters; `std` holds reciprocals (see SetPreProcessParam).
  // Defaults are the standard ImageNet mean/std values.
  std::vector<float> mean = {0.485f, 0.456f, 0.406f};
  std::vector<float> std = {1 / 0.229f, 1 / 0.224f, 1 / 0.225f};
  double scale = 0.00392157; // 1 / 255, maps 8-bit pixel values into [0, 1]
  int size = 224;            // target side length used by ResizeImage
};
} // namespace PPShiTu
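
// A minimal usage sketch (not part of the original header), assuming a JSON
// config laid out as described above; the file and image paths are hypothetical:
//
//   #include "include/feature_extractor.h"
//
//   Json::Value config;
//   std::ifstream ifs("shitu_config.json"); // hypothetical config path
//   Json::Reader reader;
//   reader.parse(ifs, config);
//
//   PPShiTu::FeatureExtract extractor(config);
//   cv::Mat img = cv::imread("demo.jpg");   // hypothetical test image
//   std::vector<float> feature;
//   double cost_time = 0.0;
//   extractor.RunRecModel(img, cost_time, feature);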