// Copyright (c) OpenMMLab. All rights reserved.

#include "inference.h"

#include "archive/json_archive.h"
#include "core/model.h"

namespace mmdeploy::graph {

Result<std::unique_ptr<Inference>> InferenceParser::Parse(const Value& config) {
  try {
    // "model" may already hold a loaded Model object, or a path to load one from.
    auto& model_config = config["params"]["model"];
    Model model;
    if (model_config.is_any<Model>()) {
      model = model_config.get<Model>();
    } else {
      model = Model(model_config.get<std::string>());
    }
    // The pipeline description is bundled inside the model archive.
    OUTCOME_TRY(auto pipeline_json, model.ReadFile("pipeline.json"));
    auto json = nlohmann::json::parse(pipeline_json);

    // Propagate the caller's context (defaulting to an empty object) and
    // expose the model to downstream nodes through it.
    auto context = config.value("context", Value(ValueType::kObject));
    context["model"] = std::move(model);

    auto pipeline_config = from_json<Value>(json);
    pipeline_config["context"] = context;

    // Parse the common node attributes, then build the inner pipeline.
    auto inference = std::make_unique<Inference>();
    OUTCOME_TRY(NodeParser::Parse(config, *inference));
    OUTCOME_TRY(inference->pipeline_, PipelineParser{}.Parse(pipeline_config));
    return std::move(inference);
  } catch (const Exception& e) {
    MMDEPLOY_ERROR("exception: {}", e.what());
    return failure(e.code());
  }
}

class InferenceCreator : public Creator<Node> {
 public:
  const char* GetName() const override { return "Inference"; }
  int GetVersion() const override { return 0; }
  std::unique_ptr<Node> Create(const Value& value) override {
    return InferenceParser::Parse(value).value();
  }
};

REGISTER_MODULE(Node, InferenceCreator);

}  // namespace mmdeploy::graph
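
// A sketch of the kind of config value InferenceParser::Parse consumes. The
// "params"/"model" and "context" keys are the ones read by the code above;
// the "type" key follows from the registered creator name "Inference", and
// the concrete field values are hypothetical:
//
//   {
//     "type": "Inference",
//     "params": {
//       "model": "/path/to/sdk/model"  // or an already-loaded Model object
//     },
//     "context": { ... }               // optional; defaults to an empty object
//   }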