|
@@ -12,23 +12,19 @@
 #include "utils/tensor.h"
 
 DetectionModel DetectionModel::load(const std::string& model_path, const ov::AnyMap& configuration) {
-    auto core = ov::Core();
-    std::shared_ptr<ov::Model> model = core.read_model(model_path);
+    auto adapter = std::make_shared<OpenVINOInferenceAdapter>();
+    adapter->loadModelFile(model_path, "", {}, false);
 
-    if (model->has_rt_info("model_info", "model_type")) {
-        std::cout << "has model type in info: " << model->get_rt_info<std::string>("model_info", "model_type")
-                  << std::endl;
-    } else {
-        throw std::runtime_error("Incorrect or unsupported model_type");
-    }
+    std::string model_type;
+    model_type = utils::get_from_any_maps("model_type", adapter->getModelConfig(), {}, model_type);
+    transform(model_type.begin(), model_type.end(), model_type.begin(), ::tolower);
 
-    if (utils::model_has_embedded_processing(model)) {
-        std::cout << "model already was serialized" << std::endl;
-    } else {
-        SSD::serialize(model);
+    if (model_type.empty() || model_type != "ssd") {
+        throw std::runtime_error("Incorrect or unsupported model_type, expected: ssd");
     }
-    auto adapter = std::make_shared<OpenVINOInferenceAdapter>();
-    adapter->loadModel(model, core, "AUTO");
+    adapter->applyModelTransform(SSD::serialize);
+    adapter->compileModel("AUTO", {});
+
     return DetectionModel(std::make_unique<SSD>(adapter), configuration);
 }
 
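For context, the refactor keeps the public signature of DetectionModel::load unchanged: callers still pass a model path and an optional ov::AnyMap, while the model_type check now throws for anything other than "ssd". Below is a minimal caller-side sketch; the include path, the model file name, and the empty configuration map are illustrative assumptions rather than part of this change, and the inference API itself is outside the scope of this diff.

    #include <iostream>
    #include <stdexcept>

    #include "models/detection_model.h"  // assumed header location for DetectionModel

    int main() {
        try {
            // Same entry point as before the refactor: model path plus an (optionally empty) configuration map.
            auto model = DetectionModel::load("ssd_mobilenet.xml", {});
            // ... run inference with `model` here; that part of the API is untouched by this diff.
        } catch (const std::runtime_error& e) {
            // Raised by the new check when the model's rt_info does not report model_type == "ssd".
            std::cerr << e.what() << std::endl;
            return 1;
        }
        return 0;
    }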
|
|