 #include "utils/tensor.h"

 void Anomaly::serialize(std::shared_ptr<ov::Model>& ov_model) {
+    if (utils::model_has_embedded_processing(ov_model)) {
+        std::cout << "model already was serialized" << std::endl;
+        return;
+    }
+
     auto input = ov_model->inputs().front();

     auto layout = ov::layout::get_layout(input);
@@ -47,23 +52,21 @@ void Anomaly::serialize(std::shared_ptr<ov::Model>& ov_model) {
 }

 Anomaly Anomaly::load(const std::string& model_path) {
-    auto core = ov::Core();
-    std::shared_ptr<ov::Model> model = core.read_model(model_path);
+    auto adapter = std::make_shared<OpenVINOInferenceAdapter>();
+    adapter->loadModelFile(model_path, "", {}, false);
+
+    std::string model_type;
+    model_type = utils::get_from_any_maps("model_type", adapter->getModelConfig(), {}, model_type);

-    if (model->has_rt_info("model_info", "model_type")) {
-        std::cout << "has model type in info: " << model->get_rt_info<std::string>("model_info", "model_type")
-                  << std::endl;
+    if (!model_type.empty()) {
+        std::cout << "has model type in info: " << model_type << std::endl;
     } else {
         throw std::runtime_error("Incorrect or unsupported model_type");
     }

-    if (utils::model_has_embedded_processing(model)) {
-        std::cout << "model already was serialized" << std::endl;
-    } else {
-        serialize(model);
-    }
-    auto adapter = std::make_shared<OpenVINOInferenceAdapter>();
-    adapter->loadModel(model, core, "AUTO");
+    adapter->applyModelTransform(Anomaly::serialize);
+    adapter->compileModel("AUTO", {});
+
     return Anomaly(adapter);
 }
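The key change is that load() no longer touches ov::Core directly: the adapter reads the IR file, Anomaly::serialize is registered as a model transform, and compilation happens last. The following standalone sketch only illustrates that transform-before-compile hook with a hypothetical MiniAdapter; it is not the real OpenVINOInferenceAdapter, just an assumption about the shape of applyModelTransform and compileModel inferred from the calls above.

// Hypothetical stand-in illustrating the transform-before-compile pattern;
// the real OpenVINOInferenceAdapter API may differ.
#include <functional>
#include <memory>
#include <string>

#include <openvino/openvino.hpp>

class MiniAdapter {
public:
    // Same signature as Anomaly::serialize, so it can be passed directly.
    using ModelTransform = std::function<void(std::shared_ptr<ov::Model>&)>;

    // Read the IR from disk but defer compilation.
    void loadModelFile(const std::string& model_path) {
        model_ = core_.read_model(model_path);
    }

    // Let the model wrapper (e.g. Anomaly) patch the graph before compilation,
    // for example to embed preprocessing and rt_info.
    void applyModelTransform(const ModelTransform& transform) {
        transform(model_);
    }

    // Compile the (possibly transformed) model for the requested device.
    void compileModel(const std::string& device) {
        compiled_ = core_.compile_model(model_, device);
    }

private:
    ov::Core core_;
    std::shared_ptr<ov::Model> model_;
    ov::CompiledModel compiled_;
};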
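For completeness, a minimal caller sketch, assuming the Anomaly class is declared in a header such as "anomaly.h" (hypothetical path) and an OpenVINO IR file is available on disk; the catch block exercises the model_type check introduced in load().

// Minimal caller sketch (header path "anomaly.h" is an assumption).
#include <exception>
#include <iostream>
#include <string>

#include "anomaly.h"  // assumed header declaring Anomaly

int main(int argc, char* argv[]) {
    const std::string model_path = argc > 1 ? argv[1] : "model.xml";
    try {
        // load() reads the IR through the adapter, embeds preprocessing via
        // Anomaly::serialize when it is not already embedded, and compiles
        // for the AUTO device.
        Anomaly model = Anomaly::load(model_path);
        // ... run inference with `model` here ...
    } catch (const std::exception& e) {
        std::cerr << "Failed to load anomaly model: " << e.what() << std::endl;
        return 1;
    }
    return 0;
}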