From 7e7d55f507b51764b8a9ed8e934f6c34c2079d91 Mon Sep 17 00:00:00 2001 From: Vladislav Sovrasov Date: Fri, 8 Nov 2024 11:35:56 +0900 Subject: [PATCH 01/11] Move adapter-specific configuration logic to adapter interface --- .../python/model_api/adapters/inference_adapter.py | 4 ++++ model_api/python/model_api/adapters/onnx_adapter.py | 9 ++++++++- .../python/model_api/adapters/openvino_adapter.py | 10 ++++++++++ model_api/python/model_api/adapters/ovms_adapter.py | 3 +++ model_api/python/model_api/models/model.py | 12 ++++++++---- 5 files changed, 33 insertions(+), 5 deletions(-) diff --git a/model_api/python/model_api/adapters/inference_adapter.py b/model_api/python/model_api/adapters/inference_adapter.py index 464092a0..c0d67021 100644 --- a/model_api/python/model_api/adapters/inference_adapter.py +++ b/model_api/python/model_api/adapters/inference_adapter.py @@ -147,6 +147,10 @@ def await_any(self): def get_rt_info(self, path): """Forwards to openvino.Model.get_rt_info(path)""" + @abstractmethod + def update_model_info(self, model_info: dict[str, str]): + """Updates model with the provided model info.""" + @abstractmethod def embed_preprocessing( self, diff --git a/model_api/python/model_api/adapters/onnx_adapter.py b/model_api/python/model_api/adapters/onnx_adapter.py index 3ddec73b..a7811320 100644 --- a/model_api/python/model_api/adapters/onnx_adapter.py +++ b/model_api/python/model_api/adapters/onnx_adapter.py @@ -53,6 +53,7 @@ def __init__(self, model: str, ort_options: dict = {}): inferred_model.SerializeToString(), **ort_options, ) + self.model = inferred_model self.output_names = [o.name for o in self.session.get_outputs()] self.onnx_metadata = load_parameters_from_onnx(inferred_model) self.preprocessor = lambda arg: arg @@ -164,7 +165,7 @@ def embed_preprocessing( def get_model(self): """Return the reference to the ONNXRuntime session.""" - return self.session + return self.model def reshape_model(self, new_shape): raise NotImplementedError @@ -172,6 +173,12 @@ def reshape_model(self, new_shape): def get_rt_info(self, path): return get_rt_info_from_dict(self.onnx_metadata, path) + def update_model_info(self, model_info: dict[str, str]): + for item in model_info: + meta = self.model.metadata_props.add() + attr_path = "model_info " + item + meta.key = attr_path.strip() + meta.value = model_info[item] _onnx2ov_precision = { "tensor(float)": "f32", diff --git a/model_api/python/model_api/adapters/openvino_adapter.py b/model_api/python/model_api/adapters/openvino_adapter.py index 74593a04..c22447e1 100644 --- a/model_api/python/model_api/adapters/openvino_adapter.py +++ b/model_api/python/model_api/adapters/openvino_adapter.py @@ -406,6 +406,16 @@ def get_model(self): """ return self.model + def update_model_info(self, model_info: dict[str, str]): + """ + Populates OV IR RT info with the given model info. + + Args: + model_info (dict[str, Any]): a dict representing the serialized parameters. 
+ """ + for name in model_info: + self.model.set_rt_info(model_info[name], ["model_info", name]) + def get_input_shape(input_tensor): def string_to_tuple(string, casting_type=int): diff --git a/model_api/python/model_api/adapters/ovms_adapter.py b/model_api/python/model_api/adapters/ovms_adapter.py index 77d11623..be0d27da 100644 --- a/model_api/python/model_api/adapters/ovms_adapter.py +++ b/model_api/python/model_api/adapters/ovms_adapter.py @@ -119,6 +119,9 @@ def get_rt_info(self, path): msg = "OVMSAdapter does not support RT info getting" raise NotImplementedError(msg) + def update_model_info(self, model_info: dict[str, str]): + msg = "OVMSAdapter does not support updating model info" + raise NotImplementedError(msg) _tf2ov_precision = { "DT_INT64": "I64", diff --git a/model_api/python/model_api/models/model.py b/model_api/python/model_api/models/model.py index 9b065455..c439eec5 100644 --- a/model_api/python/model_api/models/model.py +++ b/model_api/python/model_api/models/model.py @@ -91,11 +91,15 @@ def __init__(self, inference_adapter, configuration: dict = {}, preload=False): self.callback_fn = lambda _: None def get_model(self): - model = self.inference_adapter.get_model() - model.set_rt_info(self.__model__, ["model_info", "model_type"]) + model_info = { + "model_type": self.__model__, + } for name in self.parameters(): - model.set_rt_info(getattr(self, name), ["model_info", name]) - return model + model_info[name] = getattr(self, name) + + self.inference_adapter.update_model_info(model_info) + + return self.inference_adapter.get_model() @classmethod def get_model_class(cls, name): From a315d20d57650a165adc40b01529511695b03f0d Mon Sep 17 00:00:00 2001 From: Vladislav Sovrasov Date: Fri, 8 Nov 2024 14:04:50 +0900 Subject: [PATCH 02/11] Update implementation --- .../python/model_api/adapters/inference_adapter.py | 6 +++++- model_api/python/model_api/adapters/onnx_adapter.py | 12 ++++++++++-- .../python/model_api/adapters/openvino_adapter.py | 6 +++++- model_api/python/model_api/adapters/ovms_adapter.py | 7 ++++++- model_api/python/model_api/models/model.py | 6 ++---- 5 files changed, 28 insertions(+), 9 deletions(-) diff --git a/model_api/python/model_api/adapters/inference_adapter.py b/model_api/python/model_api/adapters/inference_adapter.py index c0d67021..5f337e30 100644 --- a/model_api/python/model_api/adapters/inference_adapter.py +++ b/model_api/python/model_api/adapters/inference_adapter.py @@ -148,9 +148,13 @@ def get_rt_info(self, path): """Forwards to openvino.Model.get_rt_info(path)""" @abstractmethod - def update_model_info(self, model_info: dict[str, str]): + def update_model_info(self, model_info: dict[str, Any]): """Updates model with the provided model info.""" + @abstractmethod + def save_model(self, path: str, weights_path: str, version: str): + """Serializes model to the filesystem.""" + @abstractmethod def embed_preprocessing( self, diff --git a/model_api/python/model_api/adapters/onnx_adapter.py b/model_api/python/model_api/adapters/onnx_adapter.py index a7811320..f028ee55 100644 --- a/model_api/python/model_api/adapters/onnx_adapter.py +++ b/model_api/python/model_api/adapters/onnx_adapter.py @@ -5,6 +5,7 @@ import sys from functools import partial, reduce +from typing import Any import numpy as np @@ -173,12 +174,19 @@ def reshape_model(self, new_shape): def get_rt_info(self, path): return get_rt_info_from_dict(self.onnx_metadata, path) - def update_model_info(self, model_info: dict[str, str]): + def update_model_info(self, model_info: dict[str, Any]): for 
item in model_info: meta = self.model.metadata_props.add() attr_path = "model_info " + item meta.key = attr_path.strip() - meta.value = model_info[item] + if isinstance(model_info[item], list): + meta.value = " ".join(str(x) for x in model_info[item]) + else: + meta.value = str(model_info[item]) + + def save_model(self, path: str, weights_path: str = "", version: str = "UNSPECIFIED"): + onnx.save(self.model, path) + _onnx2ov_precision = { "tensor(float)": "f32", diff --git a/model_api/python/model_api/adapters/openvino_adapter.py b/model_api/python/model_api/adapters/openvino_adapter.py index c22447e1..9e4b507a 100644 --- a/model_api/python/model_api/adapters/openvino_adapter.py +++ b/model_api/python/model_api/adapters/openvino_adapter.py @@ -5,6 +5,7 @@ import logging as log from pathlib import Path +from typing import Any try: import openvino.runtime as ov @@ -406,7 +407,7 @@ def get_model(self): """ return self.model - def update_model_info(self, model_info: dict[str, str]): + def update_model_info(self, model_info: dict[str, Any]): """ Populates OV IR RT info with the given model info. @@ -416,6 +417,9 @@ def update_model_info(self, model_info: dict[str, str]): for name in model_info: self.model.set_rt_info(model_info[name], ["model_info", name]) + def save_model(self, path: str, weights_path: str = "", version: str = "UNSPECIFIED"): + ov.serialize(self.get_model(), path, weights_path, version) + def get_input_shape(input_tensor): def string_to_tuple(string, casting_type=int): diff --git a/model_api/python/model_api/adapters/ovms_adapter.py b/model_api/python/model_api/adapters/ovms_adapter.py index be0d27da..38be49ae 100644 --- a/model_api/python/model_api/adapters/ovms_adapter.py +++ b/model_api/python/model_api/adapters/ovms_adapter.py @@ -4,6 +4,7 @@ # import re +from typing import Any import numpy as np @@ -119,10 +120,14 @@ def get_rt_info(self, path): msg = "OVMSAdapter does not support RT info getting" raise NotImplementedError(msg) - def update_model_info(self, model_info: dict[str, str]): + def update_model_info(self, model_info: dict[str, Any]): msg = "OVMSAdapter does not support updating model info" raise NotImplementedError(msg) + def save_model(self, path: str, weights_path: str = "", version: str = "UNSPECIFIED"): + msg = "OVMSAdapter does not support saving a model" + raise NotImplementedError(msg) + _tf2ov_precision = { "DT_INT64": "I64", "DT_UINT64": "U64", diff --git a/model_api/python/model_api/models/model.py b/model_api/python/model_api/models/model.py index c439eec5..f5c3ba8e 100644 --- a/model_api/python/model_api/models/model.py +++ b/model_api/python/model_api/models/model.py @@ -494,7 +494,5 @@ def log_layers_info(self): f"precision: {metadata.precision}, layout: {metadata.layout}", ) - def save(self, xml_path, bin_path="", version="UNSPECIFIED"): - import openvino - - openvino.serialize(self.get_model(), xml_path, bin_path, version) + def save(self, path: str, weights_path: str = "", version: str = "UNSPECIFIED"): + self.inference_adapter.save_model(path, weights_path, version) From c3db4ed47c5b062b4d437691738c803827757091 Mon Sep 17 00:00:00 2001 From: Vladislav Sovrasov Date: Fri, 8 Nov 2024 14:05:28 +0900 Subject: [PATCH 03/11] Add tests for saving via onnx adapter --- tests/python/funtional/test_save.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/tests/python/funtional/test_save.py b/tests/python/funtional/test_save.py index b69a9b48..f9d674f9 100644 --- a/tests/python/funtional/test_save.py +++ 
b/tests/python/funtional/test_save.py @@ -3,6 +3,8 @@ # SPDX-License-Identifier: Apache-2.0 # from model_api.models import Model +from model_api.adapters import ONNXRuntimeAdapter +from model_api.adapters.utils import load_parameters_from_onnx def test_detector_save(tmp_path): @@ -56,3 +58,25 @@ def test_segmentor_save(tmp_path): assert type(downloaded) is type(deserialized) for attr in downloaded.parameters(): assert getattr(downloaded, attr) == getattr(deserialized, attr) + + +def test_onnx_save(tmp_path): + cls_model = Model.create_model( + ONNXRuntimeAdapter("data/otx_models/cls_mobilenetv3_large_cars.onnx"), + model_type="Classification", + preload=True, + configuration={"reverse_input_channels": True, "topk": 6}, + ) + + assert ( + load_parameters_from_onnx(cls_model.get_model()) + ["model_info"]["embedded_processing"] == "True" + ) + + onnx_path = str(tmp_path / "a.onnx") + cls_model.save(onnx_path) + + deserialized = Model.create_model(onnx_path) + assert type(cls_model) is type(deserialized) + for attr in cls_model.parameters(): + assert getattr(cls_model, attr) == getattr(deserialized, attr) From c113eac85026a15d3da622658a2a96a6ec055a15 Mon Sep 17 00:00:00 2001 From: Vladislav Sovrasov Date: Sat, 9 Nov 2024 04:54:54 +0900 Subject: [PATCH 04/11] Fix linter --- model_api/python/model_api/adapters/ovms_adapter.py | 1 + tests/python/funtional/test_save.py | 6 ++++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/model_api/python/model_api/adapters/ovms_adapter.py b/model_api/python/model_api/adapters/ovms_adapter.py index 02449977..d07d479e 100644 --- a/model_api/python/model_api/adapters/ovms_adapter.py +++ b/model_api/python/model_api/adapters/ovms_adapter.py @@ -131,6 +131,7 @@ def save_model(self, path: str, weights_path: str = "", version: str = "UNSPECIF msg = "OVMSAdapter does not support saving a model" raise NotImplementedError(msg) + _tf2ov_precision = { "DT_INT64": "I64", "DT_UINT64": "U64", diff --git a/tests/python/funtional/test_save.py b/tests/python/funtional/test_save.py index f9d674f9..6f8c26cb 100644 --- a/tests/python/funtional/test_save.py +++ b/tests/python/funtional/test_save.py @@ -69,8 +69,10 @@ def test_onnx_save(tmp_path): ) assert ( - load_parameters_from_onnx(cls_model.get_model()) - ["model_info"]["embedded_processing"] == "True" + load_parameters_from_onnx(cls_model.get_model())["model_info"][ + "embedded_processing" + ] + == "True" ) onnx_path = str(tmp_path / "a.onnx") From e39ccfc9b55ff42662216cd9bd12b1f2d4f7b6e2 Mon Sep 17 00:00:00 2001 From: Vladislav Sovrasov Date: Sat, 9 Nov 2024 05:25:10 +0900 Subject: [PATCH 05/11] Restore create model from adapter --- model_api/python/model_api/models/model.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/model_api/python/model_api/models/model.py b/model_api/python/model_api/models/model.py index 51a4340a..ff007308 100644 --- a/model_api/python/model_api/models/model.py +++ b/model_api/python/model_api/models/model.py @@ -11,6 +11,7 @@ from typing import TYPE_CHECKING, Any, NoReturn, Type from model_api.adapters.onnx_adapter import ONNXRuntimeAdapter +from model_api.adapters.inference_adapter import InferenceAdapter from model_api.adapters.openvino_adapter import ( OpenvinoAdapter, create_core, @@ -23,7 +24,6 @@ from numpy import ndarray - from model_api.adapters.inference_adapter import InferenceAdapter class WrapperError(Exception): @@ -126,7 +126,7 @@ def get_model_class(cls, name: str) -> Type: @classmethod def create_model( cls, - model: str, + model: str | 
InferenceAdapter, model_type: Any | None = None, configuration: dict[str, Any] = {}, preload: bool = True, @@ -144,7 +144,7 @@ def create_model( """Create an instance of the Model API model Args: - model (str): model name from OpenVINO Model Zoo, path to model, OVMS URL + model (str| InferenceAdapter): model name from OpenVINO Model Zoo, path to model, OVMS URL, or an adapter configuration (:obj:`dict`, optional): dictionary of model config with model properties, for example confidence_threshold, labels model_type (:obj:`str`, optional): name of model wrapper to create (e.g. "ssd") @@ -166,7 +166,9 @@ def create_model( Model object """ inference_adapter: InferenceAdapter - if isinstance(model, str) and re.compile( + if isinstance(model, InferenceAdapter): + inference_adapter = model + elif isinstance(model, str) and re.compile( r"(\w+\.*\-*)*\w+:\d+\/models\/[a-zA-Z0-9._-]+(\:\d+)*", ).fullmatch(model): inference_adapter = OVMSAdapter(model) From d3e27584a115192073c4deb1b42099b6b2f1194b Mon Sep 17 00:00:00 2001 From: Vladislav Sovrasov Date: Sat, 9 Nov 2024 05:38:03 +0900 Subject: [PATCH 06/11] Update functional tests --- .github/workflows/test_precommit.yml | 2 +- tests/python/funtional/__init__.py | 4 ++++ tests/python/funtional/conftest.py | 15 +++++++++++++++ tests/python/funtional/test_load.py | 7 +++++-- tests/python/funtional/test_save.py | 7 +++++-- 5 files changed, 30 insertions(+), 5 deletions(-) create mode 100644 tests/python/funtional/__init__.py create mode 100644 tests/python/funtional/conftest.py diff --git a/.github/workflows/test_precommit.yml b/.github/workflows/test_precommit.yml index 85bb440a..0a042a9b 100644 --- a/.github/workflows/test_precommit.yml +++ b/.github/workflows/test_precommit.yml @@ -35,7 +35,7 @@ jobs: - name: Run test run: | source venv/bin/activate - pytest tests/python/funtional + pytest --data=./data tests/python/funtional CPP-Code-Quality: name: CPP-Code-Quality runs-on: ubuntu-latest diff --git a/tests/python/funtional/__init__.py b/tests/python/funtional/__init__.py new file mode 100644 index 00000000..7ac51207 --- /dev/null +++ b/tests/python/funtional/__init__.py @@ -0,0 +1,4 @@ +# +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 +# diff --git a/tests/python/funtional/conftest.py b/tests/python/funtional/conftest.py new file mode 100644 index 00000000..2ef51d3d --- /dev/null +++ b/tests/python/funtional/conftest.py @@ -0,0 +1,15 @@ +# +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 +# + +import pytest + + +def pytest_addoption(parser): + parser.addoption("--data", action="store", help="data folder with dataset") + + +@pytest.fixture(scope="session") +def data(pytestconfig): + return pytestconfig.getoption("data") diff --git a/tests/python/funtional/test_load.py b/tests/python/funtional/test_load.py index a0d79e0c..37dc6c98 100644 --- a/tests/python/funtional/test_load.py +++ b/tests/python/funtional/test_load.py @@ -2,13 +2,16 @@ # Copyright (C) 2020-2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 # + +from pathlib import Path + from model_api.models import Model -def test_model_with_unnamed_output_load(): +def test_model_with_unnamed_output_load(data): # the model's output doesn't have a name _ = Model.create_model( - "data/otx_models/tinynet_imagenet.xml", + Path(data) / "otx_models/tinynet_imagenet.xml", model_type="Classification", preload=True, ) diff --git a/tests/python/funtional/test_save.py b/tests/python/funtional/test_save.py index 6f8c26cb..61511e13 100644 --- 
a/tests/python/funtional/test_save.py +++ b/tests/python/funtional/test_save.py @@ -2,6 +2,9 @@ # Copyright (C) 2020-2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 # + +from pathlib import Path + from model_api.models import Model from model_api.adapters import ONNXRuntimeAdapter from model_api.adapters.utils import load_parameters_from_onnx @@ -60,9 +63,9 @@ def test_segmentor_save(tmp_path): assert getattr(downloaded, attr) == getattr(deserialized, attr) -def test_onnx_save(tmp_path): +def test_onnx_save(tmp_path, data): cls_model = Model.create_model( - ONNXRuntimeAdapter("data/otx_models/cls_mobilenetv3_large_cars.onnx"), + ONNXRuntimeAdapter(Path(data) / "otx_models/cls_mobilenetv3_large_cars.onnx"), model_type="Classification", preload=True, configuration={"reverse_input_channels": True, "topk": 6}, From dbb89fcb1c54c126f98dab827a823624aad3b3ed Mon Sep 17 00:00:00 2001 From: Vladislav Sovrasov Date: Sat, 9 Nov 2024 07:11:59 +0900 Subject: [PATCH 07/11] Fix imports order --- model_api/python/model_api/models/model.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/model_api/python/model_api/models/model.py b/model_api/python/model_api/models/model.py index ff007308..ca8b6c52 100644 --- a/model_api/python/model_api/models/model.py +++ b/model_api/python/model_api/models/model.py @@ -10,8 +10,8 @@ from contextlib import contextmanager from typing import TYPE_CHECKING, Any, NoReturn, Type -from model_api.adapters.onnx_adapter import ONNXRuntimeAdapter from model_api.adapters.inference_adapter import InferenceAdapter +from model_api.adapters.onnx_adapter import ONNXRuntimeAdapter from model_api.adapters.openvino_adapter import ( OpenvinoAdapter, create_core, @@ -25,7 +25,6 @@ from numpy import ndarray - class WrapperError(Exception): """The class for errors occurred in Model API wrappers""" From be71986f4f48ab3009e0398260bce82b0a923131 Mon Sep 17 00:00:00 2001 From: Vladislav Sovrasov Date: Sat, 9 Nov 2024 07:18:01 +0900 Subject: [PATCH 08/11] Update precommit data deps --- tests/cpp/precommit/public_scope.json | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/cpp/precommit/public_scope.json b/tests/cpp/precommit/public_scope.json index b7ded6b1..1a7d2911 100644 --- a/tests/cpp/precommit/public_scope.json +++ b/tests/cpp/precommit/public_scope.json @@ -15,6 +15,10 @@ "name": "otx_models/mlc_mobilenetv3_large_voc.xml", "type": "ClassificationModel" }, + { + "name": "otx_models/cls_mobilenetv3_large_cars.onnx", + "type": "ClassificationModel" + }, { "name": "hrnet-v2-c1-segmentation", "type": "SegmentationModel" From 5b63bd418442e517a7eb06965f35634df65e9e02 Mon Sep 17 00:00:00 2001 From: Vladislav Sovrasov Date: Sat, 9 Nov 2024 07:26:42 +0900 Subject: [PATCH 09/11] Fix data preparation script --- tests/cpp/precommit/prepare_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/cpp/precommit/prepare_data.py b/tests/cpp/precommit/prepare_data.py index 22b82f46..a92a947f 100644 --- a/tests/cpp/precommit/prepare_data.py +++ b/tests/cpp/precommit/prepare_data.py @@ -30,7 +30,7 @@ def prepare_model( public_scope = json.load(f) for model in public_scope: - if model["name"].endswith(".xml"): + if model["name"].endswith(".xml") or model["name"].endswith(".onnx"): continue model = eval(model["type"]).create_model(model["name"], download_dir=data_dir) From ced4b2c0330ecaf9b2a755c06a1912762a820ba3 Mon Sep 17 00:00:00 2001 From: Vladislav Sovrasov Date: Sat, 9 Nov 2024 07:45:43 +0900 Subject: [PATCH 10/11] Fix onnx models loading --- 
tests/cpp/precommit/prepare_data.py | 29 +++++++++++++++++---------- tests/cpp/precommit/public_scope.json | 4 ---- 2 files changed, 18 insertions(+), 15 deletions(-) diff --git a/tests/cpp/precommit/prepare_data.py b/tests/cpp/precommit/prepare_data.py index a92a947f..8a7f4ac7 100644 --- a/tests/cpp/precommit/prepare_data.py +++ b/tests/cpp/precommit/prepare_data.py @@ -5,17 +5,23 @@ from urllib.request import urlopen, urlretrieve -def retrieve_otx_model(data_dir, model_name): - destenation_folder = os.path.join(data_dir, "otx_models") - os.makedirs(destenation_folder, exist_ok=True) - urlretrieve( - f"https://storage.openvinotoolkit.org/repositories/model_api/test/otx_models/{model_name}/openvino.xml", - f"{destenation_folder}/{model_name}.xml", - ) - urlretrieve( - f"https://storage.openvinotoolkit.org/repositories/model_api/test/otx_models/{model_name}/openvino.bin", - f"{destenation_folder}/{model_name}.bin", - ) +def retrieve_otx_model(data_dir, model_name, format="xml"): + destination_folder = os.path.join(data_dir, "otx_models") + os.makedirs(destination_folder, exist_ok=True) + if format == "onnx": + urlretrieve( + f"https://storage.openvinotoolkit.org/repositories/model_api/test/otx_models/{model_name}/model.onnx", + f"{destination_folder}/{model_name}.onnx", + ) + else: + urlretrieve( + f"https://storage.openvinotoolkit.org/repositories/model_api/test/otx_models/{model_name}/openvino.xml", + f"{destination_folder}/{model_name}.xml", + ) + urlretrieve( + f"https://storage.openvinotoolkit.org/repositories/model_api/test/otx_models/{model_name}/openvino.bin", + f"{destination_folder}/{model_name}.bin", + ) def prepare_model( @@ -72,3 +78,4 @@ def prepare_data(data_dir="./data"): prepare_data(args.data_dir) retrieve_otx_model(args.data_dir, "mlc_mobilenetv3_large_voc") retrieve_otx_model(args.data_dir, "tinynet_imagenet") + retrieve_otx_model(args.data_dir, "cls_mobilenetv3_large_cars", "onnx") diff --git a/tests/cpp/precommit/public_scope.json b/tests/cpp/precommit/public_scope.json index 1a7d2911..b7ded6b1 100644 --- a/tests/cpp/precommit/public_scope.json +++ b/tests/cpp/precommit/public_scope.json @@ -15,10 +15,6 @@ "name": "otx_models/mlc_mobilenetv3_large_voc.xml", "type": "ClassificationModel" }, - { - "name": "otx_models/cls_mobilenetv3_large_cars.onnx", - "type": "ClassificationModel" - }, { "name": "hrnet-v2-c1-segmentation", "type": "SegmentationModel" From 29b633ebcc6f436bef5283dcde9733da67a26247 Mon Sep 17 00:00:00 2001 From: Vladislav Sovrasov Date: Sat, 9 Nov 2024 10:19:10 +0900 Subject: [PATCH 11/11] Fix saving logic --- model_api/python/model_api/models/model.py | 15 ++++---- tests/python/funtional/test_save.py | 40 ++++++++++++---------- 2 files changed, 29 insertions(+), 26 deletions(-) diff --git a/model_api/python/model_api/models/model.py b/model_api/python/model_api/models/model.py index ca8b6c52..6f001454 100644 --- a/model_api/python/model_api/models/model.py +++ b/model_api/python/model_api/models/model.py @@ -99,14 +99,6 @@ def __init__(self, inference_adapter: InferenceAdapter, configuration: dict = {} self.callback_fn = lambda _: None def get_model(self): - model_info = { - "model_type": self.__model__, - } - for name in self.parameters(): - model_info[name] = getattr(self, name) - - self.inference_adapter.update_model_info(model_info) - return self.inference_adapter.get_model() @classmethod @@ -493,4 +485,11 @@ def log_layers_info(self): ) def save(self, path: str, weights_path: str = "", version: str = "UNSPECIFIED"): + model_info = { + "model_type": 
self.__model__, + } + for name in self.parameters(): + model_info[name] = getattr(self, name) + + self.inference_adapter.update_model_info(model_info) self.inference_adapter.save_model(path, weights_path, version) diff --git a/tests/python/funtional/test_save.py b/tests/python/funtional/test_save.py index 61511e13..339293b0 100644 --- a/tests/python/funtional/test_save.py +++ b/tests/python/funtional/test_save.py @@ -5,6 +5,8 @@ from pathlib import Path +import onnx + from model_api.models import Model from model_api.adapters import ONNXRuntimeAdapter from model_api.adapters.utils import load_parameters_from_onnx @@ -15,14 +17,15 @@ def test_detector_save(tmp_path): "ssd_mobilenet_v1_fpn_coco", configuration={"mean_values": [0, 0, 0], "confidence_threshold": 0.6}, ) + xml_path = str(tmp_path / "a.xml") + downloaded.save(xml_path) + deserialized = Model.create_model(xml_path) + assert ( - downloaded.get_model() + deserialized.get_model() .get_rt_info(["model_info", "embedded_processing"]) .astype(bool) ) - xml_path = str(tmp_path / "a.xml") - downloaded.save(xml_path) - deserialized = Model.create_model(xml_path) assert type(downloaded) is type(deserialized) for attr in downloaded.parameters(): assert getattr(downloaded, attr) == getattr(deserialized, attr) @@ -32,14 +35,15 @@ def test_classifier_save(tmp_path): downloaded = Model.create_model( "efficientnet-b0-pytorch", configuration={"scale_values": [1, 1, 1], "topk": 6} ) + xml_path = str(tmp_path / "a.xml") + downloaded.save(xml_path) + deserialized = Model.create_model(xml_path) + assert ( - downloaded.get_model() + deserialized.get_model() .get_rt_info(["model_info", "embedded_processing"]) .astype(bool) ) - xml_path = str(tmp_path / "a.xml") - downloaded.save(xml_path) - deserialized = Model.create_model(xml_path) assert type(downloaded) is type(deserialized) for attr in downloaded.parameters(): assert getattr(downloaded, attr) == getattr(deserialized, attr) @@ -50,14 +54,15 @@ def test_segmentor_save(tmp_path): "hrnet-v2-c1-segmentation", configuration={"reverse_input_channels": True, "labels": ["first", "second"]}, ) + xml_path = str(tmp_path / "a.xml") + downloaded.save(xml_path) + deserialized = Model.create_model(xml_path) + assert ( - downloaded.get_model() + deserialized.get_model() .get_rt_info(["model_info", "embedded_processing"]) .astype(bool) ) - xml_path = str(tmp_path / "a.xml") - downloaded.save(xml_path) - deserialized = Model.create_model(xml_path) assert type(downloaded) is type(deserialized) for attr in downloaded.parameters(): assert getattr(downloaded, attr) == getattr(deserialized, attr) @@ -71,17 +76,16 @@ def test_onnx_save(tmp_path, data): configuration={"reverse_input_channels": True, "topk": 6}, ) + onnx_path = str(tmp_path / "a.onnx") + cls_model.save(onnx_path) + deserialized = Model.create_model(onnx_path) + assert ( - load_parameters_from_onnx(cls_model.get_model())["model_info"][ + load_parameters_from_onnx(onnx.load(onnx_path))["model_info"][ "embedded_processing" ] == "True" ) - - onnx_path = str(tmp_path / "a.onnx") - cls_model.save(onnx_path) - - deserialized = Model.create_model(onnx_path) assert type(cls_model) is type(deserialized) for attr in cls_model.parameters(): assert getattr(cls_model, attr) == getattr(deserialized, attr)
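
A usage sketch of the save flow this series ends up with (not part of any patch above; the file names and configuration values are illustrative, but the calls mirror `tests/python/funtional/test_save.py` and the final `Model.save()` from PATCH 11):

```python
from model_api.adapters import ONNXRuntimeAdapter
from model_api.models import Model

# create_model() now accepts an InferenceAdapter instance directly (PATCH 05).
model = Model.create_model(
    ONNXRuntimeAdapter("model.onnx"),  # illustrative path
    model_type="Classification",
    preload=True,
    configuration={"reverse_input_channels": True, "topk": 6},
)

# save() collects the wrapper parameters into a model_info dict, pushes it
# into the adapter via update_model_info(), then delegates serialization to
# adapter.save_model() -- onnx.save() here, ov.serialize() for OpenVINO IR.
model.save("exported.onnx")  # illustrative path

# The exported file carries the embedded metadata, so the wrapper can be
# re-created without repeating the configuration.
restored = Model.create_model("exported.onnx")
```

Routing serialization through the adapter interface removes the hard `import openvino` that the old `Model.save()` carried, which is what lets the ONNX round-trip in `test_onnx_save` work without touching OpenVINO at save time.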