diff --git a/.github/workflows/test_precommit.yml b/.github/workflows/test_precommit.yml
index 85bb440a..0a042a9b 100644
--- a/.github/workflows/test_precommit.yml
+++ b/.github/workflows/test_precommit.yml
@@ -35,7 +35,7 @@ jobs:
       - name: Run test
         run: |
           source venv/bin/activate
-          pytest tests/python/funtional
+          pytest --data=./data tests/python/funtional
   CPP-Code-Quality:
     name: CPP-Code-Quality
     runs-on: ubuntu-latest
diff --git a/model_api/python/model_api/adapters/inference_adapter.py b/model_api/python/model_api/adapters/inference_adapter.py
index 6cf1a87c..efebe977 100644
--- a/model_api/python/model_api/adapters/inference_adapter.py
+++ b/model_api/python/model_api/adapters/inference_adapter.py
@@ -163,6 +163,14 @@ def await_any(self):
     def get_rt_info(self, path):
         """Forwards to openvino.Model.get_rt_info(path)"""
 
+    @abstractmethod
+    def update_model_info(self, model_info: dict[str, Any]):
+        """Updates the model with the provided model info."""
+
+    @abstractmethod
+    def save_model(self, path: str, weights_path: str, version: str):
+        """Serializes the model to the filesystem."""
+
     @abstractmethod
     def embed_preprocessing(
         self,
diff --git a/model_api/python/model_api/adapters/onnx_adapter.py b/model_api/python/model_api/adapters/onnx_adapter.py
index 67fe5367..fd56e2b5 100644
--- a/model_api/python/model_api/adapters/onnx_adapter.py
+++ b/model_api/python/model_api/adapters/onnx_adapter.py
@@ -7,6 +7,7 @@
 
 import sys
 from functools import partial, reduce
+from typing import Any
 
 import numpy as np
 
@@ -55,6 +56,7 @@ def __init__(self, model: str, ort_options: dict = {}):
             inferred_model.SerializeToString(),
             **ort_options,
         )
+        self.model = inferred_model
         self.output_names = [o.name for o in self.session.get_outputs()]
         self.onnx_metadata = load_parameters_from_onnx(inferred_model)
         self.preprocessor = lambda arg: arg
@@ -169,7 +171,7 @@ def embed_preprocessing(
 
     def get_model(self):
-        """Return the reference to the ONNXRuntime session."""
-        return self.session
+        """Return the reference to the ONNX model."""
+        return self.model
 
     def reshape_model(self, new_shape):
         raise NotImplementedError
@@ -177,6 +179,18 @@ def reshape_model(self, new_shape):
     def get_rt_info(self, path):
         return get_rt_info_from_dict(self.onnx_metadata, path)
 
+    def update_model_info(self, model_info: dict[str, Any]):
+        for name, value in model_info.items():
+            meta = self.model.metadata_props.add()
+            meta.key = f"model_info {name}"
+            if isinstance(value, list):
+                meta.value = " ".join(str(x) for x in value)
+            else:
+                meta.value = str(value)
+
+    def save_model(self, path: str, weights_path: str = "", version: str = "UNSPECIFIED"):
+        onnx.save(self.model, path)
+
 
 _onnx2ov_precision = {
     "tensor(float)": "f32",
diff --git a/model_api/python/model_api/adapters/openvino_adapter.py b/model_api/python/model_api/adapters/openvino_adapter.py
index df09b495..03f973e0 100644
--- a/model_api/python/model_api/adapters/openvino_adapter.py
+++ b/model_api/python/model_api/adapters/openvino_adapter.py
@@ -419,6 +419,19 @@ def get_model(self):
         """
         return self.model
 
+    def update_model_info(self, model_info: dict[str, Any]):
+        """
+        Populates OV IR RT info with the given model info.
+
+        Args:
+            model_info (dict[str, Any]): a dict representing the serialized parameters.
+ """ + for name in model_info: + self.model.set_rt_info(model_info[name], ["model_info", name]) + + def save_model(self, path: str, weights_path: str = "", version: str = "UNSPECIFIED"): + ov.serialize(self.get_model(), path, weights_path, version) + def get_input_shape(input_tensor: ov.Output) -> list[int]: def string_to_tuple(string, casting_type=int): diff --git a/model_api/python/model_api/adapters/ovms_adapter.py b/model_api/python/model_api/adapters/ovms_adapter.py index 42c63f1c..d07d479e 100644 --- a/model_api/python/model_api/adapters/ovms_adapter.py +++ b/model_api/python/model_api/adapters/ovms_adapter.py @@ -4,6 +4,7 @@ # import re +from typing import Any import numpy as np @@ -122,6 +123,14 @@ def get_rt_info(self, path): msg = "OVMSAdapter does not support RT info getting" raise NotImplementedError(msg) + def update_model_info(self, model_info: dict[str, Any]): + msg = "OVMSAdapter does not support updating model info" + raise NotImplementedError(msg) + + def save_model(self, path: str, weights_path: str = "", version: str = "UNSPECIFIED"): + msg = "OVMSAdapter does not support saving a model" + raise NotImplementedError(msg) + _tf2ov_precision = { "DT_INT64": "I64", diff --git a/model_api/python/model_api/models/model.py b/model_api/python/model_api/models/model.py index 1eb7a606..6f001454 100644 --- a/model_api/python/model_api/models/model.py +++ b/model_api/python/model_api/models/model.py @@ -10,6 +10,7 @@ from contextlib import contextmanager from typing import TYPE_CHECKING, Any, NoReturn, Type +from model_api.adapters.inference_adapter import InferenceAdapter from model_api.adapters.onnx_adapter import ONNXRuntimeAdapter from model_api.adapters.openvino_adapter import ( OpenvinoAdapter, @@ -23,8 +24,6 @@ from numpy import ndarray - from model_api.adapters.inference_adapter import InferenceAdapter - class WrapperError(Exception): """The class for errors occurred in Model API wrappers""" @@ -100,11 +99,7 @@ def __init__(self, inference_adapter: InferenceAdapter, configuration: dict = {} self.callback_fn = lambda _: None def get_model(self): - model = self.inference_adapter.get_model() - model.set_rt_info(self.__model__, ["model_info", "model_type"]) - for name in self.parameters(): - model.set_rt_info(getattr(self, name), ["model_info", name]) - return model + return self.inference_adapter.get_model() @classmethod def get_model_class(cls, name: str) -> Type: @@ -122,7 +117,7 @@ def get_model_class(cls, name: str) -> Type: @classmethod def create_model( cls, - model: str, + model: str | InferenceAdapter, model_type: Any | None = None, configuration: dict[str, Any] = {}, preload: bool = True, @@ -140,7 +135,7 @@ def create_model( """Create an instance of the Model API model Args: - model (str): model name from OpenVINO Model Zoo, path to model, OVMS URL + model (str| InferenceAdapter): model name from OpenVINO Model Zoo, path to model, OVMS URL, or an adapter configuration (:obj:`dict`, optional): dictionary of model config with model properties, for example confidence_threshold, labels model_type (:obj:`str`, optional): name of model wrapper to create (e.g. 
"ssd") @@ -162,7 +157,9 @@ def create_model( Model object """ inference_adapter: InferenceAdapter - if isinstance(model, str) and re.compile( + if isinstance(model, InferenceAdapter): + inference_adapter = model + elif isinstance(model, str) and re.compile( r"(\w+\.*\-*)*\w+:\d+\/models\/[a-zA-Z0-9._-]+(\:\d+)*", ).fullmatch(model): inference_adapter = OVMSAdapter(model) @@ -487,7 +484,12 @@ def log_layers_info(self): f"precision: {metadata.precision}, layout: {metadata.layout}", ) - def save(self, xml_path, bin_path="", version="UNSPECIFIED"): - import openvino + def save(self, path: str, weights_path: str = "", version: str = "UNSPECIFIED"): + model_info = { + "model_type": self.__model__, + } + for name in self.parameters(): + model_info[name] = getattr(self, name) - openvino.serialize(self.get_model(), xml_path, bin_path, version) + self.inference_adapter.update_model_info(model_info) + self.inference_adapter.save_model(path, weights_path, version) diff --git a/tests/cpp/precommit/prepare_data.py b/tests/cpp/precommit/prepare_data.py index 22b82f46..8a7f4ac7 100644 --- a/tests/cpp/precommit/prepare_data.py +++ b/tests/cpp/precommit/prepare_data.py @@ -5,17 +5,23 @@ from urllib.request import urlopen, urlretrieve -def retrieve_otx_model(data_dir, model_name): - destenation_folder = os.path.join(data_dir, "otx_models") - os.makedirs(destenation_folder, exist_ok=True) - urlretrieve( - f"https://storage.openvinotoolkit.org/repositories/model_api/test/otx_models/{model_name}/openvino.xml", - f"{destenation_folder}/{model_name}.xml", - ) - urlretrieve( - f"https://storage.openvinotoolkit.org/repositories/model_api/test/otx_models/{model_name}/openvino.bin", - f"{destenation_folder}/{model_name}.bin", - ) +def retrieve_otx_model(data_dir, model_name, format="xml"): + destination_folder = os.path.join(data_dir, "otx_models") + os.makedirs(destination_folder, exist_ok=True) + if format == "onnx": + urlretrieve( + f"https://storage.openvinotoolkit.org/repositories/model_api/test/otx_models/{model_name}/model.onnx", + f"{destination_folder}/{model_name}.onnx", + ) + else: + urlretrieve( + f"https://storage.openvinotoolkit.org/repositories/model_api/test/otx_models/{model_name}/openvino.xml", + f"{destination_folder}/{model_name}.xml", + ) + urlretrieve( + f"https://storage.openvinotoolkit.org/repositories/model_api/test/otx_models/{model_name}/openvino.bin", + f"{destination_folder}/{model_name}.bin", + ) def prepare_model( @@ -30,7 +36,7 @@ def prepare_model( public_scope = json.load(f) for model in public_scope: - if model["name"].endswith(".xml"): + if model["name"].endswith(".xml") or model["name"].endswith(".onnx"): continue model = eval(model["type"]).create_model(model["name"], download_dir=data_dir) @@ -72,3 +78,4 @@ def prepare_data(data_dir="./data"): prepare_data(args.data_dir) retrieve_otx_model(args.data_dir, "mlc_mobilenetv3_large_voc") retrieve_otx_model(args.data_dir, "tinynet_imagenet") + retrieve_otx_model(args.data_dir, "cls_mobilenetv3_large_cars", "onnx") diff --git a/tests/python/funtional/__init__.py b/tests/python/funtional/__init__.py new file mode 100644 index 00000000..7ac51207 --- /dev/null +++ b/tests/python/funtional/__init__.py @@ -0,0 +1,4 @@ +# +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 +# diff --git a/tests/python/funtional/conftest.py b/tests/python/funtional/conftest.py new file mode 100644 index 00000000..2ef51d3d --- /dev/null +++ b/tests/python/funtional/conftest.py @@ -0,0 +1,15 @@ +# +# Copyright (C) 2024 Intel 
+# SPDX-License-Identifier: Apache-2.0
+#
+
+import pytest
+
+
+def pytest_addoption(parser):
+    parser.addoption("--data", action="store", help="data folder with dataset")
+
+
+@pytest.fixture(scope="session")
+def data(pytestconfig):
+    return pytestconfig.getoption("data")
diff --git a/tests/python/funtional/test_load.py b/tests/python/funtional/test_load.py
index a0d79e0c..37dc6c98 100644
--- a/tests/python/funtional/test_load.py
+++ b/tests/python/funtional/test_load.py
@@ -2,13 +2,16 @@
 # Copyright (C) 2020-2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 #
+
+from pathlib import Path
+
 from model_api.models import Model
 
 
-def test_model_with_unnamed_output_load():
+def test_model_with_unnamed_output_load(data):
     # the model's output doesn't have a name
     _ = Model.create_model(
-        "data/otx_models/tinynet_imagenet.xml",
+        Path(data) / "otx_models/tinynet_imagenet.xml",
         model_type="Classification",
         preload=True,
     )
diff --git a/tests/python/funtional/test_save.py b/tests/python/funtional/test_save.py
index b69a9b48..339293b0 100644
--- a/tests/python/funtional/test_save.py
+++ b/tests/python/funtional/test_save.py
@@ -2,7 +2,14 @@
 # Copyright (C) 2020-2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 #
+
+from pathlib import Path
+
+import onnx
+
 from model_api.models import Model
+from model_api.adapters import ONNXRuntimeAdapter
+from model_api.adapters.utils import load_parameters_from_onnx
 
 
 def test_detector_save(tmp_path):
@@ -10,14 +17,15 @@ def test_detector_save(tmp_path):
         "ssd_mobilenet_v1_fpn_coco",
         configuration={"mean_values": [0, 0, 0], "confidence_threshold": 0.6},
     )
+    xml_path = str(tmp_path / "a.xml")
+    downloaded.save(xml_path)
+    deserialized = Model.create_model(xml_path)
+
     assert (
-        downloaded.get_model()
+        deserialized.get_model()
         .get_rt_info(["model_info", "embedded_processing"])
         .astype(bool)
     )
-    xml_path = str(tmp_path / "a.xml")
-    downloaded.save(xml_path)
-    deserialized = Model.create_model(xml_path)
     assert type(downloaded) is type(deserialized)
     for attr in downloaded.parameters():
         assert getattr(downloaded, attr) == getattr(deserialized, attr)
@@ -27,14 +35,15 @@ def test_classifier_save(tmp_path):
     downloaded = Model.create_model(
         "efficientnet-b0-pytorch", configuration={"scale_values": [1, 1, 1], "topk": 6}
     )
+    xml_path = str(tmp_path / "a.xml")
+    downloaded.save(xml_path)
+    deserialized = Model.create_model(xml_path)
+
     assert (
-        downloaded.get_model()
+        deserialized.get_model()
         .get_rt_info(["model_info", "embedded_processing"])
         .astype(bool)
     )
-    xml_path = str(tmp_path / "a.xml")
-    downloaded.save(xml_path)
-    deserialized = Model.create_model(xml_path)
     assert type(downloaded) is type(deserialized)
     for attr in downloaded.parameters():
         assert getattr(downloaded, attr) == getattr(deserialized, attr)
@@ -45,14 +54,38 @@ def test_segmentor_save(tmp_path):
         "hrnet-v2-c1-segmentation",
         configuration={"reverse_input_channels": True, "labels": ["first", "second"]},
     )
+    xml_path = str(tmp_path / "a.xml")
+    downloaded.save(xml_path)
+    deserialized = Model.create_model(xml_path)
+
     assert (
-        downloaded.get_model()
+        deserialized.get_model()
         .get_rt_info(["model_info", "embedded_processing"])
         .astype(bool)
     )
-    xml_path = str(tmp_path / "a.xml")
-    downloaded.save(xml_path)
-    deserialized = Model.create_model(xml_path)
     assert type(downloaded) is type(deserialized)
     for attr in downloaded.parameters():
         assert getattr(downloaded, attr) == getattr(deserialized, attr)
+
+
+def test_onnx_save(tmp_path, data):
+    cls_model = Model.create_model(
+        ONNXRuntimeAdapter(Path(data) / "otx_models/cls_mobilenetv3_large_cars.onnx"),
+        model_type="Classification",
+        preload=True,
+        configuration={"reverse_input_channels": True, "topk": 6},
+    )
+
+    onnx_path = str(tmp_path / "a.onnx")
+    cls_model.save(onnx_path)
+    deserialized = Model.create_model(onnx_path)
+
+    assert (
+        load_parameters_from_onnx(onnx.load(onnx_path))["model_info"][
+            "embedded_processing"
+        ]
+        == "True"
+    )
+    assert type(cls_model) is type(deserialized)
+    for attr in cls_model.parameters():
+        assert getattr(cls_model, attr) == getattr(deserialized, attr)
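To illustrate the refactored save path end to end, here is a minimal sketch (not part of the diff): the file names and configuration values are illustrative, and it assumes an OpenVINO IR wrapper like the ones exercised by the tests above.

```python
from model_api.models import Model

# Create a wrapper; "model.xml" and the configuration are illustrative.
model = Model.create_model(
    "model.xml",
    model_type="Classification",
    configuration={"topk": 3},
)

# save() no longer calls openvino.serialize() itself: it collects
# model_type plus every wrapper parameter into a model_info dict,
# pushes it through the adapter's update_model_info(), and then
# delegates serialization to the adapter's save_model(). The same
# call therefore also works for a wrapper backed by ONNXRuntimeAdapter.
model.save("exported.xml", weights_path="exported.bin")

# Because the parameters travel with the serialized file, an equivalent
# wrapper can be restored without repeating model_type or configuration.
restored = Model.create_model("exported.xml")
assert type(restored) is type(model)

# create_model() now also accepts an InferenceAdapter directly, e.g.:
#   Model.create_model(ONNXRuntimeAdapter("model.onnx"), model_type="Classification")
```

Moving serialization behind InferenceAdapter keeps Model.save() format-agnostic; adapters that cannot serialize, such as OVMSAdapter, raise NotImplementedError instead.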