 
 import logging as log
 import re
+from abc import ABC
 from contextlib import contextmanager
 
 from model_api.adapters.inference_adapter import InferenceAdapter
@@ -35,7 +36,7 @@ def __init__(self, wrapper_name, message):
         super().__init__(f"{wrapper_name}: {message}")
 
 
-class Model:
+class Model(ABC):
     """An abstract model wrapper
 
     The abstract model wrapper is free from any executor dependencies.
@@ -61,7 +62,7 @@ class Model:
         model_loaded (bool): a flag whether the model is loaded to device
     """
 
-    __model__ = None  # Abstract wrapper has no name
+    __model__: str
 
     def __init__(self, inference_adapter, configuration=dict(), preload=False):
         """Model constructor
@@ -101,19 +102,11 @@ def __init__(self, inference_adapter, configuration=dict(), preload=False):
         self.callback_fn = lambda _: None
 
     def get_model(self):
-        """Returns the ov.Model object stored in the InferenceAdapter.
-
-        Note: valid only for local inference
-
-        Returns:
-            ov.Model object
-        Raises:
-            RuntimeError: in case of remote inference (serving)
-        """
-        if isinstance(self.inference_adapter, OpenvinoAdapter):
-            return self.inference_adapter.get_model()
-
-        raise RuntimeError("get_model() is not supported for remote inference")
+        model = self.inference_adapter.get_model()
+        model.set_rt_info(self.__model__, ["model_info", "model_type"])
+        for name in self.parameters():
+            model.set_rt_info(getattr(self, name), ["model_info", name])
+        return model
 
     @classmethod
     def get_model_class(cls, name):
@@ -281,8 +274,8 @@ def _load_config(self, config):
                 errors = parameters[name].validate(value)
                 if errors:
                     self.logger.error(f'Error with "{name}" parameter:')
-                    for error in errors:
-                        self.logger.error(f"\t{error}")
+                    for _error in errors:
+                        self.logger.error(f"\t{_error}")
                     self.raise_error("Incorrect user configuration")
                 value = parameters[name].get_value(value)
                 self.__setattr__(name, value)
@@ -359,7 +352,7 @@ def _check_io_number(self, number_of_inputs, number_of_outputs):
                 )
             )
         else:
-            if not len(self.inputs) in number_of_inputs:
+            if len(self.inputs) not in number_of_inputs:
                 self.raise_error(
                     "Expected {} or {} input blobs, but {} found: {}".format(
                         ", ".join(str(n) for n in number_of_inputs[:-1]),
@@ -380,7 +373,7 @@ def _check_io_number(self, number_of_inputs, number_of_outputs):
                 )
             )
         else:
-            if not len(self.outputs) in number_of_outputs:
+            if len(self.outputs) not in number_of_outputs:
                 self.raise_error(
                     "Expected {} or {} output blobs, but {} found: {}".format(
                         ", ".join(str(n) for n in number_of_outputs[:-1]),
@@ -523,12 +516,6 @@ def log_layers_info(self):
             )
         )
 
-    def get_model(self):
-        model = self.inference_adapter.get_model()
-        model.set_rt_info(self.__model__, ["model_info", "model_type"])
-        for name in self.parameters():
-            model.set_rt_info(getattr(self, name), ["model_info", name])
-        return model
 
     def save(self, xml_path, bin_path="", version="UNSPECIFIED"):
         import openvino
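
Not part of the diff: a minimal usage sketch of the consolidated get_model(). It assumes a local OpenVINO adapter; the import path model_api.models, the Model.create_model helper, and the file names are illustrative rather than taken from this commit. It shows that the returned ov.Model now carries the wrapper's model_info runtime attributes (the wrapper name plus every wrapper parameter), which save() can then persist into a self-describing IR.

# Sketch only: assumes a local OpenvinoAdapter and an existing IR file "model.xml".
from model_api.models import Model

# Concrete wrapper is resolved from configuration / embedded rt_info.
wrapper = Model.create_model("model.xml")

# get_model() now always fetches the ov.Model from the adapter and stamps
# model_info (model_type and each wrapper parameter) into its runtime info.
ov_model = wrapper.get_model()
print(ov_model.get_rt_info(["model_info", "model_type"]))

# save() writes an IR that can later be restored without an explicit configuration.
wrapper.save("model_with_info.xml")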