Skip to content

Commit 4818b4f

Browse files
authored
refactor: modularize MNN and OpenVINO Inference Engines (#629)
* refactor: modularize mnn and openvino inference engines * Revert test_engine.py to original version
1 parent 86c527e commit 4818b4f

File tree

6 files changed

+76
-50
lines changed

6 files changed

+76
-50
lines changed
Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
# -*- encoding: utf-8 -*-
2+
# @Author: SWHL
3+
# @Contact: liekkaskono@163.com
4+
from .main import MNNError, MNNInferSession

python/rapidocr/inference_engine/mnn.py renamed to python/rapidocr/inference_engine/mnn/main.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,16 @@
11
# -*- encoding: utf-8 -*-
2+
# @Author: SWHL
3+
# @Contact: liekkaskono@163.com
24
import traceback
35
from pathlib import Path
46

57
import MNN
68
import numpy as np
79
from omegaconf import DictConfig
810

9-
from ..utils.download_file import DownloadFile, DownloadFileInput
10-
from ..utils.log import logger
11-
from .base import FileInfo, InferSession
11+
from ...utils.download_file import DownloadFile, DownloadFileInput
12+
from ...utils.log import logger
13+
from ..base import FileInfo, InferSession
1214

1315

1416
class MNNInferSession(InferSession):
@@ -59,9 +61,7 @@ def __call__(self, img: np.ndarray) -> np.ndarray:
5961
output = self.interpreter.getSessionOutput(self.session)
6062
out_shape = output.getShape()
6163
out_tensor = MNN.Tensor(
62-
out_shape,
63-
MNN.Halide_Type_Float,
64-
MNN.Tensor_DimensionType_Caffe
64+
out_shape, MNN.Halide_Type_Float, MNN.Tensor_DimensionType_Caffe
6565
)
6666
output.copyToHostTensor(out_tensor)
6767

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
# -*- encoding: utf-8 -*-
2+
# @Author: SWHL
3+
# @Contact: liekkaskono@163.com
4+
from .main import OpenVINOInferSession, OpenVIONError
Lines changed: 51 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,51 @@
1+
# -*- encoding: utf-8 -*-
2+
# @Author: SWHL
3+
# @Contact: liekkaskono@163.com
4+
import os
5+
from typing import Any, Dict
6+
7+
from omegaconf import DictConfig
8+
9+
from ...utils.log import logger
10+
11+
12+
class CPUConfig:
    """Configuration handler for OpenVINO CPU execution.

    Translates the user-facing ``engine_cfg`` options into the property
    dictionary that ``openvino.Core.set_property("CPU", ...)`` expects.
    Only options explicitly set by the user are forwarded; everything else
    keeps OpenVINO's built-in defaults.
    """

    def __init__(self, engine_cfg: DictConfig):
        # engine_cfg is the `engine_cfg` sub-section of the OpenVINO engine
        # config; every key is optional.
        self.cfg = engine_cfg

    def get_config(self) -> Dict[str, Any]:
        """Build the OpenVINO CPU configuration dictionary.

        Returns:
            Dict[str, Any]: OpenVINO CPU property names mapped to their
            values. OpenVINO expects property values as strings, so every
            forwarded value is passed through ``str()``.
        """
        config: Dict[str, Any] = {}

        # os.cpu_count() may return None on platforms where the count is
        # undeterminable; fall back to 1 so the range check cannot raise
        # a TypeError. Thread counts outside [1, cpu_count] are ignored.
        max_threads = os.cpu_count() or 1
        infer_num_threads = self.cfg.get("inference_num_threads", -1)
        if infer_num_threads != -1 and 1 <= infer_num_threads <= max_threads:
            config["INFERENCE_NUM_THREADS"] = str(infer_num_threads)

        # (config key, OpenVINO property name, "unset" sentinel), listed in
        # the original emission order so the resulting dict — and therefore
        # the logged config line — stays byte-for-byte stable.
        optional_props = (
            ("performance_hint", "PERFORMANCE_HINT", None),
            ("performance_num_requests", "PERFORMANCE_HINT_NUM_REQUESTS", -1),
            ("enable_cpu_pinning", "ENABLE_CPU_PINNING", None),
            ("num_streams", "NUM_STREAMS", -1),
            ("enable_hyper_threading", "ENABLE_HYPER_THREADING", None),
            ("scheduling_core_type", "SCHEDULING_CORE_TYPE", None),
        )
        for cfg_key, ov_key, unset in optional_props:
            value = self.cfg.get(cfg_key, unset)
            # Matches the original per-option checks: None-sentinel options
            # used `is not None`, -1-sentinel options used `!= -1`; `!=`
            # covers both for these value types.
            if value != unset:
                config[ov_key] = str(value)

        logger.info(f"Using OpenVINO config: {config}")
        return config

python/rapidocr/inference_engine/openvino.py renamed to python/rapidocr/inference_engine/openvino/main.py

Lines changed: 9 additions & 43 deletions
Original file line numberDiff line numberDiff line change
@@ -1,21 +1,21 @@
11
# -*- encoding: utf-8 -*-
22
# @Author: SWHL
33
# @Contact: liekkaskono@163.com
4-
import os
54
import traceback
65
from pathlib import Path
7-
from typing import Any, Dict
86

97
import numpy as np
108
from omegaconf import DictConfig
9+
1110
try:
1211
from openvino import Core
13-
except ImportError: # fallback for older OpenVINO versions
12+
except ImportError:
1413
from openvino.runtime import Core
1514

16-
from ..utils.download_file import DownloadFile, DownloadFileInput
17-
from ..utils.log import logger
18-
from .base import FileInfo, InferSession
15+
from ...utils.download_file import DownloadFile, DownloadFileInput
16+
from ...utils.log import logger
17+
from ..base import FileInfo, InferSession
18+
from .device_config import CPUConfig
1919

2020

2121
class OpenVINOInferSession(InferSession):
@@ -48,48 +48,14 @@ def __init__(self, cfg: DictConfig):
4848
model_path = Path(model_path)
4949
self._verify_model(model_path)
5050

51-
config = self._init_config(cfg)
52-
core.set_property("CPU", config)
51+
# Use dedicated config class
52+
cpu_config = CPUConfig(cfg.get("engine_cfg", {}))
53+
core.set_property("CPU", cpu_config.get_config())
5354

5455
model_onnx = core.read_model(model_path)
5556
compile_model = core.compile_model(model=model_onnx, device_name="CPU")
5657
self.session = compile_model.create_infer_request()
5758

58-
def _init_config(self, cfg: DictConfig) -> Dict[Any, Any]:
59-
config = {}
60-
engine_cfg = cfg.get("engine_cfg", {})
61-
62-
infer_num_threads = engine_cfg.get("inference_num_threads", -1)
63-
if infer_num_threads != -1 and 1 <= infer_num_threads <= os.cpu_count():
64-
config["INFERENCE_NUM_THREADS"] = str(infer_num_threads)
65-
66-
performance_hint = engine_cfg.get("performance_hint", None)
67-
if performance_hint is not None:
68-
config["PERFORMANCE_HINT"] = str(performance_hint)
69-
70-
performance_num_requests = engine_cfg.get("performance_num_requests", -1)
71-
if performance_num_requests != -1:
72-
config["PERFORMANCE_HINT_NUM_REQUESTS"] = str(performance_num_requests)
73-
74-
enable_cpu_pinning = engine_cfg.get("enable_cpu_pinning", None)
75-
if enable_cpu_pinning is not None:
76-
config["ENABLE_CPU_PINNING"] = str(enable_cpu_pinning)
77-
78-
num_streams = engine_cfg.get("num_streams", -1)
79-
if num_streams != -1:
80-
config["NUM_STREAMS"] = str(num_streams)
81-
82-
enable_hyper_threading = engine_cfg.get("enable_hyper_threading", None)
83-
if enable_hyper_threading is not None:
84-
config["ENABLE_HYPER_THREADING"] = str(enable_hyper_threading)
85-
86-
scheduling_core_type = engine_cfg.get("scheduling_core_type", None)
87-
if scheduling_core_type is not None:
88-
config["SCHEDULING_CORE_TYPE"] = str(scheduling_core_type)
89-
90-
logger.info(f"Using OpenVINO config: {config}")
91-
return config
92-
9359
def __call__(self, input_content: np.ndarray) -> np.ndarray:
9460
try:
9561
self.session.infer(inputs=[input_content])

python/rapidocr/inference_engine/paddle/device_config.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -70,7 +70,8 @@ def config_cpu(self):
7070

7171
if hasattr(self.infer_opts, "enable_new_executor"):
7272
self.infer_opts.enable_new_executor()
73-
self.infer_opts.set_optimization_level(3)
73+
if hasattr(self.infer_opts, "set_optimization_level"):
74+
self.infer_opts.set_optimization_level(3)
7475

7576
@staticmethod
7677
def setup_device_envs(envs):

0 commit comments

Comments (0)