Skip to content

Commit 6326e58

Browse files
committed
Add 2023.1 support; Fix string nullptr bug
1 parent 9112813 commit 6326e58

File tree

8 files changed

+41
-38
lines changed

8 files changed

+41
-38
lines changed

cmake/CMakeLists.txt

Lines changed: 5 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -1164,9 +1164,12 @@ if (onnxruntime_USE_OPENVINO)
11641164
elseif ($ENV{INTEL_OPENVINO_DIR} MATCHES "2023.0")
11651165
set(OPENVINO_VERSION "2023.0")
11661166
add_definitions(-DOPENVINO_2023_0=1)
1167+
elseif ($ENV{INTEL_OPENVINO_DIR} MATCHES "2023.1")
1168+
set(OPENVINO_VERSION "2023.1")
1169+
add_definitions(-DOPENVINO_2023_1=1)
11671170
elseif ($ENV{INTEL_OPENVINO_DIR} MATCHES "openvino")
1168-
set(OPENVINO_VERSION "2023.0")
1169-
add_definitions(-DOPENVINO_2023_0=1)
1171+
set(OPENVINO_VERSION "2023.1")
1172+
add_definitions(-DOPENVINO_2023_1=1)
11701173
else()
11711174
message(FATAL_ERROR "Unsupported OpenVINO version: ${INTEL_OPENVINO_DIR}")
11721175
endif()

include/onnxruntime/core/session/onnxruntime_c_api.h

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -593,7 +593,7 @@ typedef struct OrtOpenVINOProviderOptions {
593593
OrtOpenVINOProviderOptions() : device_type{},
594594
enable_vpu_fast_compile{},
595595
device_id{},
596-
num_of_threads{},
596+
num_of_threads{1},
597597
cache_dir{},
598598
context{},
599599
enable_opencl_throttling{},

onnxruntime/core/providers/openvino/backend_utils.cc

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -96,7 +96,7 @@ CreateOVModel(const ONNX_NAMESPACE::ModelProto& model_proto, const GlobalContext
9696
}
9797
}
9898
#ifndef NDEBUG
99-
#if defined(OPENVINO_2022_3) || (OPENVINO_2023_0)
99+
#if defined(OPENVINO_2022_3) || (OPENVINO_2023_0) || (OPENVINO_2023_1)
100100
if (IsDebugEnabled()) {
101101
std::string name = cnn_network->get_friendly_name();
102102
ov::pass::Serialize serializer(name + ".xml", name + ".bin");

onnxruntime/core/providers/openvino/backends/basic_backend.cc

Lines changed: 14 additions & 14 deletions
Original file line number | Diff line number | Diff line change
@@ -57,7 +57,7 @@ BasicBackend::BasicBackend(const ONNX_NAMESPACE::ModelProto& model_proto,
5757
LOGS_DEFAULT(INFO) << log_tag << "Loaded model to the plugin";
5858
} else {
5959
#if defined(OPENVINO_2023_0)
60-
if (!subgraph_context_.has_dynamic_input_shape && dev_prec!="CPU_FP16") {
60+
if (!subgraph_context_.has_dynamic_input_shape && dev_prec != "CPU_FP16") {
6161
const std::string model = model_proto.SerializeAsString();
6262
exe_network_ = global_context_.ie_core.LoadNetwork(model, hw_target, device_config, subgraph_context_.subgraph_name);
6363
LOGS_DEFAULT(INFO) << log_tag << "Loaded model to the plugin";
@@ -72,8 +72,8 @@ BasicBackend::BasicBackend(const ONNX_NAMESPACE::ModelProto& model_proto,
7272
LOGS_DEFAULT(INFO) << log_tag << "Loaded model to the plugin";
7373
#endif
7474
#else
75-
#if defined(OPENVINO_2023_0)
76-
if (!subgraph_context_.has_dynamic_input_shape && dev_prec!="CPU_FP16") {
75+
#if defined(OPENVINO_2023_0) || (OPENVINO_2023_1)
76+
if (!subgraph_context_.has_dynamic_input_shape && dev_prec != "CPU_FP16") {
7777
const std::string model = model_proto.SerializeAsString();
7878
exe_network_ = global_context_.ie_core.LoadNetwork(model, hw_target, device_config, subgraph_context_.subgraph_name);
7979
LOGS_DEFAULT(INFO) << log_tag << "Loaded model to the plugin";
@@ -124,18 +124,18 @@ BasicBackend::BasicBackend(const ONNX_NAMESPACE::ModelProto& model_proto,
124124
void BasicBackend::PopulateConfigValue(ov::AnyMap & device_config) {
125125
device_config = {};
126126
// Set inference precision based on device precision for OV backend
127-
if (global_context_.precision_str.find("FP16")!= std::string::npos && global_context_.device_type == "GPU"){
127+
if (global_context_.precision_str.find("FP16") != std::string::npos && global_context_.device_type == "GPU") {
128128
device_config.emplace(ov::hint::inference_precision("f16"));
129129
}
130-
if (global_context_.precision_str.find("FP32")!= std::string::npos){
130+
if (global_context_.precision_str.find("FP32") != std::string::npos) {
131131
device_config.emplace(ov::hint::inference_precision("f32"));
132132
}
133133
#ifndef NDEBUG
134134
if (openvino_ep::backend_utils::IsDebugEnabled()) {
135135
device_config.emplace(ov::enable_profiling(true));
136136
}
137137
#endif
138-
#if defined(OPENVINO_2023_0)
138+
#if defined(OPENVINO_2023_0) || (OPENVINO_2023_1)
139139
if (global_context_.device_type.find("VPUX") != std::string::npos) {
140140
std::pair<std::string, ov::Any> device_property;
141141
device_property = std::make_pair("VPUX_COMPILER_TYPE", "MLIR");
@@ -160,15 +160,15 @@ BasicBackend::BasicBackend(const ONNX_NAMESPACE::ModelProto& model_proto,
160160
}
161161
}
162162

163-
void BasicBackend::EnableGPUThrottling(ov::AnyMap& device_config) {
164-
if (global_context_.enable_opencl_throttling == true && global_context_.device_type.find("GPU") != std::string::npos) {
165-
LOGS_DEFAULT(INFO) << log_tag << "Enabled OpenCL queue throttling for GPU device";
166-
std::pair<std::string, ov::Any> device_property;
167-
device_property = std::make_pair("PLUGIN_THROTTLE", "1");
168-
device_config.emplace(ov::device::properties("GPU_CONFIG_KEY", device_property));
169-
// device_config[GPU_CONFIG_KEY(PLUGIN_THROTTLE)] = "1";
163+
void BasicBackend::EnableGPUThrottling(ov::AnyMap & device_config) {
164+
if (global_context_.enable_opencl_throttling == true && global_context_.device_type.find("GPU") != std::string::npos) {
165+
LOGS_DEFAULT(INFO) << log_tag << "Enabled OpenCL queue throttling for GPU device";
166+
std::pair<std::string, ov::Any> device_property;
167+
device_property = std::make_pair("PLUGIN_THROTTLE", "1");
168+
device_config.emplace(ov::device::properties("GPU_CONFIG_KEY", device_property));
169+
// device_config[GPU_CONFIG_KEY(PLUGIN_THROTTLE)] = "1";
170+
}
170171
}
171-
}
172172

173173
// Starts an asynchronous inference request for data in slice indexed by batch_slice_idx on
174174
// an Infer Request indexed by infer_req_idx

onnxruntime/core/providers/openvino/openvino_execution_provider.cc

Lines changed: 4 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -137,6 +137,10 @@ OpenVINOExecutionProvider::GetCapability(const GraphViewer& graph_viewer,
137137
openvino_ep::GetCapability obj(graph_viewer,
138138
openvino_ep::BackendManager::GetGlobalContext().device_type, "V_2023_0");
139139
result = obj.Execute();
140+
#elif defined(OPENVINO_2023_1)
141+
openvino_ep::GetCapability obj(graph_viewer,
142+
openvino_ep::BackendManager::GetGlobalContext().device_type, "V_2023_1");
143+
result = obj.Execute();
140144
#endif
141145

142146
return result;

onnxruntime/core/providers/openvino/openvino_provider_factory.cc

Lines changed: 10 additions & 13 deletions
Original file line number | Diff line number | Diff line change
@@ -64,31 +64,28 @@ struct OpenVINO_Provider : Provider {
6464

6565
std::shared_ptr<IExecutionProviderFactory> CreateExecutionProviderFactory(const void* void_params) override {
6666
auto& params = *reinterpret_cast<const OrtOpenVINOProviderOptions*>(void_params);
67-
std::string device_type = params.device_type;
68-
int num_of_threads = 1;
69-
std::set<std::string> ov_supported_device_types = {"CPU_FP32", "CPU_FP16", "GPU_FP32",
67+
if (params.device_type != nullptr) { // check for device_type correctness only if provided, skip checks otherwise
68+
std::string device_type = params.device_type;
69+
std::set<std::string> ov_supported_device_types = {"CPU_FP32", "CPU_FP16", "GPU_FP32",
7070
"GPU.0_FP32", "GPU.1_FP32", "GPU_FP16",
7171
"GPU.0_FP16", "GPU.1_FP16",
7272
"NPU_FP16", "NPU_U8"};
7373

7474
if (!((ov_supported_device_types.find(device_type) != ov_supported_device_types.end()) ||
7575
(device_type.find("HETERO:") == 0) || (device_type.find("MULTI:") == 0) || (device_type.find("AUTO:") == 0))) {
76-
LOGS_DEFAULT(ERROR) <<
77-
"[ERROR] [OpenVINO] You have selcted wrong configuration value for the key 'device_type'.\n "
78-
"Select from 'CPU_FP32', 'CPU_FP16', 'GPU_FP32', 'GPU.0_FP32', 'GPU.1_FP32', 'GPU_FP16', "
79-
"'GPU.0_FP16', 'GPU.1_FP16', 'NPU_FP16', 'NPU_U8' or from"
80-
" HETERO/MULTI/AUTO options available. \n";
76+
LOGS_DEFAULT(ERROR) << "[ERROR] [OpenVINO] You have selcted wrong configuration value for the key 'device_type'.\n "
77+
"Select from 'CPU_FP32', 'CPU_FP16', 'GPU_FP32', 'GPU.0_FP32', 'GPU.1_FP32', 'GPU_FP16', "
78+
"'GPU.0_FP16', 'GPU.1_FP16', 'NPU_FP16', 'NPU_U8' or from"
79+
" HETERO/MULTI/AUTO options available. \n";
8180
}
82-
83-
num_of_threads = params.num_of_threads;
81+
}
82+
int num_of_threads = params.num_of_threads;
8483
if (num_of_threads <= 0) {
8584
num_of_threads = 1;
8685
LOGS_DEFAULT(WARNING) << "[OpenVINO-EP] The value for the key 'num_threads' should be in the positive range.\n "
87-
<< "Executing with num_threads=1";
86+
<< "Executing with num_threads=1";
8887
}
8988

90-
91-
9289
return std::make_shared<OpenVINOProviderFactory>(params.device_type, params.enable_vpu_fast_compile,
9390
params.device_id, num_of_threads,
9491
params.cache_dir,

onnxruntime/core/providers/openvino/ov_interface.cc

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -42,7 +42,7 @@ OVExeNetwork OVCore::LoadNetwork(std::shared_ptr<OVNetwork>& ie_cnn_network, std
4242
}
4343
}
4444

45-
#if defined(OPENVINO_2023_0)
45+
#if defined(OPENVINO_2023_0) || (OPENVINO_2023_1)
4646
OVExeNetwork OVCore::LoadNetwork(const std::string& model, std::string& hw_target, ov::AnyMap& device_config, std::string name) {
4747
ov::CompiledModel obj;
4848
try {
@@ -78,10 +78,10 @@ std::vector<std::string> OVCore::GetAvailableDevices() {
7878
auto available_devices = oe.get_available_devices();
7979
for (int i = 0; i < int(available_devices.size()); i++) {
8080
if (available_devices[i].find("GPU") != std::string::npos) {
81-
std::string luid_str = oe.get_property(available_devices[i], ov::device::luid.name()).as<std::string>();
82-
available_devices[i] = available_devices[i]+"_"+ luid_str;
83-
}
81+
std::string luid_str = oe.get_property(available_devices[i], ov::device::luid.name()).as<std::string>();
82+
available_devices[i] = available_devices[i] + "_" + luid_str;
8483
}
84+
}
8585
return available_devices;
8686
}
8787

onnxruntime/core/providers/openvino/ov_interface.h

Lines changed: 2 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -5,8 +5,7 @@
55

66
#include <vector>
77

8-
9-
#if defined (OPENVINO_2022_1) || (OPENVINO_2022_2) || (OPENVINO_2022_3) || (OPENVINO_2023_0)
8+
#if defined(OPENVINO_2022_1) || (OPENVINO_2022_2) || (OPENVINO_2022_3) || (OPENVINO_2023_0) || (OPENVINO_2023_1)
109
#define OV_API_20
1110
#include "openvino/openvino.hpp"
1211
#include "openvino/pass/convert_fp32_to_fp16.hpp"
@@ -45,7 +44,7 @@ class OVCore {
4544
public:
4645
std::shared_ptr<OVNetwork> ReadModel(const std::string& model_stream) const;
4746
OVExeNetwork LoadNetwork(std::shared_ptr<OVNetwork>& ie_cnn_network, std::string& hw_target, ov::AnyMap& device_config, std::string name);
48-
#if defined(OPENVINO_2023_0)
47+
#if defined(OPENVINO_2023_0) || (OPENVINO_2023_1)
4948
OVExeNetwork LoadNetwork(const std::string& model_stream, std::string& hw_target, ov::AnyMap& device_config, std::string name);
5049
#endif
5150
void SetCache(std::string cache_dir_path);

0 commit comments

Comments (0)