Skip to content

Commit 10e1de7

Browse files
dkurt authored and vpisarev committed
Intel Inference Engine deep learning backend (opencv#10608)
* Intel Inference Engine deep learning backend.
* OpenFace network using Inference Engine backend
1 parent 292dfc2 commit 10e1de7

26 files changed

+1379
-49
lines changed

CMakeLists.txt

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -223,6 +223,7 @@ OCV_OPTION(WITH_GTK "Include GTK support" ON
223223
OCV_OPTION(WITH_GTK_2_X "Use GTK version 2" OFF IF (UNIX AND NOT APPLE AND NOT ANDROID) )
224224
OCV_OPTION(WITH_IPP "Include Intel IPP support" (NOT MINGW AND NOT CV_DISABLE_OPTIMIZATION) IF (X86_64 OR X86) AND NOT WINRT AND NOT IOS )
225225
OCV_OPTION(WITH_HALIDE "Include Halide support" OFF)
226+
OCV_OPTION(WITH_INF_ENGINE "Include Intel Inference Engine support" OFF)
226227
OCV_OPTION(WITH_JASPER "Include JPEG2K support" ON IF (NOT IOS) )
227228
OCV_OPTION(WITH_JPEG "Include JPEG support" ON)
228229
OCV_OPTION(WITH_WEBP "Include WebP support" ON IF (NOT WINRT) )
@@ -669,6 +670,11 @@ if(WITH_HALIDE)
669670
include(cmake/OpenCVDetectHalide.cmake)
670671
endif()
671672

673+
# --- Inference Engine ---
674+
if(WITH_INF_ENGINE)
675+
include(cmake/OpenCVDetectInferenceEngine.cmake)
676+
endif()
677+
672678
# --- DirectX ---
673679
if(WITH_DIRECTX)
674680
include(cmake/OpenCVDetectDirectX.cmake)
@@ -1353,6 +1359,10 @@ if(WITH_HALIDE OR HAVE_HALIDE)
13531359
status(" Halide:" HAVE_HALIDE THEN "YES (${HALIDE_LIBRARIES} ${HALIDE_INCLUDE_DIRS})" ELSE NO)
13541360
endif()
13551361

1362+
if(WITH_INF_ENGINE OR HAVE_INF_ENGINE)
1363+
status(" Inference Engine:" HAVE_INF_ENGINE THEN "YES (${INF_ENGINE_LIBRARIES} ${INF_ENGINE_INCLUDE_DIRS})" ELSE NO)
1364+
endif()
1365+
13561366
if(WITH_EIGEN OR HAVE_EIGEN)
13571367
status(" Eigen:" HAVE_EIGEN THEN "YES (ver ${EIGEN_WORLD_VERSION}.${EIGEN_MAJOR_VERSION}.${EIGEN_MINOR_VERSION})" ELSE NO)
13581368
endif()
Lines changed: 59 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,59 @@
1+
# The script detects Intel(R) Inference Engine installation
2+
#
3+
# Parameters:
4+
# INTEL_CVSDK_DIR - Path to Inference Engine root folder
5+
# IE_PLUGINS_PATH - Path to folder with Inference Engine plugins
6+
#
7+
# On return this will define:
8+
#
9+
# HAVE_INF_ENGINE - True if Intel Inference Engine was found
10+
# INF_ENGINE_INCLUDE_DIRS - Inference Engine include folder
11+
# INF_ENGINE_LIBRARIES - Inference Engine libraries and its dependencies
12+
#
13+
macro(ie_fail)
14+
set(HAVE_INF_ENGINE FALSE)
15+
return()
16+
endmacro()
17+
18+
if(NOT INF_ENGINE_ROOT_DIR OR NOT EXISTS "${INF_ENGINE_ROOT_DIR}/inference_engine/include/inference_engine.hpp")
19+
set(ie_root_paths "${INF_ENGINE_ROOT_DIR}")
20+
if(DEFINED ENV{INTEL_CVSDK_DIR})
21+
list(APPEND ie_root_paths "$ENV{INTEL_CVSDK_DIR}")
22+
endif()
23+
24+
if(WITH_INF_ENGINE AND NOT ie_root_paths)
25+
list(APPEND ie_root_paths "/opt/intel/deeplearning_deploymenttoolkit/deployment_tools")
26+
endif()
27+
28+
find_path(INF_ENGINE_ROOT_DIR inference_engine/include/inference_engine.hpp PATHS ${ie_root_paths})
29+
endif()
30+
31+
set(INF_ENGINE_INCLUDE_DIRS "${INF_ENGINE_ROOT_DIR}/inference_engine/include" CACHE PATH "Path to Inference Engine include directory")
32+
33+
if(NOT INF_ENGINE_ROOT_DIR
34+
OR NOT EXISTS "${INF_ENGINE_ROOT_DIR}"
35+
OR NOT EXISTS "${INF_ENGINE_INCLUDE_DIRS}"
36+
OR NOT EXISTS "${INF_ENGINE_INCLUDE_DIRS}/inference_engine.hpp"
37+
)
38+
ie_fail()
39+
endif()
40+
41+
set(INF_ENGINE_LIBRARIES "")
42+
foreach(lib inference_engine mklml_intel iomp5)
43+
find_library(${lib}
44+
NAMES ${lib}
45+
HINTS ${IE_PLUGINS_PATH}
46+
HINTS "$ENV{IE_PLUGINS_PATH}"
47+
HINTS ${INF_ENGINE_ROOT_DIR}/external/mklml_lnx/lib
48+
)
49+
if(NOT ${lib})
50+
ie_fail()
51+
endif()
52+
list(APPEND INF_ENGINE_LIBRARIES ${${lib}})
53+
endforeach()
54+
55+
set(HAVE_INF_ENGINE TRUE)
56+
57+
include_directories(${INF_ENGINE_INCLUDE_DIRS})
58+
list(APPEND OPENCV_LINKER_LIBS ${INF_ENGINE_LIBRARIES})
59+
add_definitions(-DHAVE_INF_ENGINE)

modules/core/CMakeLists.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -59,7 +59,7 @@ ocv_create_module(${extra_libs})
5959

6060
ocv_target_link_libraries(${the_module} LINK_PRIVATE
6161
"${ZLIB_LIBRARIES}" "${OPENCL_LIBRARIES}" "${VA_LIBRARIES}"
62-
"${LAPACK_LIBRARIES}" "${CPUFEATURES_LIBRARIES}" "${HALIDE_LIBRARIES}"
62+
"${LAPACK_LIBRARIES}" "${CPUFEATURES_LIBRARIES}" "${HALIDE_LIBRARIES}" "${INF_ENGINE_LIBRARIES}"
6363
"${ITT_LIBRARIES}"
6464
"${OPENCV_HAL_LINKER_LIBS}"
6565
)

modules/dnn/CMakeLists.txt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@ else()
2727
-Wunused-parameter -Wunused-local-typedefs -Wsign-compare -Wsign-promo
2828
-Wundef -Wtautological-undefined-compare -Wignored-qualifiers -Wextra
2929
-Wunused-function -Wunused-const-variable -Wdeprecated-declarations
30+
-Werror=non-virtual-dtor
3031
)
3132
endif()
3233

modules/dnn/include/opencv2/dnn/dnn.hpp

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -70,7 +70,8 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
7070
enum Backend
7171
{
7272
DNN_BACKEND_DEFAULT,
73-
DNN_BACKEND_HALIDE
73+
DNN_BACKEND_HALIDE,
74+
DNN_BACKEND_INFERENCE_ENGINE
7475
};
7576

7677
/**
@@ -242,6 +243,8 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
242243
*/
243244
virtual Ptr<BackendNode> initHalide(const std::vector<Ptr<BackendWrapper> > &inputs);
244245

246+
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> > &inputs);
247+
245248
/**
246249
* @brief Automatic Halide scheduling based on layer hyper-parameters.
247250
* @param[in] node Backend node with Halide functions.

modules/dnn/perf/perf_net.cpp

Lines changed: 57 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -13,14 +13,7 @@
1313
namespace
1414
{
1515

16-
#ifdef HAVE_HALIDE
17-
#define TEST_DNN_BACKEND DNN_BACKEND_DEFAULT, DNN_BACKEND_HALIDE
18-
#else
19-
#define TEST_DNN_BACKEND DNN_BACKEND_DEFAULT
20-
#endif
21-
#define TEST_DNN_TARGET DNN_TARGET_CPU, DNN_TARGET_OPENCL
22-
23-
CV_ENUM(DNNBackend, DNN_BACKEND_DEFAULT, DNN_BACKEND_HALIDE)
16+
CV_ENUM(DNNBackend, DNN_BACKEND_DEFAULT, DNN_BACKEND_HALIDE, DNN_BACKEND_INFERENCE_ENGINE)
2417
CV_ENUM(DNNTarget, DNN_TARGET_CPU, DNN_TARGET_OPENCL)
2518

2619
class DNNTestNetwork : public ::perf::TestBaseWithParam< tuple<DNNBackend, DNNTarget> >
@@ -31,13 +24,16 @@ class DNNTestNetwork : public ::perf::TestBaseWithParam< tuple<DNNBackend, DNNTa
3124

3225
dnn::Net net;
3326

34-
void processNet(std::string weights, std::string proto, std::string halide_scheduler,
35-
const Mat& input, const std::string& outputLayer,
36-
const std::string& framework)
27+
DNNTestNetwork()
3728
{
3829
backend = (dnn::Backend)(int)get<0>(GetParam());
3930
target = (dnn::Target)(int)get<1>(GetParam());
31+
}
4032

33+
void processNet(std::string weights, std::string proto, std::string halide_scheduler,
34+
const Mat& input, const std::string& outputLayer,
35+
const std::string& framework)
36+
{
4137
if (backend == DNN_BACKEND_DEFAULT && target == DNN_TARGET_OPENCL)
4238
{
4339
#if defined(HAVE_OPENCL)
@@ -47,6 +43,8 @@ class DNNTestNetwork : public ::perf::TestBaseWithParam< tuple<DNNBackend, DNNTa
4743
throw ::SkipTestException("OpenCL is not available/disabled in OpenCV");
4844
}
4945
}
46+
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL)
47+
throw SkipTestException("Skip OpenCL target of Inference Engine backend");
5048

5149
randu(input, 0.0f, 1.0f);
5250

@@ -117,7 +115,7 @@ PERF_TEST_P_(DNNTestNetwork, GoogLeNet)
117115
"", Mat(cv::Size(224, 224), CV_32FC3), "prob", "caffe");
118116
}
119117

120-
PERF_TEST_P_(DNNTestNetwork, ResNet50)
118+
PERF_TEST_P_(DNNTestNetwork, ResNet_50)
121119
{
122120
processNet("dnn/ResNet-50-model.caffemodel", "dnn/ResNet-50-deploy.prototxt",
123121
"resnet_50.yml", Mat(cv::Size(224, 224), CV_32FC3), "prob", "caffe");
@@ -131,19 +129,22 @@ PERF_TEST_P_(DNNTestNetwork, SqueezeNet_v1_1)
131129

132130
PERF_TEST_P_(DNNTestNetwork, Inception_5h)
133131
{
132+
if (backend == DNN_BACKEND_INFERENCE_ENGINE) throw SkipTestException("");
134133
processNet("dnn/tensorflow_inception_graph.pb", "",
135134
"inception_5h.yml",
136135
Mat(cv::Size(224, 224), CV_32FC3), "softmax2", "tensorflow");
137136
}
138137

139138
PERF_TEST_P_(DNNTestNetwork, ENet)
140139
{
140+
if (backend == DNN_BACKEND_INFERENCE_ENGINE) throw SkipTestException("");
141141
processNet("dnn/Enet-model-best.net", "", "enet.yml",
142142
Mat(cv::Size(512, 256), CV_32FC3), "l367_Deconvolution", "torch");
143143
}
144144

145145
PERF_TEST_P_(DNNTestNetwork, SSD)
146146
{
147+
if (backend == DNN_BACKEND_INFERENCE_ENGINE) throw SkipTestException("");
147148
processNet("dnn/VGG_ILSVRC2016_SSD_300x300_iter_440000.caffemodel", "dnn/ssd_vgg16.prototxt", "disabled",
148149
Mat(cv::Size(300, 300), CV_32FC3), "detection_out", "caffe");
149150
}
@@ -162,15 +163,53 @@ PERF_TEST_P_(DNNTestNetwork, MobileNet_SSD_Caffe)
162163

163164
PERF_TEST_P_(DNNTestNetwork, MobileNet_SSD_TensorFlow)
164165
{
166+
if (backend == DNN_BACKEND_INFERENCE_ENGINE) throw SkipTestException("");
165167
processNet("dnn/ssd_mobilenet_v1_coco.pb", "ssd_mobilenet_v1_coco.pbtxt", "",
166168
Mat(cv::Size(300, 300), CV_32FC3), "", "tensorflow");
167169
}
168170

169-
INSTANTIATE_TEST_CASE_P(/*nothing*/, DNNTestNetwork,
170-
testing::Combine(
171-
::testing::Values(TEST_DNN_BACKEND),
172-
DNNTarget::all()
173-
)
174-
);
171+
PERF_TEST_P_(DNNTestNetwork, DenseNet_121)
172+
{
173+
if (backend == DNN_BACKEND_HALIDE) throw SkipTestException("");
174+
processNet("dnn/DenseNet_121.caffemodel", "dnn/DenseNet_121.prototxt", "",
175+
Mat(cv::Size(224, 224), CV_32FC3), "", "caffe");
176+
}
177+
178+
PERF_TEST_P_(DNNTestNetwork, OpenPose_pose_coco)
179+
{
180+
if (backend == DNN_BACKEND_HALIDE) throw SkipTestException("");
181+
processNet("dnn/openpose_pose_coco.caffemodel", "dnn/openpose_pose_coco.prototxt", "",
182+
Mat(cv::Size(368, 368), CV_32FC3), "", "caffe");
183+
}
184+
185+
PERF_TEST_P_(DNNTestNetwork, OpenPose_pose_mpi)
186+
{
187+
if (backend == DNN_BACKEND_HALIDE) throw SkipTestException("");
188+
processNet("dnn/openpose_pose_mpi.caffemodel", "dnn/openpose_pose_mpi.prototxt", "",
189+
Mat(cv::Size(368, 368), CV_32FC3), "", "caffe");
190+
}
191+
192+
PERF_TEST_P_(DNNTestNetwork, OpenPose_pose_mpi_faster_4_stages)
193+
{
194+
if (backend == DNN_BACKEND_HALIDE) throw SkipTestException("");
195+
// The same .caffemodel but modified .prototxt
196+
// See https://github.com/CMU-Perceptual-Computing-Lab/openpose/blob/master/src/openpose/pose/poseParameters.cpp
197+
processNet("dnn/openpose_pose_mpi.caffemodel", "dnn/openpose_pose_mpi_faster_4_stages.prototxt", "",
198+
Mat(cv::Size(368, 368), CV_32FC3), "", "caffe");
199+
}
200+
201+
const tuple<DNNBackend, DNNTarget> testCases[] = {
202+
#ifdef HAVE_HALIDE
203+
tuple<DNNBackend, DNNTarget>(DNN_BACKEND_HALIDE, DNN_TARGET_CPU),
204+
tuple<DNNBackend, DNNTarget>(DNN_BACKEND_HALIDE, DNN_TARGET_OPENCL),
205+
#endif
206+
#ifdef HAVE_INF_ENGINE
207+
tuple<DNNBackend, DNNTarget>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_CPU),
208+
#endif
209+
tuple<DNNBackend, DNNTarget>(DNN_BACKEND_DEFAULT, DNN_TARGET_CPU),
210+
tuple<DNNBackend, DNNTarget>(DNN_BACKEND_DEFAULT, DNN_TARGET_OPENCL)
211+
};
212+
213+
INSTANTIATE_TEST_CASE_P(/*nothing*/, DNNTestNetwork, testing::ValuesIn(testCases));
175214

176215
} // namespace

0 commit comments

Comments (0)