Commit 0a4002f

CHERRY_PICK: Better TensorRT support (#20858) (#21578)
* Fix TensorRT detection bug
  1. Add a new search path for TensorRT in tensorrt.cmake
  2. Add better debug messages
  3. Fix the detection of the TensorRT version

  In the official NVIDIA Docker image, TensorRT headers are located at `/usr/include/x86_64-linux-gnu` and TensorRT libraries are located at `/usr/lib/x86_64-linux-gnu`, so using `-DTENSORRT_ROOT` fails to detect TensorRT, and there is no debug/warning message telling the developer that detection failed. In later versions of TensorRT (e.g. v6), `NV_TENSORRT_MAJOR` is defined in `NvInferVersion.h` instead of `NvInfer.h`, so a compatibility fix is added.

* Fix TensorRT variables in CMake
  1. Replace `${TENSORRT_ROOT}/include` with `${TENSORRT_INCLUDE_DIR}`
  2. Replace `${TENSORRT_ROOT}/lib` with `${TENSORRT_LIBRARY}`

  A manually typed path may point to the wrong location of TensorRT; use the paths detected by the system instead.

* Fix TensorRT library path
  1. Add a new variable, `${TENSORRT_LIBRARY_DIR}`
  2. Fix the TensorRT library path

  inference_lib.cmake and setup.py.in need the directory of the TensorRT library rather than the library file itself, so a new variable is added to provide it.

* Add a more general search rule for TensorRT

  Let the system detect the architecture instead of assigning it manually, i.e. replace `x86_64-linux-gnu` with `${CMAKE_LIBRARY_ARCHITECTURE}`.

* Add a more general search rule for TensorRT

  Remove duplicate search rules for TensorRT libraries. Use `${TENSORRT_LIBRARY_DIR}` to get the full path of libnvinfer.so

test=release/1.6
1 parent a6433f8 commit 0a4002f
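
The commit message above hinges on one observation: the NVIDIA Docker images use the Debian/Ubuntu multiarch layout, which CMake exposes as `${CMAKE_LIBRARY_ARCHITECTURE}`. Below is a minimal, self-contained sketch of that detection idea; the project name, the `TRT_ROOT` cache variable, and the `TRT_PROBE_*` result variables are illustrative only and are not part of this patch.

# Standalone probe sketch, assuming a multiarch install such as the NVIDIA images
# (headers under /usr/include/<arch>, libraries under /usr/lib/<arch>).
cmake_minimum_required(VERSION 3.5)
project(trt_probe LANGUAGES CXX)

set(TRT_ROOT "/usr" CACHE PATH "TensorRT install prefix (assumed default)")

# On Debian/Ubuntu, CMAKE_LIBRARY_ARCHITECTURE expands to e.g. x86_64-linux-gnu,
# so the architecture-specific subdirectories cover the Docker image layout.
find_path(TRT_PROBE_INCLUDE_DIR NvInfer.h
  PATHS ${TRT_ROOT}/include
        ${TRT_ROOT}/include/${CMAKE_LIBRARY_ARCHITECTURE}
  NO_DEFAULT_PATH)

find_path(TRT_PROBE_LIBRARY_DIR NAMES libnvinfer.so
  PATHS ${TRT_ROOT}/lib
        ${TRT_ROOT}/lib/${CMAKE_LIBRARY_ARCHITECTURE}
  NO_DEFAULT_PATH)

message(STATUS "CMAKE_LIBRARY_ARCHITECTURE = ${CMAKE_LIBRARY_ARCHITECTURE}")
message(STATUS "NvInfer.h found in:  ${TRT_PROBE_INCLUDE_DIR}")
message(STATUS "libnvinfer found in: ${TRT_PROBE_LIBRARY_DIR}")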

3 files changed: +30 -4 lines

cmake/inference_lib.cmake

Lines changed: 1 addition & 1 deletion

@@ -161,7 +161,7 @@ endif ()
 if (TENSORRT_FOUND)
     set(dst_dir "${FLUID_INFERENCE_INSTALL_DIR}/third_party/install/tensorrt")
     copy(inference_lib_dist
-        SRCS ${TENSORRT_ROOT}/include/Nv*.h ${TENSORRT_ROOT}/lib/*nvinfer*
+        SRCS ${TENSORRT_INCLUDE_DIR}/Nv*.h ${TENSORRT_LIBRARY_DIR}/*nvinfer*
         DSTS ${dst_dir}/include ${dst_dir}/lib)
 endif ()

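For readers unfamiliar with Paddle's project-local copy() helper, the effect of the changed SRCS/DSTS arguments is roughly the stock-CMake equivalent below. This is a sketch under the assumption that copy() amounts to a plain copy of the matched sources into the destination directories; `dst_dir` is the variable set just above the call.

# Rough stock-CMake equivalent of the copy() call (assumption, for illustration only):
file(GLOB trt_headers "${TENSORRT_INCLUDE_DIR}/Nv*.h")     # NvInfer.h, NvInferVersion.h, ...
file(GLOB trt_libs    "${TENSORRT_LIBRARY_DIR}/*nvinfer*") # libnvinfer.so and friends
file(COPY ${trt_headers} DESTINATION "${dst_dir}/include")
file(COPY ${trt_libs}    DESTINATION "${dst_dir}/lib")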
cmake/tensorrt.cmake

Lines changed: 27 additions & 1 deletion

@@ -19,13 +19,23 @@ endif()
 
 find_path(TENSORRT_INCLUDE_DIR NvInfer.h
   PATHS ${TENSORRT_ROOT} ${TENSORRT_ROOT}/include
+  ${TENSORRT_ROOT}/include/${CMAKE_LIBRARY_ARCHITECTURE}
   $ENV{TENSORRT_ROOT} $ENV{TENSORRT_ROOT}/include
+  $ENV{TENSORRT_ROOT}/include/${CMAKE_LIBRARY_ARCHITECTURE}
   NO_DEFAULT_PATH
 )
 
-find_library(TENSORRT_LIBRARY NAMES ${TR_INFER_LIB} ${TR_INFER_RT}
+find_path(TENSORRT_LIBRARY_DIR NAMES ${TR_INFER_LIB} ${TR_INFER_RT}
   PATHS ${TENSORRT_ROOT} ${TENSORRT_ROOT}/lib
+  ${TENSORRT_ROOT}/lib/${CMAKE_LIBRARY_ARCHITECTURE}
   $ENV{TENSORRT_ROOT} $ENV{TENSORRT_ROOT}/lib
+  $ENV{TENSORRT_ROOT}/lib/${CMAKE_LIBRARY_ARCHITECTURE}
+  NO_DEFAULT_PATH
+  DOC "Path to TensorRT library."
+)
+
+find_library(TENSORRT_LIBRARY NAMES ${TR_INFER_LIB} ${TR_INFER_RT}
+  PATHS ${TENSORRT_LIBRARY_DIR}
   NO_DEFAULT_PATH
   DOC "Path to TensorRT library.")
 
@@ -35,12 +45,28 @@ if(TENSORRT_INCLUDE_DIR AND TENSORRT_LIBRARY)
   endif(WITH_DSO)
 else()
   set(TENSORRT_FOUND OFF)
+  if(WITH_DSO)
+    message(WARNING "TensorRT is NOT found.")
+  else(WITH_DSO)
+    message(WARNING "TensorRT is disabled because WITH_DSO is OFF.")
+  endif(WITH_DSO)
 endif()
 
 if(TENSORRT_FOUND)
   file(READ ${TENSORRT_INCLUDE_DIR}/NvInfer.h TENSORRT_VERSION_FILE_CONTENTS)
   string(REGEX MATCH "define NV_TENSORRT_MAJOR +([0-9]+)" TENSORRT_MAJOR_VERSION
     "${TENSORRT_VERSION_FILE_CONTENTS}")
+
+  if("${TENSORRT_MAJOR_VERSION}" STREQUAL "")
+    file(READ ${TENSORRT_INCLUDE_DIR}/NvInferVersion.h TENSORRT_VERSION_FILE_CONTENTS)
+    string(REGEX MATCH "define NV_TENSORRT_MAJOR +([0-9]+)" TENSORRT_MAJOR_VERSION
+      "${TENSORRT_VERSION_FILE_CONTENTS}")
+  endif()
+
+  if("${TENSORRT_MAJOR_VERSION}" STREQUAL "")
+    message(SEND_ERROR "Failed to detect TensorRT version.")
+  endif()
+
   string(REGEX REPLACE "define NV_TENSORRT_MAJOR +([0-9]+)" "\\1"
     TENSORRT_MAJOR_VERSION "${TENSORRT_MAJOR_VERSION}")

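With these variables in place, downstream CMake can rely on ${TENSORRT_INCLUDE_DIR}, ${TENSORRT_LIBRARY_DIR}, and ${TENSORRT_LIBRARY} rather than re-deriving paths from ${TENSORRT_ROOT}. The snippet below is an illustrative consumer, not code taken from Paddle's build files; the target name trt_consumer and its source file are hypothetical.

# Hypothetical consumer of the detected variables (sketch only):
if(TENSORRT_FOUND)
  include_directories(${TENSORRT_INCLUDE_DIR})             # NvInfer.h and friends
  link_directories(${TENSORRT_LIBRARY_DIR})                # directory holding libnvinfer
  add_executable(trt_consumer trt_consumer.cc)             # hypothetical target
  target_link_libraries(trt_consumer ${TENSORRT_LIBRARY})  # full path to libnvinfer
  message(STATUS "Building against TensorRT ${TENSORRT_MAJOR_VERSION}")
endif()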
python/setup.py.in

Lines changed: 2 additions & 2 deletions

@@ -173,8 +173,8 @@ package_data['paddle.libs']=[('libwarpctc' if os.name != 'nt' else 'warpctc') +
 shutil.copy('${WARPCTC_LIBRARIES}', libs_path)
 
 if '${TENSORRT_FOUND}' == 'ON' and os.name == 'nt':
-    shutil.copy(os.path.join('${TENSORRT_ROOT}', 'lib', '${TR_INFER_RT}'), libs_path)
-    shutil.copy(os.path.join('${TENSORRT_ROOT}', 'lib', '${TR_INFER_PLUGIN_RT}'), libs_path)
+    shutil.copy(os.path.join('${TENSORRT_LIBRARY_DIR}', '${TR_INFER_RT}'), libs_path)
+    shutil.copy(os.path.join('${TENSORRT_LIBRARY_DIR}', '${TR_INFER_PLUGIN_RT}'), libs_path)
     package_data['paddle.libs'] += ['${TR_INFER_RT}', '${TR_INFER_PLUGIN_RT}']
 
 if '${WITH_MKL}' == 'ON':