
Commit fc63aa7

add inference-only fluid library
1 parent dcfb687 commit fc63aa7

2 files changed: 40 additions, 17 deletions


CMakeLists.txt

Lines changed: 3 additions & 0 deletions
@@ -127,6 +127,9 @@ set(THIRD_PARTY_PATH "${CMAKE_BINARY_DIR}/third_party" CACHE STRING
 set(FLUID_INSTALL_DIR "${CMAKE_BINARY_DIR}/fluid_install_dir" CACHE STRING
   "A path setting fluid shared and static libraries")
 
+set(FLUID_INFERENCE_INSTALL_DIR "${CMAKE_BINARY_DIR}/fluid_inference_install_dir" CACHE STRING
+  "A path setting fluid inference shared and static libraries")
+
 if (WITH_C_API AND WITH_PYTHON)
   message(WARNING "It is suggest not embedded a python interpreter in Paddle "
     "when using C-API. It will give an unpredictable behavior when using a "

cmake/inference_lib.cmake

Lines changed: 37 additions & 17 deletions
@@ -150,16 +150,16 @@ if (WITH_ANAKIN AND WITH_MKL)
         SRCS
         ${PADDLE_BINARY_DIR}/paddle/fluid/inference/api/libinference_anakin_api* # compiled anakin api
         ${ANAKIN_INSTALL_DIR} # anakin release
-        DSTS ${dst_dir}/inference/anakin ${FLUID_INSTALL_DIR}/third_party/install/anakin)
+        DSTS ${FLUID_INSTALL_DIR}/third_party/install/anakin ${FLUID_INSTALL_DIR}/third_party/install/anakin)
   list(APPEND inference_deps anakin_inference_lib)
 endif()
 
 set(module "inference")
 copy(inference_lib DEPS ${inference_deps}
   SRCS ${src_dir}/${module}/*.h ${PADDLE_BINARY_DIR}/paddle/fluid/inference/libpaddle_fluid.*
-       ${src_dir}/${module}/api/paddle_inference_api.h ${src_dir}/${module}/api/demo_ci
+       ${src_dir}/${module}/api/paddle_inference_api.h
        ${PADDLE_BINARY_DIR}/paddle/fluid/inference/api/paddle_inference_pass.h
-  DSTS ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module}
+  DSTS ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module}
   )
 
 set(module "platform")
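The DSTS edits above track the SRCS edits one-for-one because Paddle's copy() helper (defined elsewhere in inference_lib.cmake) pairs each source with the destination at the same list index; dropping demo_ci from SRCS therefore also drops one ${dst_dir}/${module} entry from DSTS. A rough sketch of that pairwise convention, for orientation only, not Paddle's actual implementation:

# Illustrative sketch of a pairwise SRCS/DSTS copy helper (Unix, Makefile generators,
# which expand the cp glob via the shell); the real copy() differs in detail.
function(copy TARGET)
  cmake_parse_arguments(copy_lib "" "" "SRCS;DSTS;DEPS" ${ARGN})
  list(LENGTH copy_lib_SRCS srcs_len)
  math(EXPR last_index "${srcs_len} - 1")
  add_custom_target(${TARGET} DEPENDS ${copy_lib_DEPS})
  foreach(index RANGE ${last_index})
    list(GET copy_lib_SRCS ${index} src)    # the Nth source glob ...
    list(GET copy_lib_DSTS ${index} dst)    # ... lands in the Nth destination
    add_custom_command(TARGET ${TARGET} POST_BUILD
      COMMAND ${CMAKE_COMMAND} -E make_directory "${dst}"
      COMMAND cp -r ${src} "${dst}")
  endforeach()
endfunction()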
@@ -188,18 +188,38 @@ copy(cmake_cache
 # This command generates a complete fluid library for both train and inference
 add_custom_target(fluid_lib_dist DEPENDS ${fluid_lib_dist_dep})
 
+# The following commands generate an inference-only fluid library.
+# third_party, version.txt and CMakeCache.txt sit in the same positions as in ${FLUID_INSTALL_DIR}.
+copy(third_party DEPS fluid_lib_dist
+  SRCS ${FLUID_INSTALL_DIR}/third_party ${FLUID_INSTALL_DIR}/CMakeCache.txt
+  DSTS ${FLUID_INFERENCE_INSTALL_DIR} ${FLUID_INFERENCE_INSTALL_DIR}
+)
+
+# Only libpaddle_fluid.so/.a and paddle_inference_api.h are needed for the inference-only library.
+copy(inference_api_lib DEPS fluid_lib_dist
+  SRCS ${FLUID_INSTALL_DIR}/paddle/fluid/inference/libpaddle_fluid.*
+       ${FLUID_INSTALL_DIR}/paddle/fluid/inference/paddle_inference_api.h
+  DSTS ${FLUID_INFERENCE_INSTALL_DIR}/paddle/lib ${FLUID_INFERENCE_INSTALL_DIR}/paddle/include
+)
+
+add_custom_target(inference_lib_dist DEPENDS third_party inference_api_lib)
+
 # paddle fluid version
-execute_process(
-  COMMAND ${GIT_EXECUTABLE} log --pretty=format:%H -1
-  WORKING_DIRECTORY ${PADDLE_SOURCE_DIR}
-  OUTPUT_VARIABLE PADDLE_GIT_COMMIT)
-set(version_file ${FLUID_INSTALL_DIR}/version.txt)
-file(WRITE ${version_file}
-  "GIT COMMIT ID: ${PADDLE_GIT_COMMIT}\n"
-  "WITH_MKL: ${WITH_MKL}\n"
-  "WITH_GPU: ${WITH_GPU}\n")
-if(WITH_GPU)
-  file(APPEND ${version_file}
-    "CUDA version: ${CUDA_VERSION}\n"
-    "CUDNN version: v${CUDNN_MAJOR_VERSION}\n")
-endif()
+function(version version_file)
+  execute_process(
+    COMMAND ${GIT_EXECUTABLE} log --pretty=format:%H -1
+    WORKING_DIRECTORY ${PADDLE_SOURCE_DIR}
+    OUTPUT_VARIABLE PADDLE_GIT_COMMIT)
+  file(WRITE ${version_file}
+    "GIT COMMIT ID: ${PADDLE_GIT_COMMIT}\n"
+    "WITH_MKL: ${WITH_MKL}\n"
+    "WITH_MKLDNN: ${WITH_MKLDNN}\n"
+    "WITH_GPU: ${WITH_GPU}\n")
+  if(WITH_GPU)
+    file(APPEND ${version_file}
+      "CUDA version: ${CUDA_VERSION}\n"
+      "CUDNN version: v${CUDNN_MAJOR_VERSION}\n")
+  endif()
+endfunction()
+version(${FLUID_INSTALL_DIR}/version.txt)
+version(${FLUID_INFERENCE_INSTALL_DIR}/version.txt)
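With these hunks applied, building the new inference_lib_dist target populates ${FLUID_INFERENCE_INSTALL_DIR} with paddle/lib (libpaddle_fluid.so/.a), paddle/include (paddle_inference_api.h), third_party, CMakeCache.txt and version.txt. A hypothetical downstream CMakeLists.txt consuming that layout (the project name, main.cc, and the FLUID_INFERENCE_ROOT variable are assumptions for illustration, not part of this commit):

cmake_minimum_required(VERSION 3.0)
project(fluid_infer_demo CXX)

# Point this at the fluid_inference_install_dir produced by the inference_lib_dist target.
set(FLUID_INFERENCE_ROOT "" CACHE PATH "Path to the inference-only fluid install dir")

include_directories("${FLUID_INFERENCE_ROOT}/paddle/include")  # paddle_inference_api.h
link_directories("${FLUID_INFERENCE_ROOT}/paddle/lib")         # libpaddle_fluid.so/.a

add_executable(fluid_infer_demo main.cc)                       # main.cc: your inference driver
target_link_libraries(fluid_infer_demo paddle_fluid)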
