@@ -150,16 +150,16 @@ if (WITH_ANAKIN AND WITH_MKL)
         SRCS
         ${PADDLE_BINARY_DIR}/paddle/fluid/inference/api/libinference_anakin_api* # compiled anakin api
         ${ANAKIN_INSTALL_DIR} # anakin release
-        DSTS ${dst_dir}/inference/anakin ${FLUID_INSTALL_DIR}/third_party/install/anakin)
+        DSTS ${FLUID_INSTALL_DIR}/third_party/install/anakin ${FLUID_INSTALL_DIR}/third_party/install/anakin)
     list(APPEND inference_deps anakin_inference_lib)
 endif()
 
 set(module "inference")
 copy(inference_lib DEPS ${inference_deps}
   SRCS ${src_dir}/${module}/*.h ${PADDLE_BINARY_DIR}/paddle/fluid/inference/libpaddle_fluid.*
-       ${src_dir}/${module}/api/paddle_inference_api.h ${src_dir}/${module}/api/demo_ci
+       ${src_dir}/${module}/api/paddle_inference_api.h
        ${PADDLE_BINARY_DIR}/paddle/fluid/inference/api/paddle_inference_pass.h
-  DSTS ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module}
+  DSTS ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module} ${dst_dir}/${module}
 )
 
 set(module "platform")
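
The DSTS change in this hunk follows from how `copy()` pairs its arguments: each SRCS entry is copied to the DSTS entry at the same index, so dropping `demo_ci` from SRCS (leaving four sources) requires dropping one `${dst_dir}/${module}` from DSTS. The helper itself is defined earlier in inference_lib.cmake and is not part of this diff; the sketch below is an assumption of its pairing behavior, not the real implementation.

# Hedged sketch of the copy() helper assumed by this diff; the real one is
# defined earlier in inference_lib.cmake. SRCS and DSTS are index-paired,
# which is why the two lists must have the same length.
function(copy TARGET)
  cmake_parse_arguments(copy_lib "" "" "SRCS;DSTS;DEPS" ${ARGN})
  add_custom_target(${TARGET} DEPENDS ${copy_lib_DEPS})
  list(LENGTH copy_lib_SRCS src_len)
  math(EXPR last_index "${src_len} - 1")
  foreach(index RANGE ${last_index})
    list(GET copy_lib_SRCS ${index} src)
    list(GET copy_lib_DSTS ${index} dst)
    # mkdir -p / cp -r run through the shell at build time, so glob
    # patterns such as libpaddle_fluid.* in SRCS expand there (POSIX only).
    add_custom_command(TARGET ${TARGET} POST_BUILD
      COMMAND mkdir -p "${dst}"
      COMMAND cp -r ${src} "${dst}")
  endforeach()
endfunction()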
@@ -188,18 +188,38 @@ copy(cmake_cache
 # This command generates a complete fluid library for both train and inference
 add_custom_target(fluid_lib_dist DEPENDS ${fluid_lib_dist_dep})
 
+# The following commands generate an inference-only fluid library.
+# third_party, version.txt and CMakeCache.txt keep the same positions as in ${FLUID_INSTALL_DIR}.
+copy(third_party DEPS fluid_lib_dist
+  SRCS ${FLUID_INSTALL_DIR}/third_party ${FLUID_INSTALL_DIR}/CMakeCache.txt
+  DSTS ${FLUID_INFERENCE_INSTALL_DIR} ${FLUID_INFERENCE_INSTALL_DIR}
+)
+
+# Only libpaddle_fluid.so/a and paddle_inference_api.h are needed for the inference-only library.
+copy(inference_api_lib DEPS fluid_lib_dist
+  SRCS ${FLUID_INSTALL_DIR}/paddle/fluid/inference/libpaddle_fluid.*
+       ${FLUID_INSTALL_DIR}/paddle/fluid/inference/paddle_inference_api.h
+  DSTS ${FLUID_INFERENCE_INSTALL_DIR}/paddle/lib ${FLUID_INFERENCE_INSTALL_DIR}/paddle/include
+)
+
+add_custom_target(inference_lib_dist DEPENDS third_party inference_api_lib)
+
 # paddle fluid version
-execute_process(
-  COMMAND ${GIT_EXECUTABLE} log --pretty=format:%H -1
-  WORKING_DIRECTORY ${PADDLE_SOURCE_DIR}
-  OUTPUT_VARIABLE PADDLE_GIT_COMMIT)
-set(version_file ${FLUID_INSTALL_DIR}/version.txt)
-file(WRITE ${version_file}
-  "GIT COMMIT ID: ${PADDLE_GIT_COMMIT}\n"
-  "WITH_MKL: ${WITH_MKL}\n"
-  "WITH_GPU: ${WITH_GPU}\n")
-if(WITH_GPU)
-  file(APPEND ${version_file}
-    "CUDA version: ${CUDA_VERSION}\n"
-    "CUDNN version: v${CUDNN_MAJOR_VERSION}\n")
-endif()
+function(version version_file)
+  execute_process(
+    COMMAND ${GIT_EXECUTABLE} log --pretty=format:%H -1
+    WORKING_DIRECTORY ${PADDLE_SOURCE_DIR}
+    OUTPUT_VARIABLE PADDLE_GIT_COMMIT)
+  file(WRITE ${version_file}
+    "GIT COMMIT ID: ${PADDLE_GIT_COMMIT}\n"
+    "WITH_MKL: ${WITH_MKL}\n"
+    "WITH_MKLDNN: ${WITH_MKLDNN}\n"
+    "WITH_GPU: ${WITH_GPU}\n")
+  if(WITH_GPU)
+    file(APPEND ${version_file}
+      "CUDA version: ${CUDA_VERSION}\n"
+      "CUDNN version: v${CUDNN_MAJOR_VERSION}\n")
+  endif()
+endfunction()
+version(${FLUID_INSTALL_DIR}/version.txt)
+version(${FLUID_INFERENCE_INSTALL_DIR}/version.txt)
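
After this change, building the new inference_lib_dist target populates ${FLUID_INFERENCE_INSTALL_DIR} with paddle/lib/libpaddle_fluid.*, paddle/include/paddle_inference_api.h, third_party/, CMakeCache.txt and version.txt. A hypothetical consumer CMakeLists.txt against that layout might look like the sketch below; the project name, main.cc and the PADDLE_INFER_DIR variable are illustrative, not part of this change.

cmake_minimum_required(VERSION 3.0)
project(infer_demo CXX)
# Point this at FLUID_INFERENCE_INSTALL_DIR; the variable name is illustrative.
set(PADDLE_INFER_DIR "" CACHE PATH "inference-only fluid install dir")
include_directories(${PADDLE_INFER_DIR}/paddle/include)  # paddle_inference_api.h
link_directories(${PADDLE_INFER_DIR}/paddle/lib)         # libpaddle_fluid.so/a
add_executable(infer_demo main.cc)
target_link_libraries(infer_demo paddle_fluid)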