@@ -26,13 +26,13 @@ endif()
 function(inference_api_test TARGET_NAME)
   if (WITH_TESTING)
     set(options "")
-    set(oneValueArgs "")
+    set(oneValueArgs SRC)
     set(multiValueArgs ARGS)
     cmake_parse_arguments(inference_test "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
 
     set(PYTHON_TESTS_DIR ${PADDLE_BINARY_DIR}/python/paddle/fluid/tests)
     cc_test(${TARGET_NAME}
-            SRCS ${TARGET_NAME}.cc
+            SRCS ${inference_test_SRC}
             DEPS "${inference_deps}"
             ARGS --dirname=${PYTHON_TESTS_DIR}/book/)
     if(inference_test_ARGS)
@@ -73,24 +73,24 @@ if(NOT APPLE)
 endif()
 
 cc_test(test_paddle_inference_api
-        SRCS test_api.cc
+        SRCS api_tester.cc
         DEPS paddle_inference_api)
 
-inference_api_test(test_api_impl
+inference_api_test(test_api_impl SRC api_impl_tester.cc
     ARGS test_word2vec test_image_classification)
 
 if(WITH_GPU AND TENSORRT_FOUND)
   cc_library(paddle_inference_tensorrt_subgraph_engine
       SRCS api_tensorrt_subgraph_engine.cc
       DEPS paddle_inference_api analysis tensorrt_engine paddle_inference_api paddle_fluid_api tensorrt_converter)
 
-  inference_api_test(test_api_tensorrt_subgraph_engine ARGS test_word2vec)
+  inference_api_test(test_api_tensorrt_subgraph_engine SRC api_tensorrt_subgraph_engine_tester.cc ARGS test_word2vec)
 endif()
 
 if (WITH_ANAKIN) # only needed in CI
   # Due to Anakin do not have official library releases and the versions of protobuf and cuda do not match Paddle's,
   # so anakin library will not be merged to our official inference library. To use anakin prediction API, one need to
-  # compile the libinference_anakin_api.a and compile with anakin.so.
+  # compile the libinference_anakin_api.a and anakin.so.
   nv_library(inference_anakin_api SRCS api.cc api_anakin_engine.cc)
   nv_library(inference_anakin_api_shared SHARED SRCS api.cc api_anakin_engine.cc)
   target_compile_options(inference_anakin_api BEFORE PUBLIC ${ANAKIN_COMPILE_EXTRA_FLAGS})
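For context, a minimal sketch of how the reworked inference_api_test helper is expected to be invoked after this change. The target and source names are the ones introduced by this patch; the argument flow described in the comments is assumed from the cmake_parse_arguments call shown in the first hunk:

    # SRC is parsed into inference_test_SRC and forwarded to cc_test as its SRCS;
    # ARGS is parsed into inference_test_ARGS and appended to the test command line.
    inference_api_test(test_api_impl
                       SRC api_impl_tester.cc
                       ARGS test_word2vec test_image_classification)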