@@ -43,21 +43,21 @@ function(inference_api_test TARGET_NAME)
 endfunction(inference_api_test)
 
 cc_library(paddle_inference_api
-    SRCS paddle_inference_api.cc paddle_inference_api_impl.cc
+    SRCS api.cc api_impl.cc
     DEPS ${FLUID_CORE_MODULES} ${GLOB_OP_LIB})
 if(NOT APPLE)
-  set(LINK_FLAGS "-Wl,--retain-symbols-file ${CMAKE_CURRENT_SOURCE_DIR}/paddle_inference_api.sym")
+  set(LINK_FLAGS "-Wl,--retain-symbols-file ${CMAKE_CURRENT_SOURCE_DIR}/api.sym")
   set_target_properties(paddle_inference_api PROPERTIES LINK_FLAGS "${LINK_FLAGS}")
 endif()
 
 # Here the shared library doesn't depend on other fluid libraries; otherwise a double free will occur.
 cc_library(paddle_inference_api_shared SHARED
-    SRCS paddle_inference_api.cc paddle_inference_api_impl.cc)
+    SRCS api.cc api_impl.cc)
 add_dependencies(paddle_inference_api_shared ${FLUID_CORE_MODULES} ${GLOB_OP_LIB})
 set_target_properties(paddle_inference_api_shared PROPERTIES OUTPUT_NAME paddle_inference_api)
 
 if(NOT APPLE)
-  set(LINK_FLAGS "-Wl,--version-script ${CMAKE_CURRENT_SOURCE_DIR}/paddle_inference_api.map")
+  set(LINK_FLAGS "-Wl,--version-script ${CMAKE_CURRENT_SOURCE_DIR}/api.map")
   set_target_properties(paddle_inference_api_shared PROPERTIES LINK_FLAGS "${LINK_FLAGS}")
   FILE(WRITE ${CMAKE_CURRENT_BINARY_DIR}/check_symbol.cmake
     "execute_process(COMMAND bash -c \"${CMAKE_CURRENT_SOURCE_DIR}/check_symbol.sh"
@@ -73,32 +73,32 @@ if(NOT APPLE)
 endif()
 
 cc_test(test_paddle_inference_api
-    SRCS test_paddle_inference_api.cc
+    SRCS test_api.cc
     DEPS paddle_inference_api)
 
-inference_api_test(test_paddle_inference_api_impl
+inference_api_test(test_api_impl
     ARGS test_word2vec test_image_classification)
 
 if(WITH_GPU AND TENSORRT_FOUND)
   cc_library(paddle_inference_tensorrt_subgraph_engine
-    SRCS paddle_inference_api_tensorrt_subgraph_engine.cc
-    DEPS paddle_inference_api analysis tensorrt_engine paddle_inference_api paddle_fluid_api)
+    SRCS api_tensorrt_subgraph_engine.cc
+    DEPS paddle_inference_api analysis tensorrt_engine paddle_fluid_api)
 
-  inference_api_test(test_paddle_inference_api_tensorrt_subgraph_engine ARGS test_word2vec)
+  inference_api_test(test_api_tensorrt_subgraph_engine ARGS test_word2vec)
 endif()
 
 if (WITH_ANAKIN) # only needed in CI
   # Since Anakin does not have official library releases and its protobuf and CUDA versions do not match Paddle's,
   # the anakin library will not be merged into our official inference library. To use the Anakin prediction API,
   # one needs to compile libinference_anakin_api.a and link against anakin.so.
-  nv_library(inference_anakin_api SRCS paddle_inference_api.cc paddle_inference_api_anakin_engine.cc)
-  nv_library(inference_anakin_api_shared SHARED SRCS paddle_inference_api.cc paddle_inference_api_anakin_engine.cc)
+  nv_library(inference_anakin_api SRCS api.cc api_anakin_engine.cc)
+  nv_library(inference_anakin_api_shared SHARED SRCS api.cc api_anakin_engine.cc)
   target_compile_options(inference_anakin_api BEFORE PUBLIC ${ANAKIN_COMPILE_EXTRA_FLAGS})
   target_compile_options(inference_anakin_api_shared BEFORE PUBLIC ${ANAKIN_COMPILE_EXTRA_FLAGS})
   target_link_libraries(inference_anakin_api anakin anakin_saber_common)
   target_link_libraries(inference_anakin_api_shared anakin anakin_saber_common)
   if (WITH_TESTING)
-    cc_test(inference_anakin_test SRCS paddle_inference_api_anakin_engine_tester.cc
+    cc_test(inference_anakin_test SRCS api_anakin_engine_tester.cc
             ARGS --model=${ANAKIN_INSTALL_DIR}/mobilenet_v2.anakin.bin
             DEPS inference_anakin_api)
     target_compile_options(inference_anakin_test BEFORE PUBLIC ${ANAKIN_COMPILE_EXTRA_FLAGS})
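
Note on the two if(NOT APPLE) blocks above: -Wl,--retain-symbols-file points the GNU linker at a plain text file (here api.sym) listing, one symbol name per line, the symbols to keep in the output's symbol table, while -Wl,--version-script points it at a version script (here api.map) that controls which symbols the shared library exports. Neither file's contents appear in this diff, so the sketch below only illustrates the standard GNU ld version-script format; the *paddle* glob is an assumption, not taken from the PR.

    /* api.map -- illustrative only: export symbols matching *paddle*, hide the rest */
    {
      global:
        *paddle*;    /* patterns are matched against (mangled) symbol names */
      local:
        *;           /* everything else stays local to the library */
    };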