Skip to content

Commit b8ea7a0

Browse files
committed
Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into unsqueeze_op
2 parents fbef49e + f920244 commit b8ea7a0

File tree

18 files changed

+496
-133
lines changed

18 files changed

+496
-133
lines changed

paddle/contrib/inference/CMakeLists.txt

Lines changed: 16 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,6 +45,10 @@ endfunction(inference_api_test)
4545
cc_library(paddle_inference_api
4646
SRCS paddle_inference_api.cc paddle_inference_api_impl.cc
4747
DEPS ${FLUID_CORE_MODULES} ${GLOB_OP_LIB})
48+
if(NOT APPLE)
49+
set(LINK_FLAGS "-Wl,--retain-symbols-file ${CMAKE_CURRENT_SOURCE_DIR}/paddle_inference_api.sym")
50+
set_target_properties(paddle_inference_api PROPERTIES LINK_FLAGS "${LINK_FLAGS}")
51+
endif()
4852

4953
# Here the shared library doesn't depend on other fluid libraries, or double free will occur.
5054
cc_library(paddle_inference_api_shared SHARED
@@ -53,8 +57,19 @@ add_dependencies(paddle_inference_api_shared ${FLUID_CORE_MODULES} ${GLOB_OP_LIB
5357
set_target_properties(paddle_inference_api_shared PROPERTIES OUTPUT_NAME paddle_inference_api)
5458

5559
if(NOT APPLE)
56-
set(LINK_FLAGS "-fPIC -fvisibility=hidden")
60+
set(LINK_FLAGS "-Wl,--version-script ${CMAKE_CURRENT_SOURCE_DIR}/paddle_inference_api.map")
5761
set_target_properties(paddle_inference_api_shared PROPERTIES LINK_FLAGS "${LINK_FLAGS}")
62+
FILE(WRITE ${CMAKE_CURRENT_BINARY_DIR}/check_symbol.cmake
63+
"execute_process(COMMAND bash -c \"${CMAKE_CURRENT_SOURCE_DIR}/check_symbol.sh"
64+
" ${CMAKE_CURRENT_BINARY_DIR}/libpaddle_inference_api.so\" RESULT_VARIABLE symbol_res)\n"
65+
"if(NOT \"\${symbol_res}\" STREQUAL \"0\")\n"
66+
" message(FATAL_ERROR \"Check symbol failed.\")\n"
67+
"endif()\n")
68+
add_custom_command(
69+
OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/.check_symbol"
70+
COMMAND ${CMAKE_COMMAND} -P "${CMAKE_CURRENT_BINARY_DIR}/check_symbol.cmake"
71+
DEPENDS paddle_inference_api_shared)
72+
add_custom_target(check_symbol ALL DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/.check_symbol")
5873
endif()
5974

6075
cc_test(test_paddle_inference_api
Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
#!/bin/bash
# Check the dynamic symbol table of a shared library: it must export at least
# one paddle symbol and must not leak any google (glog/gflags/protobuf) symbols.
# Usage: check_symbol.sh /path/to/libpaddle_inference_api.so
# Exits 0 on success, 1 on any failure.

# Validate the argument count before using it.
if [ $# -ne 1 ]; then echo "No input library"; exit 1; fi
lib="$1"

# grep -c counts matching lines directly (no extra wc process); quote the
# path so library locations containing spaces still work.
num_paddle_syms=$(nm -D --defined-only "${lib}" | grep -c paddle)
num_google_syms=$(nm -D --defined-only "${lib}" | grep -c google)

# POSIX exit status must be 0-255; use 1 instead of the non-portable -1.
if [ "$num_paddle_syms" -le 0 ]; then echo "Have no paddle symbols"; exit 1; fi
if [ "$num_google_syms" -ge 1 ]; then echo "Have some google symbols"; exit 1; fi

exit 0

paddle/contrib/inference/demo/CMakeLists.txt

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,8 +13,6 @@
1313
# limitations under the License.
1414
#
1515

16-
inference_api_test(simple_on_word2vec ARGS test_word2vec)
17-
1816
option(WITH_INFERENCE_DEMO "Compile with Inference demo" OFF)
1917
if(NOT WITH_INFERENCE_DEMO)
2018
return()
Lines changed: 77 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,77 @@
1+
# Standalone build for the C++ inference demos, built against an installed
# Paddle tree (the fluid_install_dir layout). Configure with:
#   cmake .. -DPADDLE_LIB=/path/paddle/lib -DDEMO_NAME=demo_name
cmake_minimum_required(VERSION 3.0)

project(cpp_inference_demo CXX C)

# NOTE(review): directory-scoped CMAKE_CXX_FLAGS is kept for the CMake 3.0
# floor; with CMake >= 3.1 prefer set(CMAKE_CXX_STANDARD 11).
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")

# Both variables are mandatory; fail early with a usage hint.
if(NOT DEFINED PADDLE_LIB)
  message(FATAL_ERROR "please set PADDLE_LIB with -DPADDLE_LIB=/path/paddle/lib")
endif()
if(NOT DEFINED DEMO_NAME)
  message(FATAL_ERROR "please set DEMO_NAME with -DDEMO_NAME=demo_name")
endif()

option(WITH_MKL "Compile demo with MKL/OpenBlas support, default use MKL." ON)
option(WITH_GPU "Compile demo with GPU/CPU, default use CPU." OFF)
option(WITH_STATIC_LIB "Compile demo with static/shared library, default use static." ON)

if(WITH_GPU)
  set(CUDA_LIB "/usr/local/cuda/lib64/" CACHE STRING "CUDA Library")
endif()

# Headers of Paddle itself and of the third-party packages bundled inside the
# install tree.
include_directories("${PADDLE_LIB}")
include_directories("${PADDLE_LIB}/third_party/install/protobuf/include")
include_directories("${PADDLE_LIB}/third_party/install/glog/include")
include_directories("${PADDLE_LIB}/third_party/install/gflags/include")
include_directories("${PADDLE_LIB}/third_party/install/snappy/include")
include_directories("${PADDLE_LIB}/third_party/install/snappystream/include")
include_directories("${PADDLE_LIB}/third_party/install/zlib/include")

include_directories("${PADDLE_LIB}/third_party/boost")
include_directories("${PADDLE_LIB}/third_party/eigen3")

# Search paths for the bare library names (glog, gflags, ...) linked below.
link_directories("${PADDLE_LIB}/third_party/install/snappy/lib")
link_directories("${PADDLE_LIB}/third_party/install/snappystream/lib")
link_directories("${PADDLE_LIB}/third_party/install/protobuf/lib")
link_directories("${PADDLE_LIB}/third_party/install/glog/lib")
link_directories("${PADDLE_LIB}/third_party/install/gflags/lib")
link_directories("${PADDLE_LIB}/third_party/install/zlib/lib")

add_executable(${DEMO_NAME} ${DEMO_NAME}.cc)

if(WITH_MKL)
  include_directories("${PADDLE_LIB}/third_party/install/mklml/include")
  set(MATH_LIB ${PADDLE_LIB}/third_party/install/mklml/lib/libmklml_intel.so
               ${PADDLE_LIB}/third_party/install/mklml/lib/libiomp5.so)
  # MKL-DNN is optional; only link it when the install tree ships it.
  set(MKLDNN_PATH "${PADDLE_LIB}/third_party/install/mkldnn")
  if(EXISTS "${MKLDNN_PATH}")
    include_directories("${MKLDNN_PATH}/include")
    set(MKLDNN_LIB ${MKLDNN_PATH}/lib/libmkldnn.so.0)
  endif()
else()
  set(MATH_LIB ${PADDLE_LIB}/third_party/install/openblas/lib/libopenblas.a)
endif()

if(WITH_STATIC_LIB)
  # --whole-archive keeps every object of the static fluid library, so
  # operator registrations that are never referenced directly are not dropped.
  set(DEPS
      "-Wl,--whole-archive"
      ${PADDLE_LIB}/paddle/fluid/inference/libpaddle_fluid.a
      "-Wl,--no-whole-archive"
      ${PADDLE_LIB}/contrib/inference/libpaddle_inference_api.a)
else()
  # Note: libpaddle_inference_api.so must be put before libpaddle_fluid.so
  set(DEPS
      ${PADDLE_LIB}/contrib/inference/libpaddle_inference_api.so
      ${PADDLE_LIB}/paddle/fluid/inference/libpaddle_fluid.so)
endif()
set(EXTERNAL_LIB "-lrt -ldl -lpthread")

set(DEPS ${DEPS}
    ${MATH_LIB} ${MKLDNN_LIB}
    glog gflags protobuf snappystream snappy z
    ${EXTERNAL_LIB})
if(WITH_GPU)
  set(DEPS ${DEPS} ${CUDA_LIB}/libcudart.so)
endif()

target_link_libraries(${DEMO_NAME} ${DEPS})
Lines changed: 34 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,34 @@
1+
#!/bin/bash
# Build and run the inference demo against an installed Paddle tree.
# Usage: run.sh PADDLE_ROOT WITH_MKL(ON|OFF) WITH_GPU(ON|OFF)
set -x
PADDLE_ROOT=$1
WITH_MKL=$2
WITH_GPU=$3
# When built with GPU support, exercise both CPU and GPU execution paths.
if [ "$WITH_GPU" == "ON" ]; then
  use_gpu_list='true false'
else
  use_gpu_list='false'
fi

mkdir -p build
cd build || exit 1

for WITH_STATIC_LIB in false; do
  rm -rf ./*
  cmake .. -DPADDLE_LIB=${PADDLE_ROOT}/build/fluid_install_dir/ \
    -DWITH_MKL=$WITH_MKL \
    -DDEMO_NAME=simple_on_word2vec \
    -DWITH_GPU=$WITH_GPU \
    -DWITH_STATIC_LIB=$WITH_STATIC_LIB
  make
  for use_gpu in $use_gpu_list; do
    ./simple_on_word2vec \
      --dirname=${PADDLE_ROOT}/build/python/paddle/fluid/tests/book/word2vec.inference.model \
      --use_gpu=$use_gpu
    # Fail fast: checking $? only after both loops would miss any failure
    # that is not the very last command.
    if [ $? -ne 0 ]; then
      echo "inference demo runs fail."
      exit 1
    fi
  done
done
set +x
exit 0

paddle/contrib/inference/demo/simple_on_word2vec.cc renamed to paddle/contrib/inference/demo_ci/simple_on_word2vec.cc

Lines changed: 39 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -16,21 +16,27 @@ limitations under the License. */
1616
* This file contains a simple demo for how to take a model for inference.
1717
*/
1818

19+
#include <gflags/gflags.h>
1920
#include <glog/logging.h>
20-
#include <gtest/gtest.h>
2121
#include <memory>
2222
#include <thread>
23-
#include "paddle/contrib/inference/paddle_inference_api.h"
23+
#include "contrib/inference/paddle_inference_api.h"
24+
#include "paddle/fluid/platform/enforce.h"
25+
26+
DEFINE_string(dirname, "", "Directory of the inference model.");
27+
DEFINE_bool(use_gpu, false, "Whether use gpu.");
2428

2529
namespace paddle {
2630
namespace demo {
2731

28-
DEFINE_string(dirname, "", "Directory of the inference model.");
29-
3032
void Main(bool use_gpu) {
3133
//# 1. Create PaddlePredictor with a config.
3234
NativeConfig config;
33-
config.model_dir = FLAGS_dirname + "word2vec.inference.model";
35+
if (FLAGS_dirname.empty()) {
36+
LOG(INFO) << "Usage: ./simple_on_word2vec --dirname=path/to/your/model";
37+
exit(1);
38+
}
39+
config.model_dir = FLAGS_dirname;
3440
config.use_gpu = use_gpu;
3541
config.fraction_of_gpu_memory = 0.15;
3642
config.device = 0;
@@ -54,12 +60,16 @@ void Main(bool use_gpu) {
5460
CHECK(predictor->Run(slots, &outputs));
5561

5662
//# 4. Get output.
57-
ASSERT_EQ(outputs.size(), 1UL);
58-
LOG(INFO) << "output buffer size: " << outputs.front().data.length();
63+
PADDLE_ENFORCE(outputs.size(), 1UL);
64+
// Check the output buffer size and result of each tid.
65+
PADDLE_ENFORCE(outputs.front().data.length(), 33168UL);
66+
float result[5] = {
67+
0.00129761, 0.00151112, 0.000423564, 0.00108815, 0.000932706};
5968
const size_t num_elements = outputs.front().data.length() / sizeof(float);
6069
// The outputs' buffers are in CPU memory.
6170
for (size_t i = 0; i < std::min(5UL, num_elements); i++) {
62-
LOG(INFO) << static_cast<float*>(outputs.front().data.data())[i];
71+
PADDLE_ENFORCE(static_cast<float*>(outputs.front().data.data())[i],
72+
result[i]);
6373
}
6474
}
6575
}
@@ -68,7 +78,7 @@ void MainThreads(int num_threads, bool use_gpu) {
6878
// Multi-threads only support on CPU
6979
// 0. Create PaddlePredictor with a config.
7080
NativeConfig config;
71-
config.model_dir = FLAGS_dirname + "word2vec.inference.model";
81+
config.model_dir = FLAGS_dirname;
7282
config.use_gpu = use_gpu;
7383
config.fraction_of_gpu_memory = 0.15;
7484
config.device = 0;
@@ -94,14 +104,17 @@ void MainThreads(int num_threads, bool use_gpu) {
94104
CHECK(predictor->Run(inputs, &outputs));
95105

96106
// 4. Get output.
97-
ASSERT_EQ(outputs.size(), 1UL);
98-
LOG(INFO) << "TID: " << tid << ", "
99-
<< "output buffer size: " << outputs.front().data.length();
107+
PADDLE_ENFORCE(outputs.size(), 1UL);
108+
// Check the output buffer size and result of each tid.
109+
PADDLE_ENFORCE(outputs.front().data.length(), 33168UL);
110+
float result[5] = {
111+
0.00129761, 0.00151112, 0.000423564, 0.00108815, 0.000932706};
100112
const size_t num_elements =
101113
outputs.front().data.length() / sizeof(float);
102114
// The outputs' buffers are in CPU memory.
103115
for (size_t i = 0; i < std::min(5UL, num_elements); i++) {
104-
LOG(INFO) << static_cast<float*>(outputs.front().data.data())[i];
116+
PADDLE_ENFORCE(static_cast<float*>(outputs.front().data.data())[i],
117+
result[i]);
105118
}
106119
}
107120
});
@@ -111,15 +124,18 @@ void MainThreads(int num_threads, bool use_gpu) {
111124
}
112125
}
113126

114-
TEST(demo, word2vec_cpu) { Main(false /*use_gpu*/); }
115-
TEST(demo_multi_threads, word2vec_cpu_1) { MainThreads(1, false /*use_gpu*/); }
116-
TEST(demo_multi_threads, word2vec_cpu_4) { MainThreads(4, false /*use_gpu*/); }
117-
118-
#ifdef PADDLE_WITH_CUDA
119-
TEST(demo, word2vec_gpu) { Main(true /*use_gpu*/); }
120-
TEST(demo_multi_threads, word2vec_gpu_1) { MainThreads(1, true /*use_gpu*/); }
121-
TEST(demo_multi_threads, word2vec_gpu_4) { MainThreads(4, true /*use_gpu*/); }
122-
#endif
123-
124127
} // namespace demo
125128
} // namespace paddle
129+
130+
int main(int argc, char** argv) {
131+
google::ParseCommandLineFlags(&argc, &argv, true);
132+
paddle::demo::Main(false /* use_gpu*/);
133+
paddle::demo::MainThreads(1, false /* use_gpu*/);
134+
paddle::demo::MainThreads(4, false /* use_gpu*/);
135+
if (FLAGS_use_gpu) {
136+
paddle::demo::Main(true /*use_gpu*/);
137+
paddle::demo::MainThreads(1, true /*use_gpu*/);
138+
paddle::demo::MainThreads(4, true /*use_gpu*/);
139+
}
140+
return 0;
141+
}
Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
{
2+
global:
3+
*paddle*;
4+
local:
5+
*;
6+
};
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
*paddle*

paddle/fluid/inference/CMakeLists.txt

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,12 @@ endif()
1313

1414
# Create static library
1515
cc_library(paddle_fluid DEPS ${fluid_modules} paddle_fluid_api)
16+
if(NOT APPLE)
17+
# TODO(liuyiqiu): Temporarily disable the link flag because it is not supported on Mac.
18+
set(LINK_FLAGS "-Wl,--retain-symbols-file ${CMAKE_CURRENT_SOURCE_DIR}/paddle_fluid.sym")
19+
set_target_properties(paddle_fluid PROPERTIES LINK_FLAGS "${LINK_FLAGS}")
20+
endif()
21+
1622
# Create shared library
1723
cc_library(paddle_fluid_shared SHARED
1824
SRCS io.cc
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
*paddle*

0 commit comments

Comments
 (0)