Skip to content

Commit 824a82d

Browse files
authored
Merge pull request #13672 from luotao1/gen_fluid_library
Reduce inference CI time
2 parents 150c087 + cfbd71c commit 824a82d

File tree

2 files changed

+22
-8
lines changed

2 files changed

+22
-8
lines changed

paddle/fluid/inference/api/demo_ci/run.sh

Lines changed: 9 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,9 @@ set -x
22
PADDLE_ROOT=$1
33
TURN_ON_MKL=$2 # use MKL or Openblas
44
TEST_GPU_CPU=$3 # test both GPU/CPU mode or only CPU mode
5+
DATA_DIR=$4 # dataset
6+
cd `dirname $0`
7+
current_dir=`pwd`
58
if [ $2 == ON ]; then
69
# You can export yourself if move the install path
710
MKL_LIB=${PADDLE_ROOT}/build/fluid_install_dir/third_party/install/mklml/lib
@@ -29,15 +32,15 @@ function download() {
2932
fi
3033
cd ..
3134
}
32-
mkdir -p data
33-
cd data
35+
mkdir -p $DATA_DIR
36+
cd $DATA_DIR
3437
vis_demo_list='se_resnext50 ocr mobilenet'
3538
for vis_demo_name in $vis_demo_list; do
3639
download $vis_demo_name
3740
done
38-
cd ..
3941

4042
# compile and test the demo
43+
cd $current_dir
4144
mkdir -p build
4245
cd build
4346

@@ -73,9 +76,9 @@ for WITH_STATIC_LIB in ON OFF; do
7376
for use_gpu in $use_gpu_list; do
7477
for vis_demo_name in $vis_demo_list; do
7578
./vis_demo \
76-
--modeldir=../data/$vis_demo_name/model \
77-
--data=../data/$vis_demo_name/data.txt \
78-
--refer=../data/$vis_demo_name/result.txt \
79+
--modeldir=$DATA_DIR/$vis_demo_name/model \
80+
--data=$DATA_DIR/$vis_demo_name/data.txt \
81+
--refer=$DATA_DIR/$vis_demo_name/result.txt \
7982
--use_gpu=$use_gpu
8083
if [ $? -ne 0 ]; then
8184
echo "vis demo $vis_demo_name runs fail."

paddle/scripts/paddle_build.sh

Lines changed: 13 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -654,11 +654,21 @@ function gen_fluid_inference_lib() {
654654
if [[ ${WITH_C_API:-OFF} == "OFF" && ${WITH_INFERENCE:-ON} == "ON" ]] ; then
655655
cat <<EOF
656656
========================================
657-
Deploying fluid inference library ...
657+
Generating fluid inference library ...
658658
========================================
659659
EOF
660660
cmake .. -DWITH_DISTRIBUTE=OFF
661661
make -j `nproc` inference_lib_dist
662+
fi
663+
}
664+
665+
function tar_fluid_inference_lib() {
666+
if [[ ${WITH_C_API:-OFF} == "OFF" && ${WITH_INFERENCE:-ON} == "ON" ]] ; then
667+
cat <<EOF
668+
========================================
669+
Tarring fluid inference library ...
670+
========================================
671+
EOF
662672
cd ${PADDLE_ROOT}/build
663673
cp -r fluid_install_dir fluid
664674
tar -czf fluid.tgz fluid
@@ -673,7 +683,7 @@ function test_fluid_inference_lib() {
673683
========================================
674684
EOF
675685
cd ${PADDLE_ROOT}/paddle/fluid/inference/api/demo_ci
676-
./run.sh ${PADDLE_ROOT} ${WITH_MKL:-ON} ${WITH_GPU:-OFF}
686+
./run.sh ${PADDLE_ROOT} ${WITH_MKL:-ON} ${WITH_GPU:-OFF} ${INFERENCE_DEMO_INSTALL_DIR}
677687
./clean.sh
678688
fi
679689
}
@@ -722,6 +732,7 @@ function main() {
722732
fluid_inference_lib)
723733
cmake_gen ${PYTHON_ABI:-""}
724734
gen_fluid_inference_lib
735+
tar_fluid_inference_lib
725736
test_fluid_inference_lib
726737
;;
727738
check_style)

0 commit comments

Comments (0)