#!/bin/bash

# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
# 
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# 
#     http://www.apache.org/licenses/LICENSE-2.0
# 
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

17
set -x
I
iducn 已提交
18 19 20 21
PADDLE_ROOT=$1
TURN_ON_MKL=$2 # use MKL or Openblas
TEST_GPU_CPU=$3 # test both GPU/CPU mode or only CPU mode
DATA_DIR=$4 # dataset
22 23
USE_TENSORRT=$5
TENSORRT_ROOT_DIR=$6 # TensorRT root dir, default to /usr
24 25
WITH_ONNXRUNTIME=$7
MSVC_STATIC_CRT=$8
26
CUDA_LIB=$9/lib/x64
I
iducn 已提交
27
inference_install_dir=${PADDLE_ROOT}/build/paddle_inference_install_dir
28
WIN_DETECT=$(echo `uname` | grep "Win") # detect current platform
N
nhzlx 已提交
29

I
iducn 已提交
30 31 32
cd `dirname $0`
current_dir=`pwd`
if [ $2 == ON ]; then
33
  # You can export yourself if move the install path
I
iducn 已提交
34 35
  MKL_LIB=${inference_install_dir}/third_party/install/mklml/lib
  export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${MKL_LIB}
36
fi
I
iducn 已提交
37
if [ $3 == ON ]; then
38
  use_gpu_list='true false'
Y
Yan Chunwei 已提交
39
else
40 41
  use_gpu_list='false'
fi
L
Luo Tao 已提交
42

43 44 45 46 47 48 49 50 51 52 53 54 55 56
mkdir -p $DATA_DIR
cd $DATA_DIR

if [ $7 == ON ]; then
  ONNXRUNTIME_LIB=${inference_install_dir}/third_party/install/onnxruntime/lib
  export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${ONNXRUNTIME_LIB}
  PADDLE2ONNX_LIB=${inference_install_dir}/third_party/install/paddle2onnx/lib
  export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${PADDLE2ONNX_LIB}
  #download model
  mkdir -p MobileNetV2
  cd MobileNetV2
  if [[ -e "MobileNetV2.inference.model.tar.gz" ]]; then
    echo "MobileNetV2.inference.model.tar.gz has been downloaded."
  else
57 58 59 60 61
    if [ $WIN_DETECT != "" ]; then
      wget -q -Y off http://paddle-inference-dist.bj.bcebos.com/MobileNetV2.inference.model.tar.gz
    else
      wget -q --no-proxy http://paddle-inference-dist.bj.bcebos.com/MobileNetV2.inference.model.tar.gz
    fi
62 63 64 65 66
    tar xzf *.tar.gz
  fi
  cd ..
fi

D
dzhwinter 已提交
67
PREFIX=inference-vis-demos%2F
68
URL_ROOT=http://paddlemodels.bj.bcebos.com/${PREFIX}
D
dzhwinter 已提交
69

L
Luo Tao 已提交
70 71
# download vis_demo data
function download() {
I
iducn 已提交
72 73 74
  dir_name=$1
  mkdir -p $dir_name
  cd $dir_name
D
dzhwinter 已提交
75
  if [[ -e "${PREFIX}${dir_name}.tar.gz" ]]; then
76
    echo "${PREFIX}${dir_name}.tar.gz has been downloaded."
D
dzhwinter 已提交
77
  else
78 79 80 81 82
      if [ $WIN_DETECT != "" ]; then
        wget -q -Y off ${URL_ROOT}$dir_name.tar.gz
      else
        wget -q --no-proxy ${URL_ROOT}$dir_name.tar.gz
      fi
I
iducn 已提交
83
      tar xzf *.tar.gz
D
dzhwinter 已提交
84
  fi
I
iducn 已提交
85
  cd ..
L
Luo Tao 已提交
86
}
87

L
Luo Tao 已提交
88 89
vis_demo_list='se_resnext50 ocr mobilenet'
for vis_demo_name in $vis_demo_list; do
I
iducn 已提交
90
  download $vis_demo_name
L
Luo Tao 已提交
91 92
done

93 94
# download word2vec data
mkdir -p word2vec
I
iducn 已提交
95
cd word2vec
96 97 98 99
if [[ -e "word2vec.inference.model.tar.gz" ]]; then
  echo "word2vec.inference.model.tar.gz has been downloaded."
else
    wget -q http://paddle-inference-dist.bj.bcebos.com/word2vec.inference.model.tar.gz
I
iducn 已提交
100
    tar xzf *.tar.gz
101 102
fi

L
Luo Tao 已提交
103
# compile and test the demo
I
iducn 已提交
104
cd $current_dir
L
Luo Tao 已提交
105
mkdir -p build
I
iducn 已提交
106 107
cd build
rm -rf *
L
Luo Tao 已提交
108

109
for WITH_STATIC_LIB in ON OFF; do
I
iducn 已提交
110
  if [ $(echo `uname` | grep "Win") != "" ]; then
W
Wilber 已提交
111 112
    # TODO(wilber, T8T9): Do we still need to support windows gpu static library
    if [ $TEST_GPU_CPU == ON ] && [ $WITH_STATIC_LIB == ON ]; then
Z
Zhou Wei 已提交
113
      continue
W
Wilber 已提交
114
    fi
115
    # -----simple_on_word2vec on windows-----
116
    cmake .. -GNinja -DPADDLE_LIB=${inference_install_dir} \
I
iducn 已提交
117
      -DWITH_MKL=$TURN_ON_MKL \
118
      -DDEMO_NAME=simple_on_word2vec \
I
iducn 已提交
119 120
      -DWITH_GPU=$TEST_GPU_CPU \
      -DWITH_STATIC_LIB=$WITH_STATIC_LIB \
121
      -DMSVC_STATIC_CRT=$MSVC_STATIC_CRT \
122 123 124 125
      -DWITH_ONNXRUNTIME=$WITH_ONNXRUNTIME \
      -DCMAKE_BUILD_TYPE=Release \
      -DCUDA_LIB="$CUDA_LIB"
    ninja
126
    for use_gpu in $use_gpu_list; do
127
      ./simple_on_word2vec.exe \
I
iducn 已提交
128 129 130
        --dirname=$DATA_DIR/word2vec/word2vec.inference.model \
        --use_gpu=$use_gpu
      if [ $? -ne 0 ]; then
131 132 133 134 135 136
        echo "simple_on_word2vec demo runs fail."
        exit 1
      fi
    done

    # -----vis_demo on windows-----
I
iducn 已提交
137
    rm -rf *
138
    cmake .. -GNinja -DPADDLE_LIB=${inference_install_dir} \
I
iducn 已提交
139
      -DWITH_MKL=$TURN_ON_MKL \
140
      -DDEMO_NAME=vis_demo \
I
iducn 已提交
141 142
      -DWITH_GPU=$TEST_GPU_CPU \
      -DWITH_STATIC_LIB=$WITH_STATIC_LIB \
143
      -DMSVC_STATIC_CRT=$MSVC_STATIC_CRT \
144 145 146 147
      -DWITH_ONNXRUNTIME=$WITH_ONNXRUNTIME \
      -DCMAKE_BUILD_TYPE=Release \
      -DCUDA_LIB="$CUDA_LIB"
    ninja
148 149
    for use_gpu in $use_gpu_list; do
      for vis_demo_name in $vis_demo_list; do
150
        ./vis_demo.exe \
I
iducn 已提交
151 152 153 154 155
          --modeldir=$DATA_DIR/$vis_demo_name/model \
          --data=$DATA_DIR/$vis_demo_name/data.txt \
          --refer=$DATA_DIR/$vis_demo_name/result.txt \
          --use_gpu=$use_gpu
        if [ $? -ne 0 ]; then
156 157 158 159 160
          echo "vis demo $vis_demo_name runs fail."
          exit 1
        fi
      done
    done
161
    
162 163 164
    # --------tensorrt mobilenet on windows------
    if [ $USE_TENSORRT == ON -a $TEST_GPU_CPU == ON ]; then
      rm -rf *
165
      cmake .. -GNinja -DPADDLE_LIB=${inference_install_dir} \
166 167 168 169 170 171
        -DWITH_MKL=$TURN_ON_MKL \
        -DDEMO_NAME=trt_mobilenet_demo \
        -DWITH_GPU=$TEST_GPU_CPU \
        -DWITH_STATIC_LIB=$WITH_STATIC_LIB \
        -DMSVC_STATIC_CRT=$MSVC_STATIC_CRT \
        -DUSE_TENSORRT=$USE_TENSORRT \
172
        -DTENSORRT_ROOT=$TENSORRT_ROOT_DIR \
173 174 175 176 177
        -DWITH_ONNXRUNTIME=$WITH_ONNXRUNTIME \
        -DCMAKE_BUILD_TYPE=Release \
        -DCUDA_LIB="$CUDA_LIB"
      ninja
      ./trt_mobilenet_demo.exe \
178 179 180 181 182 183 184 185
        --modeldir=$DATA_DIR/mobilenet/model \
        --data=$DATA_DIR/mobilenet/data.txt \
        --refer=$DATA_DIR/mobilenet/result.txt 
      if [ $? -ne 0 ]; then
        echo "trt demo trt_mobilenet_demo runs fail."
        exit 1
      fi
    fi
186
  else
187
    # -----simple_on_word2vec on linux/mac-----
I
iducn 已提交
188 189 190
    rm -rf *
    cmake .. -DPADDLE_LIB=${inference_install_dir} \
      -DWITH_MKL=$TURN_ON_MKL \
191
      -DDEMO_NAME=simple_on_word2vec \
I
iducn 已提交
192
      -DWITH_GPU=$TEST_GPU_CPU \
193 194
      -DWITH_STATIC_LIB=$WITH_STATIC_LIB \
      -DWITH_ONNXRUNTIME=$WITH_ONNXRUNTIME
I
iducn 已提交
195 196 197
    make -j$(nproc)
    word2vec_model=$DATA_DIR'/word2vec/word2vec.inference.model'
    if [ -d $word2vec_model ]; then
198 199
      for use_gpu in $use_gpu_list; do
        ./simple_on_word2vec \
I
iducn 已提交
200 201 202
          --dirname=$DATA_DIR/word2vec/word2vec.inference.model \
          --use_gpu=$use_gpu
        if [ $? -ne 0 ]; then
203 204 205 206 207 208
          echo "simple_on_word2vec demo runs fail."
          exit 1
        fi
      done
    fi
    # ---------vis_demo on linux/mac---------
I
iducn 已提交
209 210 211
    rm -rf *
    cmake .. -DPADDLE_LIB=${inference_install_dir} \
      -DWITH_MKL=$TURN_ON_MKL \
212
      -DDEMO_NAME=vis_demo \
I
iducn 已提交
213
      -DWITH_GPU=$TEST_GPU_CPU \
214 215
      -DWITH_STATIC_LIB=$WITH_STATIC_LIB \
      -DWITH_ONNXRUNTIME=$WITH_ONNXRUNTIME
I
iducn 已提交
216
    make -j$(nproc)
217 218 219
    for use_gpu in $use_gpu_list; do
      for vis_demo_name in $vis_demo_list; do
        ./vis_demo \
I
iducn 已提交
220 221 222 223 224
          --modeldir=$DATA_DIR/$vis_demo_name/model \
          --data=$DATA_DIR/$vis_demo_name/data.txt \
          --refer=$DATA_DIR/$vis_demo_name/result.txt \
          --use_gpu=$use_gpu
        if [ $? -ne 0 ]; then
225 226 227 228 229 230
          echo "vis demo $vis_demo_name runs fail."
          exit 1
        fi
      done
    done
    # --------tensorrt mobilenet on linux/mac------
I
iducn 已提交
231 232 233 234
    if [ $USE_TENSORRT == ON -a $TEST_GPU_CPU == ON ]; then
      rm -rf *
      cmake .. -DPADDLE_LIB=${inference_install_dir} \
        -DWITH_MKL=$TURN_ON_MKL \
235
        -DDEMO_NAME=trt_mobilenet_demo \
I
iducn 已提交
236 237 238
        -DWITH_GPU=$TEST_GPU_CPU \
        -DWITH_STATIC_LIB=$WITH_STATIC_LIB \
        -DUSE_TENSORRT=$USE_TENSORRT \
239 240
        -DTENSORRT_ROOT=$TENSORRT_ROOT_DIR \
        -DWITH_ONNXRUNTIME=$WITH_ONNXRUNTIME
I
iducn 已提交
241
      make -j$(nproc)
242
      ./trt_mobilenet_demo \
I
iducn 已提交
243 244 245 246
        --modeldir=$DATA_DIR/mobilenet/model \
        --data=$DATA_DIR/mobilenet/data.txt \
        --refer=$DATA_DIR/mobilenet/result.txt 
      if [ $? -ne 0 ]; then
247 248 249
        echo "trt demo trt_mobilenet_demo runs fail."
        exit 1
      fi
N
nhzlx 已提交
250
    fi
251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270

    # --------onnxruntime mobilenetv2 on linux/mac------
    if [ $WITH_ONNXRUNTIME == ON ]; then
      rm -rf *
      cmake .. -DPADDLE_LIB=${inference_install_dir} \
        -DWITH_MKL=$TURN_ON_MKL \
        -DDEMO_NAME=onnxruntime_mobilenet_demo \
        -DWITH_GPU=$TEST_GPU_CPU \
        -DWITH_STATIC_LIB=$WITH_STATIC_LIB \
        -DUSE_TENSORRT=$USE_TENSORRT \
        -DTENSORRT_ROOT=$TENSORRT_ROOT_DIR \
        -DWITH_ONNXRUNTIME=$WITH_ONNXRUNTIME
      make -j$(nproc)
      ./onnxruntime_mobilenet_demo \
        --modeldir=$DATA_DIR/MobileNetV2/MobileNetV2
      if [ $? -ne 0 ]; then
        echo "onnxruntime demo onnxruntime_mobilenet_demo runs fail."
        exit 1
      fi
    fi
271 272
  fi
done
273
set +x