#!/bin/bash
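# Build and run the Paddle C++ inference demos.
# Usage: run.sh <PADDLE_ROOT> <TURN_ON_MKL> <TEST_GPU_CPU> <DATA_DIR> \
#               [TENSORRT_INCLUDE_DIR] [TENSORRT_LIB_DIR] [MSVC_STATIC_CRT]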
set -x
PADDLE_ROOT=$1
TURN_ON_MKL=$2 # use MKL or Openblas
TEST_GPU_CPU=$3 # test both GPU/CPU mode or only CPU mode
DATA_DIR=$4 # dataset
TENSORRT_INCLUDE_DIR=$5 # TensorRT header file dir, default to /usr/local/TensorRT/include
TENSORRT_LIB_DIR=$6 # TensorRT lib file dir, default to /usr/local/TensorRT/lib
MSVC_STATIC_CRT=$7
inference_install_dir=${PADDLE_ROOT}/build/paddle_inference_install_dir

cd `dirname $0`
current_dir=`pwd`
if [ $TURN_ON_MKL == ON ]; then
  # If you move the install path, export LD_LIBRARY_PATH yourself.
  MKL_LIB=${inference_install_dir}/third_party/install/mklml/lib
  export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${MKL_LIB}
fi
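# Run each demo in both GPU and CPU mode when GPU testing is enabled, otherwise CPU only.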
if [ $TEST_GPU_CPU == ON ]; then
  use_gpu_list='true false'
else
  use_gpu_list='false'
fi

USE_TENSORRT=OFF
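# Turn on the TensorRT demo only when both the header and the library directories exist.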
if [ -d "$TENSORRT_INCLUDE_DIR" -a -d "$TENSORRT_LIB_DIR" ]; then
  USE_TENSORRT=ON
fi

PREFIX=inference-vis-demos%2F
URL_ROOT=http://paddlemodels.bj.bcebos.com/${PREFIX}

# download vis_demo data
function download() {
  dir_name=$1
  mkdir -p $dir_name
  cd $dir_name
  if [[ -e "${PREFIX}${dir_name}.tar.gz" ]]; then
    echo "${PREFIX}${dir_name}.tar.gz has been downloaded."
  else
    wget -q ${URL_ROOT}$dir_name.tar.gz
    tar xzf *.tar.gz
  fi
  cd ..
}
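
# Fetch the demo models and data into DATA_DIR.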
mkdir -p $DATA_DIR
cd $DATA_DIR
vis_demo_list='se_resnext50 ocr mobilenet'
for vis_demo_name in $vis_demo_list; do
  download $vis_demo_name
done

# download word2vec data
mkdir -p word2vec
cd word2vec
if [[ -e "word2vec.inference.model.tar.gz" ]]; then
  echo "word2vec.inference.model.tar.gz has been downloaded."
else
  wget -q http://paddle-inference-dist.bj.bcebos.com/word2vec.inference.model.tar.gz
  tar xzf *.tar.gz
fi

# compile and test the demo
cd $current_dir
mkdir -p build
cd build
rm -rf *

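# Build and run every demo against both the static and the shared inference library.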
for WITH_STATIC_LIB in ON OFF; do
  if [ "$(uname | grep Win)" != "" ]; then
    # TODO(xingzhaolong, jiweibo): remove this if windows GPU library is ready.
    if [ $TEST_GPU_CPU == ON ] && [ $WITH_STATIC_LIB == ON ]; then
      exit 0
    fi
    
    # -----simple_on_word2vec on windows-----
    cmake .. -G "Visual Studio 14 2015" -A x64 -DPADDLE_LIB=${inference_install_dir} \
      -DWITH_MKL=$TURN_ON_MKL \
      -DDEMO_NAME=simple_on_word2vec \
      -DWITH_GPU=$TEST_GPU_CPU \
      -DWITH_STATIC_LIB=$WITH_STATIC_LIB \
      -DMSVC_STATIC_CRT=$MSVC_STATIC_CRT
    msbuild  /maxcpucount /property:Configuration=Release cpp_inference_demo.sln
    for use_gpu in $use_gpu_list; do
      Release/simple_on_word2vec.exe \
        --dirname=$DATA_DIR/word2vec/word2vec.inference.model \
        --use_gpu=$use_gpu
      if [ $? -ne 0 ]; then
        echo "simple_on_word2vec demo runs fail."
        exit 1
      fi
    done

    # -----vis_demo on windows-----
    rm -rf *
    cmake .. -G "Visual Studio 14 2015" -A x64 -DPADDLE_LIB=${inference_install_dir} \
      -DWITH_MKL=$TURN_ON_MKL \
      -DDEMO_NAME=vis_demo \
      -DWITH_GPU=$TEST_GPU_CPU \
      -DWITH_STATIC_LIB=$WITH_STATIC_LIB \
      -DMSVC_STATIC_CRT=$MSVC_STATIC_CRT
    msbuild  /maxcpucount /property:Configuration=Release cpp_inference_demo.sln
    for use_gpu in $use_gpu_list; do
      for vis_demo_name in $vis_demo_list; do
        Release/vis_demo.exe \
          --modeldir=$DATA_DIR/$vis_demo_name/model \
          --data=$DATA_DIR/$vis_demo_name/data.txt \
          --refer=$DATA_DIR/$vis_demo_name/result.txt \
          --use_gpu=$use_gpu
        if [ $? -ne 0 ]; then
          echo "vis demo $vis_demo_name runs fail."
          exit 1
        fi
      done
    done
  else
    # -----simple_on_word2vec on linux/mac-----
    rm -rf *
    cmake .. -DPADDLE_LIB=${inference_install_dir} \
      -DWITH_MKL=$TURN_ON_MKL \
      -DDEMO_NAME=simple_on_word2vec \
      -DWITH_GPU=$TEST_GPU_CPU \
      -DWITH_STATIC_LIB=$WITH_STATIC_LIB
    make -j$(nproc)
    word2vec_model=$DATA_DIR'/word2vec/word2vec.inference.model'
    if [ -d $word2vec_model ]; then
      for use_gpu in $use_gpu_list; do
        ./simple_on_word2vec \
          --dirname=$DATA_DIR/word2vec/word2vec.inference.model \
          --use_gpu=$use_gpu
        if [ $? -ne 0 ]; then
          echo "simple_on_word2vec demo runs fail."
          exit 1
        fi
      done
    fi
    # ---------vis_demo on linux/mac---------
    rm -rf *
    cmake .. -DPADDLE_LIB=${inference_install_dir} \
      -DWITH_MKL=$TURN_ON_MKL \
      -DDEMO_NAME=vis_demo \
      -DWITH_GPU=$TEST_GPU_CPU \
      -DWITH_STATIC_LIB=$WITH_STATIC_LIB
    make -j$(nproc)
    for use_gpu in $use_gpu_list; do
      for vis_demo_name in $vis_demo_list; do
        ./vis_demo \
          --modeldir=$DATA_DIR/$vis_demo_name/model \
          --data=$DATA_DIR/$vis_demo_name/data.txt \
          --refer=$DATA_DIR/$vis_demo_name/result.txt \
          --use_gpu=$use_gpu
        if [ $? -ne 0 ]; then
          echo "vis demo $vis_demo_name runs fail."
          exit 1
        fi
      done
    done
    # --------tensorrt mobilenet on linux/mac------
    if [ $USE_TENSORRT == ON -a $TEST_GPU_CPU == ON ]; then
      rm -rf *
      cmake .. -DPADDLE_LIB=${inference_install_dir} \
        -DWITH_MKL=$TURN_ON_MKL \
        -DDEMO_NAME=trt_mobilenet_demo \
        -DWITH_GPU=$TEST_GPU_CPU \
        -DWITH_STATIC_LIB=$WITH_STATIC_LIB \
        -DUSE_TENSORRT=$USE_TENSORRT \
        -DTENSORRT_INCLUDE_DIR=$TENSORRT_INCLUDE_DIR \
        -DTENSORRT_LIB_DIR=$TENSORRT_LIB_DIR
      make -j$(nproc)
      ./trt_mobilenet_demo \
        --modeldir=$DATA_DIR/mobilenet/model \
        --data=$DATA_DIR/mobilenet/data.txt \
        --refer=$DATA_DIR/mobilenet/result.txt
      if [ $? -ne 0 ]; then
        echo "trt demo trt_mobilenet_demo runs fail."
        exit 1
      fi
    fi
  fi
done
set +x