#!/bin/bash
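#
# Build and run the Paddle C++ inference demos against an existing Paddle build tree.
# Example invocation (paths are illustrative):
#   ./run.sh /path/to/Paddle ON ON /path/to/demo_data \
#       /usr/local/TensorRT/include /usr/local/TensorRT/lib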
set -x
PADDLE_ROOT=$1
TURN_ON_MKL=$2 # use MKL or OpenBLAS
TEST_GPU_CPU=$3 # test both GPU/CPU mode or only CPU mode
DATA_DIR=$4 # dataset directory; demo models and data are downloaded here
TENSORRT_INCLUDE_DIR=$5 # TensorRT header file dir, defaults to /usr/local/TensorRT/include
TENSORRT_LIB_DIR=$6 # TensorRT lib file dir, defaults to /usr/local/TensorRT/lib

inference_install_dir=${PADDLE_ROOT}/build/fluid_inference_install_dir

cd `dirname $0`
current_dir=`pwd`
if [ "$TURN_ON_MKL" == ON ]; then
  # Export the MKL library path yourself if you relocate the install directory.
  MKL_LIB=${inference_install_dir}/third_party/install/mklml/lib
  export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${MKL_LIB}
fi
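# With GPU testing enabled, each demo is run in both GPU and CPU mode.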
if [ "$TEST_GPU_CPU" == ON ]; then
  use_gpu_list='true false'
else
  use_gpu_list='false'
fi

USE_TENSORRT=OFF
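# Enable the TensorRT demo only when both the header and library directories exist.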
if [ -d "$TENSORRT_INCLUDE_DIR" -a -d "$TENSORRT_LIB_DIR" ]; then
  USE_TENSORRT=ON
fi

PREFIX=inference-vis-demos%2F
URL_ROOT=http://paddlemodels.bj.bcebos.com/${PREFIX}

# download vis_demo data
function download() {
  dir_name=$1
  mkdir -p $dir_name
  cd $dir_name
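  # Skip the download if the archive (checked under its URL-encoded name) is already present.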
  if [[ -e "${PREFIX}${dir_name}.tar.gz" ]]; then
    echo "${PREFIX}${dir_name}.tar.gz has been downloaded."
  else
    wget -q ${URL_ROOT}$dir_name.tar.gz
    tar xzf *.tar.gz
  fi
  cd ..
}
mkdir -p $DATA_DIR
cd $DATA_DIR
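# Each archive is expected to provide model/, data.txt and result.txt under $DATA_DIR/<name>/ for vis_demo below.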
vis_demo_list='se_resnext50 ocr mobilenet'
for vis_demo_name in $vis_demo_list; do
  download $vis_demo_name
done

# download word2vec data
mkdir -p word2vec
cd word2vec
if [[ -e "word2vec.inference.model.tar.gz" ]]; then
  echo "word2vec.inference.model.tar.gz has been downloaded."
else
  wget -q http://paddle-inference-dist.bj.bcebos.com/word2vec.inference.model.tar.gz
  tar xzf *.tar.gz
fi

# compile and test the demo
cd $current_dir
mkdir -p build
cd build
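# Start each configuration from a clean build directory.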
rm -rf *

# Windows (uname containing "Win") builds with the Visual Studio generator; other platforms use make.
if [ "$(uname | grep "Win")" != "" ]; then
  # -----simple_on_word2vec on windows-----
  cmake .. -G "Visual Studio 14 2015" -A x64 -DPADDLE_LIB=${inference_install_dir} \
    -DWITH_MKL=$TURN_ON_MKL \
    -DDEMO_NAME=simple_on_word2vec \
    -DWITH_GPU=$TEST_GPU_CPU \
    -DWITH_STATIC_LIB=OFF
  msbuild  /maxcpucount /property:Configuration=Release cpp_inference_demo.sln
  Release/simple_on_word2vec.exe \
      --dirname=$DATA_DIR/word2vec/word2vec.inference.model \
      --use_gpu=False
  if [ $? -ne 0 ]; then
    echo "simple_on_word2vec demo runs fail."
    exit 1
  fi

  # -----vis_demo on windows-----
  rm -rf *
  cmake .. -G "Visual Studio 14 2015" -A x64 -DPADDLE_LIB=${inference_install_dir} \
    -DWITH_MKL=$TURN_ON_MKL \
    -DDEMO_NAME=vis_demo \
    -DWITH_GPU=$TEST_GPU_CPU \
    -DWITH_STATIC_LIB=OFF
  msbuild  /maxcpucount /property:Configuration=Release cpp_inference_demo.sln
  for vis_demo_name in $vis_demo_list; do
    Release/vis_demo.exe \
      --modeldir=$DATA_DIR/$vis_demo_name/model \
      --data=$DATA_DIR/$vis_demo_name/data.txt \
      --refer=$DATA_DIR/$vis_demo_name/result.txt \
      --use_gpu=False
    if [ $? -ne 0 ]; then
      echo "vis demo $vis_demo_name runs fail."
      exit 1
    fi
  done
else
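  # On Linux/macOS, build and run every demo against both the static and the shared inference library.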
  for WITH_STATIC_LIB in ON OFF; do
    # -----simple_on_word2vec on linux/mac-----
    rm -rf *
    cmake .. -DPADDLE_LIB=${inference_install_dir} \
      -DWITH_MKL=$TURN_ON_MKL \
      -DDEMO_NAME=simple_on_word2vec \
      -DWITH_GPU=$TEST_GPU_CPU \
      -DWITH_STATIC_LIB=$WITH_STATIC_LIB
    make -j$(nproc)
    word2vec_model=$DATA_DIR'/word2vec/word2vec.inference.model'
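    # Run simple_on_word2vec only if the word2vec model was downloaded and unpacked.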
    if [ -d $word2vec_model ]; then
      for use_gpu in $use_gpu_list; do
        ./simple_on_word2vec \
          --dirname=$DATA_DIR/word2vec/word2vec.inference.model \
          --use_gpu=$use_gpu
        if [ $? -ne 0 ]; then
          echo "simple_on_word2vec demo runs fail."
          exit 1
        fi
      done
    fi

    # ---------vis_demo on linux/mac---------
    rm -rf *
    cmake .. -DPADDLE_LIB=${inference_install_dir} \
      -DWITH_MKL=$TURN_ON_MKL \
      -DDEMO_NAME=vis_demo \
      -DWITH_GPU=$TEST_GPU_CPU \
      -DWITH_STATIC_LIB=$WITH_STATIC_LIB
    make -j$(nproc)
    for use_gpu in $use_gpu_list; do
      for vis_demo_name in $vis_demo_list; do
        ./vis_demo \
          --modeldir=$DATA_DIR/$vis_demo_name/model \
          --data=$DATA_DIR/$vis_demo_name/data.txt \
          --refer=$DATA_DIR/$vis_demo_name/result.txt \
          --use_gpu=$use_gpu
        if [ $? -ne 0 ]; then
          echo "vis demo $vis_demo_name runs fail."
          exit 1
        fi
      done
    done

    # --------tensorrt mobilenet on linux/mac------
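    # Built only when TensorRT was detected and GPU testing is enabled.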
    if [ $USE_TENSORRT == ON -a $TEST_GPU_CPU == ON ]; then
      rm -rf *
      cmake .. -DPADDLE_LIB=${inference_install_dir} \
        -DWITH_MKL=$TURN_ON_MKL \
        -DDEMO_NAME=trt_mobilenet_demo \
        -DWITH_GPU=$TEST_GPU_CPU \
        -DWITH_STATIC_LIB=$WITH_STATIC_LIB \
        -DUSE_TENSORRT=$USE_TENSORRT \
        -DTENSORRT_INCLUDE_DIR=$TENSORRT_INCLUDE_DIR \
        -DTENSORRT_LIB_DIR=$TENSORRT_LIB_DIR
      make -j$(nproc)
      ./trt_mobilenet_demo \
        --modeldir=$DATA_DIR/mobilenet/model \
        --data=$DATA_DIR/mobilenet/data.txt \
        --refer=$DATA_DIR/mobilenet/result.txt
      if [ $? -ne 0 ]; then
        echo "TensorRT demo trt_mobilenet_demo failed to run."
        exit 1
      fi
    fi
  done
fi
set +x