#!/bin/bash
# Build and smoke-test the Paddle C++ inference demos (word2vec, vis demos,
# optional TensorRT mobilenet) against a built inference library.
set -x

PADDLE_ROOT="$1"
TURN_ON_MKL="$2"          # use MKL or Openblas
TEST_GPU_CPU="$3"         # test both GPU/CPU mode or only CPU mode
DATA_DIR="$4"             # dataset
TENSORRT_INCLUDE_DIR="$5" # TensorRT header file dir, default to /usr/local/TensorRT/include
TENSORRT_LIB_DIR="$6"     # TensorRT lib file dir, default to /usr/local/TensorRT/lib
MSVC_STATIC_CRT="$7"
# Location of the pre-built inference library inside the Paddle build tree.
inference_install_dir="${PADDLE_ROOT}"/build/paddle_inference_install_dir
cd "$(dirname "$0")" || exit
current_dir=$(pwd)
# When built against MKL, the demos need libmklml at runtime.
if [ "$TURN_ON_MKL" == ON ]; then
  # You can export yourself if move the install path
  MKL_LIB="${inference_install_dir}"/third_party/install/mklml/lib
  export LD_LIBRARY_PATH="$LD_LIBRARY_PATH":"${MKL_LIB}"
fi
# Which device modes to exercise for every demo below.
if [ "$TEST_GPU_CPU" == ON ]; then
  use_gpu_list='true false'
else
  use_gpu_list='false'
fi

# Enable the TensorRT demo only when both header and lib dirs exist.
USE_TENSORRT=OFF
if [ -d "$TENSORRT_INCLUDE_DIR" ] && [ -d "$TENSORRT_LIB_DIR" ]; then
  USE_TENSORRT=ON
fi

# Demo models live under a URL-encoded directory ("%2F" is an encoded "/").
PREFIX=inference-vis-demos%2F
URL_ROOT=http://paddlemodels.bj.bcebos.com/${PREFIX}

# download vis_demo data
# Fetches and unpacks "$1"'s model tarball into ./"$1", skipping the
# download when the tarball is already present.
function download() {
  dir_name="$1"
  mkdir -p "$dir_name"
  cd "$dir_name" || exit
  # NOTE(review): wget saves the file under the encoded prefix, which is
  # what this existence check expects — confirm if the URL layout changes.
  if [[ -e "${PREFIX}${dir_name}.tar.gz" ]]; then
    echo "${PREFIX}${dir_name}.tar.gz has been downloaded."
  else
    wget -q "${URL_ROOT}""$dir_name".tar.gz
    tar xzf ./*.tar.gz
  fi
  cd .. || exit
}
# Fetch every vis-demo model/dataset into $DATA_DIR.
mkdir -p "$DATA_DIR"
cd "$DATA_DIR" || exit
vis_demo_list='se_resnext50 ocr mobilenet'
for vis_demo_name in $vis_demo_list; do
  download "$vis_demo_name"
done

# download word2vec data
mkdir -p word2vec
cd word2vec || exit
if [[ -e "word2vec.inference.model.tar.gz" ]]; then
  echo "word2vec.inference.model.tar.gz has been downloaded."
else
  wget -q http://paddle-inference-dist.bj.bcebos.com/word2vec.inference.model.tar.gz
  tar xzf ./*.tar.gz
fi

# compile and test the demo
# Start each run from a clean build directory next to this script.
cd "$current_dir" || exit
mkdir -p build
cd build || exit
rm -rf ./*

# Build and run every demo twice: once linked against the static inference
# library and once against the shared one.
for WITH_STATIC_LIB in ON OFF; do
  if [ "$(uname | grep Win)" != "" ]; then
    # -----simple_on_word2vec on windows-----
    cmake .. -G "Visual Studio 14 2015" -A x64 -DPADDLE_LIB="${inference_install_dir}" \
      -DWITH_MKL="$TURN_ON_MKL" \
      -DDEMO_NAME=simple_on_word2vec \
      -DWITH_GPU="$TEST_GPU_CPU" \
      -DWITH_STATIC_LIB="$WITH_STATIC_LIB" \
      -DMSVC_STATIC_CRT="$MSVC_STATIC_CRT"
    msbuild /maxcpucount /property:Configuration=Release cpp_inference_demo.sln
    for use_gpu in $use_gpu_list; do
      Release/simple_on_word2vec.exe \
        --dirname="$DATA_DIR"/word2vec/word2vec.inference.model \
        --use_gpu="$use_gpu"
      EXCODE="$?"
      if [ "$EXCODE" -ne 0 ]; then
        echo "simple_on_word2vec demo runs fail."
        exit 1
      fi
    done

    # -----vis_demo on windows-----
    rm -rf ./*
    cmake .. -G "Visual Studio 14 2015" -A x64 -DPADDLE_LIB="${inference_install_dir}" \
      -DWITH_MKL="$TURN_ON_MKL" \
      -DDEMO_NAME=vis_demo \
      -DWITH_GPU="$TEST_GPU_CPU" \
      -DWITH_STATIC_LIB="$WITH_STATIC_LIB" \
      -DMSVC_STATIC_CRT="$MSVC_STATIC_CRT"
    msbuild /maxcpucount /property:Configuration=Release cpp_inference_demo.sln
    for use_gpu in $use_gpu_list; do
      for vis_demo_name in $vis_demo_list; do
        Release/vis_demo.exe \
          --modeldir="$DATA_DIR"/"$vis_demo_name"/model \
          --data="$DATA_DIR"/"$vis_demo_name"/data.txt \
          --refer="$DATA_DIR"/"$vis_demo_name"/result.txt \
          --use_gpu="$use_gpu"
        EXCODE="$?"
        if [ "$EXCODE" -ne 0 ]; then
          echo "vis demo $vis_demo_name runs fail."
          exit 1
        fi
      done
    done
  else
    # -----simple_on_word2vec on linux/mac-----
    rm -rf ./*
    cmake .. -DPADDLE_LIB="${inference_install_dir}" \
      -DWITH_MKL="$TURN_ON_MKL" \
      -DDEMO_NAME=simple_on_word2vec \
      -DWITH_GPU="$TEST_GPU_CPU" \
      -DWITH_STATIC_LIB="$WITH_STATIC_LIB"
    # NOTE(review): nproc is GNU-only; macOS would need sysctl -n hw.ncpu.
    make -j"$(nproc)"
    word2vec_model="$DATA_DIR"'/word2vec/word2vec.inference.model'
    # Run only if the model was actually downloaded.
    if [ -d "$word2vec_model" ]; then
      for use_gpu in $use_gpu_list; do
        ./simple_on_word2vec \
          --dirname="$DATA_DIR"/word2vec/word2vec.inference.model \
          --use_gpu="$use_gpu"
        EXCODE="$?"
        if [ "$EXCODE" -ne 0 ]; then
          echo "simple_on_word2vec demo runs fail."
          exit 1
        fi
      done
    fi
    # ---------vis_demo on linux/mac---------
    rm -rf ./*
    cmake .. -DPADDLE_LIB="${inference_install_dir}" \
      -DWITH_MKL="$TURN_ON_MKL" \
      -DDEMO_NAME=vis_demo \
      -DWITH_GPU="$TEST_GPU_CPU" \
      -DWITH_STATIC_LIB="$WITH_STATIC_LIB"
    make -j"$(nproc)"
    for use_gpu in $use_gpu_list; do
      for vis_demo_name in $vis_demo_list; do
        ./vis_demo \
          --modeldir="$DATA_DIR"/"$vis_demo_name"/model \
          --data="$DATA_DIR"/"$vis_demo_name"/data.txt \
          --refer="$DATA_DIR"/"$vis_demo_name"/result.txt \
          --use_gpu="$use_gpu"
        EXCODE="$?"
        if [ "$EXCODE" -ne 0 ]; then
          echo "vis demo $vis_demo_name runs fail."
          exit 1
        fi
      done
    done
    # --------tensorrt mobilenet on linux/mac------
    if [ "$USE_TENSORRT" == ON ] && [ "$TEST_GPU_CPU" == ON ]; then
      rm -rf ./*
      cmake .. -DPADDLE_LIB="${inference_install_dir}" \
        -DWITH_MKL="$TURN_ON_MKL" \
        -DDEMO_NAME=trt_mobilenet_demo \
        -DWITH_GPU="$TEST_GPU_CPU" \
        -DWITH_STATIC_LIB="$WITH_STATIC_LIB" \
        -DUSE_TENSORRT="$USE_TENSORRT" \
        -DTENSORRT_INCLUDE_DIR="$TENSORRT_INCLUDE_DIR" \
        -DTENSORRT_LIB_DIR="$TENSORRT_LIB_DIR"
      make -j"$(nproc)"
      ./trt_mobilenet_demo \
        --modeldir="$DATA_DIR"/mobilenet/model \
        --data="$DATA_DIR"/mobilenet/data.txt \
        --refer="$DATA_DIR"/mobilenet/result.txt
      EXCODE="$?"
      # Use -ne for the numeric comparison, consistent with the checks above.
      if [ "$EXCODE" -ne 0 ]; then
        echo "trt demo trt_mobilenet_demo runs fail."
        exit 1
      fi
    fi
  fi
done
set +x