#!/bin/bash
# Build and run the Paddle Fluid inference demos.
# Usage: run.sh <paddle_root> <ON|OFF> <ON|OFF> <data_dir> [trt_include_dir] [trt_lib_dir]
set -x
PADDLE_ROOT=$1
TURN_ON_MKL=$2 # use MKL or Openblas
TEST_GPU_CPU=$3 # test both GPU/CPU mode or only CPU mode
DATA_DIR=$4 # dataset
TENSORRT_INCLUDE_DIR=$5 # TensorRT header file dir, default to /usr/local/TensorRT/include
TENSORRT_LIB_DIR=$6 # TensorRT lib file dir, default to /usr/local/TensorRT/lib
inference_install_dir=${PADDLE_ROOT}/build/fluid_inference_install_dir

# Run from the script's own directory; remember it so we can return after
# downloading data elsewhere.
cd "$(dirname "$0")"
current_dir=$(pwd)
# Use the named arguments (assigned above) and quote them so the tests do not
# break when an argument is empty.
if [ "$TURN_ON_MKL" == "ON" ]; then
  # You can export yourself if move the install path
  MKL_LIB=${inference_install_dir}/third_party/install/mklml/lib
  export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${MKL_LIB}
fi
# Decide which device modes to exercise; the list is word-split on purpose.
if [ "$TEST_GPU_CPU" == "ON" ]; then
  use_gpu_list='true false'
else
  use_gpu_list='false'
fi
# Enable TensorRT demos only when both the header and the library directory
# exist. ('-a' inside [ ] is obsolescent; use two tests joined by '&&'.)
USE_TENSORRT=OFF
if [ -d "$TENSORRT_INCLUDE_DIR" ] && [ -d "$TENSORRT_LIB_DIR" ]; then
  USE_TENSORRT=ON
fi

# Remote location of the demo-data tarballs. '%2F' is a URL-encoded '/', so
# the fetched files are saved locally as "inference-vis-demos%2F<name>.tar.gz".
PREFIX=inference-vis-demos%2F
URL_ROOT=http://paddlemodels.cdn.bcebos.com/${PREFIX}

# download vis_demo data
# download <dir_name>: fetch ${URL_ROOT}<dir_name>.tar.gz into ./<dir_name>/
# and unpack it there; skip the fetch when the tarball is already present.
function download() {
  dir_name=$1
  mkdir -p "$dir_name"
  cd "$dir_name" || return
  if [[ -e "${PREFIX}${dir_name}.tar.gz" ]]; then
    # Fix: the original echoed "{dir_name}" literally (missing '$').
    echo "${PREFIX}${dir_name}.tar.gz has been downloaded."
  else
    wget -q "${URL_ROOT}${dir_name}.tar.gz"
    tar xzf *.tar.gz
  fi
  cd ..
}
# Fetch every demo dataset into $DATA_DIR. Guard the cd so a bad DATA_DIR
# does not make us download into the wrong directory.
mkdir -p "$DATA_DIR"
cd "$DATA_DIR" || exit 1
vis_demo_list='se_resnext50 ocr mobilenet'
for vis_demo_name in $vis_demo_list; do
  download "$vis_demo_name"
done

# compile and test the demo
cd "$current_dir"
mkdir -p build
cd build

# Build and run every demo twice: once linking Paddle statically, once
# dynamically. Each cmake run reconfigures the same build dir, so wipe it
# between demos ('rm -rf *' is safe only because we cd'ed into ./build above).
for WITH_STATIC_LIB in ON OFF; do
  # -----simple_on_word2vec-----
  rm -rf *
  cmake .. -DPADDLE_LIB=${inference_install_dir} \
    -DWITH_MKL=$TURN_ON_MKL \
    -DDEMO_NAME=simple_on_word2vec \
    -DWITH_GPU=$TEST_GPU_CPU \
    -DWITH_STATIC_LIB=$WITH_STATIC_LIB \
    -DON_INFER=ON
  make -j
  # The word2vec model is produced by a Python book test; run the demo only
  # when that model is actually present.
  word2vec_model=${PADDLE_ROOT}'/build/python/paddle/fluid/tests/book/word2vec.inference.model'
  if [ -d "$word2vec_model" ]; then
    for use_gpu in $use_gpu_list; do
      if ! ./simple_on_word2vec \
        --dirname="$word2vec_model" \
        --use_gpu=$use_gpu; then
        echo "simple_on_word2vec demo runs fail."
        exit 1
      fi
    done
  fi

  # ---------vis_demo---------
  rm -rf *
  cmake .. -DPADDLE_LIB=${inference_install_dir} \
    -DWITH_MKL=$TURN_ON_MKL \
    -DDEMO_NAME=vis_demo \
    -DWITH_GPU=$TEST_GPU_CPU \
    -DWITH_STATIC_LIB=$WITH_STATIC_LIB \
    -DON_INFER=ON
  make -j
  for use_gpu in $use_gpu_list; do
    for vis_demo_name in $vis_demo_list; do
      if ! ./vis_demo \
        --modeldir="$DATA_DIR/$vis_demo_name/model" \
        --data="$DATA_DIR/$vis_demo_name/data.txt" \
        --refer="$DATA_DIR/$vis_demo_name/result.txt" \
        --use_gpu=$use_gpu; then
        echo "vis demo $vis_demo_name runs fail."
        exit 1
      fi
    done
  done

  # --------tensorrt mobilenet------
  if [ "$USE_TENSORRT" == "ON" ] && [ "$TEST_GPU_CPU" == "ON" ]; then
    rm -rf *
    cmake .. -DPADDLE_LIB=${inference_install_dir} \
      -DWITH_MKL=$TURN_ON_MKL \
      -DDEMO_NAME=trt_mobilenet_demo \
      -DWITH_GPU=$TEST_GPU_CPU \
      -DWITH_STATIC_LIB=$WITH_STATIC_LIB \
      -DUSE_TENSORRT=$USE_TENSORRT \
      -DTENSORRT_INCLUDE_DIR=$TENSORRT_INCLUDE_DIR \
      -DTENSORRT_LIB_DIR=$TENSORRT_LIB_DIR \
      -DON_INFER=ON
    make -j
    # Fix: the original never checked this demo's exit status, so TensorRT
    # failures passed silently; fail the script like the other demos do.
    if ! ./trt_mobilenet_demo \
      --modeldir="$DATA_DIR/mobilenet/model" \
      --data="$DATA_DIR/mobilenet/data.txt" \
      --refer="$DATA_DIR/mobilenet/result.txt"; then
      echo "trt_mobilenet_demo runs fail."
      exit 1
    fi
  fi
done
set +x