diff --git a/paddle/.set_port.sh b/paddle/.set_port.sh index 617ac79a24889eef23b66235ace20be80e1ff4dc..e71f494aadf2c572988c1a88d39fa6bfd5e9a6c1 100755 --- a/paddle/.set_port.sh +++ b/paddle/.set_port.sh @@ -13,6 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -DIRNAME=`dirname $0` -source $DIRNAME/.common_test_util.sh -set_port $@ +DIRNAME="$(dirname "$0")" +source "$DIRNAME"/.common_test_util.sh +set_port "$@" diff --git a/paddle/.set_python_path.sh b/paddle/.set_python_path.sh index 8fd58925ee4820269572176ff9496f42914652da..8da4565be617bdfd03b507005d20752809fcbd67 100755 --- a/paddle/.set_python_path.sh +++ b/paddle/.set_python_path.sh @@ -24,12 +24,14 @@ PYPATH="" set -x while getopts "d:" opt; do - case $opt in + case "$opt" in d) PYPATH=$OPTARG ;; + *) + ;; esac done -shift $(($OPTIND - 1)) +shift $(("$OPTIND" - 1)) export PYTHONPATH=$PYPATH:$PYTHONPATH -$@ +"$@" diff --git a/paddle/fluid/inference/api/demo_ci/clean.sh b/paddle/fluid/inference/api/demo_ci/clean.sh index 0d9f3d2aa237acaf3bd7adb031b1f2a73c555352..5f603465776f1e2952199e17f6d0a63d4dcfd0a7 100755 --- a/paddle/fluid/inference/api/demo_ci/clean.sh +++ b/paddle/fluid/inference/api/demo_ci/clean.sh @@ -1,4 +1,5 @@ +#!/bin/bash set -x -cd `dirname $0` +cd "$(dirname "$0")" || exit rm -rf build/ data/ set +x diff --git a/paddle/fluid/inference/api/demo_ci/run.sh b/paddle/fluid/inference/api/demo_ci/run.sh index 6d283ca56cb652515b5ade923ea85e38142bf08c..aee013e8f36528111ff74f2f769ed6e1161e552d 100755 --- a/paddle/fluid/inference/api/demo_ci/run.sh +++ b/paddle/fluid/inference/api/demo_ci/run.sh @@ -1,29 +1,29 @@ #!/bin/bash set -x -PADDLE_ROOT=$1 -TURN_ON_MKL=$2 # use MKL or Openblas -TEST_GPU_CPU=$3 # test both GPU/CPU mode or only CPU mode -DATA_DIR=$4 # dataset -TENSORRT_INCLUDE_DIR=$5 # TensorRT header file dir, default to /usr/local/TensorRT/include -TENSORRT_LIB_DIR=$6 # TensorRT lib file dir, default to /usr/local/TensorRT/lib 
-MSVC_STATIC_CRT=$7 -inference_install_dir=${PADDLE_ROOT}/build/paddle_inference_install_dir +PADDLE_ROOT="$1" +TURN_ON_MKL="$2" # use MKL or Openblas +TEST_GPU_CPU="$3" # test both GPU/CPU mode or only CPU mode +DATA_DIR="$4" # dataset +TENSORRT_INCLUDE_DIR="$5" # TensorRT header file dir, default to /usr/local/TensorRT/include +TENSORRT_LIB_DIR="$6" # TensorRT lib file dir, default to /usr/local/TensorRT/lib +MSVC_STATIC_CRT="$7" +inference_install_dir="${PADDLE_ROOT}"/build/paddle_inference_install_dir -cd `dirname $0` -current_dir=`pwd` -if [ $2 == ON ]; then +cd "$(dirname "$0")" || exit +current_dir=$(pwd) +if [ "$2" == ON ]; then # You can export yourself if move the install path - MKL_LIB=${inference_install_dir}/third_party/install/mklml/lib - export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${MKL_LIB} + MKL_LIB="${inference_install_dir}"/third_party/install/mklml/lib + export LD_LIBRARY_PATH="$LD_LIBRARY_PATH":"${MKL_LIB}" fi -if [ $3 == ON ]; then +if [ "$3" == ON ]; then use_gpu_list='true false' else use_gpu_list='false' fi USE_TENSORRT=OFF -if [ -d "$TENSORRT_INCLUDE_DIR" -a -d "$TENSORRT_LIB_DIR" ]; then +if [ -d "$TENSORRT_INCLUDE_DIR" ] && [ -d "$TENSORRT_LIB_DIR" ]; then USE_TENSORRT=ON fi @@ -32,77 +32,79 @@ URL_ROOT=http://paddlemodels.bj.bcebos.com/${PREFIX} # download vis_demo data function download() { - dir_name=$1 - mkdir -p $dir_name - cd $dir_name + dir_name="$1" + mkdir -p "$dir_name" + cd "$dir_name" || exit if [[ -e "${PREFIX}${dir_name}.tar.gz" ]]; then echo "${PREFIX}${dir_name}.tar.gz has been downloaded." else - wget -q ${URL_ROOT}$dir_name.tar.gz - tar xzf *.tar.gz + wget -q "${URL_ROOT}""$dir_name".tar.gz + tar xzf ./*.tar.gz fi - cd .. + cd .. 
|| exit } -mkdir -p $DATA_DIR -cd $DATA_DIR +mkdir -p "$DATA_DIR" +cd "$DATA_DIR" || exit vis_demo_list='se_resnext50 ocr mobilenet' for vis_demo_name in $vis_demo_list; do - download $vis_demo_name + download "$vis_demo_name" done # download word2vec data mkdir -p word2vec -cd word2vec +cd word2vec || exit if [[ -e "word2vec.inference.model.tar.gz" ]]; then echo "word2vec.inference.model.tar.gz has been downloaded." else wget -q http://paddle-inference-dist.bj.bcebos.com/word2vec.inference.model.tar.gz - tar xzf *.tar.gz + tar xzf ./*.tar.gz fi # compile and test the demo -cd $current_dir +cd "$current_dir" || exit mkdir -p build -cd build -rm -rf * +cd build || exit +rm -rf ./* for WITH_STATIC_LIB in ON OFF; do - if [ $(echo `uname` | grep "Win") != "" ]; then + if [ "$(uname | grep Win)" != "" ]; then # -----simple_on_word2vec on windows----- - cmake .. -G "Visual Studio 14 2015" -A x64 -DPADDLE_LIB=${inference_install_dir} \ - -DWITH_MKL=$TURN_ON_MKL \ + cmake .. -G "Visual Studio 14 2015" -A x64 -DPADDLE_LIB="${inference_install_dir}" \ + -DWITH_MKL="$TURN_ON_MKL" \ -DDEMO_NAME=simple_on_word2vec \ - -DWITH_GPU=$TEST_GPU_CPU \ - -DWITH_STATIC_LIB=$WITH_STATIC_LIB \ - -DMSVC_STATIC_CRT=$MSVC_STATIC_CRT + -DWITH_GPU="$TEST_GPU_CPU" \ + -DWITH_STATIC_LIB="$WITH_STATIC_LIB" \ + -DMSVC_STATIC_CRT="$MSVC_STATIC_CRT" msbuild /maxcpucount /property:Configuration=Release cpp_inference_demo.sln for use_gpu in $use_gpu_list; do Release/simple_on_word2vec.exe \ - --dirname=$DATA_DIR/word2vec/word2vec.inference.model \ - --use_gpu=$use_gpu - if [ $? -ne 0 ]; then + --dirname="$DATA_DIR"/word2vec/word2vec.inference.model \ + --use_gpu="$use_gpu" + EXCODE="$?" + if [ "$EXCODE" -ne 0 ]; then echo "simple_on_word2vec demo runs fail." exit 1 fi done # -----vis_demo on windows----- - rm -rf * - cmake .. -G "Visual Studio 14 2015" -A x64 -DPADDLE_LIB=${inference_install_dir} \ - -DWITH_MKL=$TURN_ON_MKL \ + rm -rf ./* + cmake .. 
-G "Visual Studio 14 2015" -A x64 -DPADDLE_LIB="${inference_install_dir}" \ + -DWITH_MKL="$TURN_ON_MKL" \ -DDEMO_NAME=vis_demo \ - -DWITH_GPU=$TEST_GPU_CPU \ - -DWITH_STATIC_LIB=$WITH_STATIC_LIB \ - -DMSVC_STATIC_CRT=$MSVC_STATIC_CRT + -DWITH_GPU="$TEST_GPU_CPU" \ + -DWITH_STATIC_LIB="$WITH_STATIC_LIB" \ + -DMSVC_STATIC_CRT="$MSVC_STATIC_CRT" msbuild /maxcpucount /property:Configuration=Release cpp_inference_demo.sln for use_gpu in $use_gpu_list; do for vis_demo_name in $vis_demo_list; do Release/vis_demo.exe \ - --modeldir=$DATA_DIR/$vis_demo_name/model \ - --data=$DATA_DIR/$vis_demo_name/data.txt \ - --refer=$DATA_DIR/$vis_demo_name/result.txt \ - --use_gpu=$use_gpu - if [ $? -ne 0 ]; then + --modeldir="$DATA_DIR"/"$vis_demo_name"/model \ + --data="$DATA_DIR"/"$vis_demo_name"/data.txt \ + --refer="$DATA_DIR"/"$vis_demo_name"/result.txt \ + --use_gpu="$use_gpu" + EXCODE="$?" + if [ "$EXCODE" -ne 0 ]; then echo "vis demo $vis_demo_name runs fail." exit 1 fi @@ -110,63 +112,66 @@ for WITH_STATIC_LIB in ON OFF; do done else # -----simple_on_word2vec on linux/mac----- - rm -rf * - cmake .. -DPADDLE_LIB=${inference_install_dir} \ - -DWITH_MKL=$TURN_ON_MKL \ + rm -rf ./* + cmake .. -DPADDLE_LIB="${inference_install_dir}" \ + -DWITH_MKL="$TURN_ON_MKL" \ -DDEMO_NAME=simple_on_word2vec \ - -DWITH_GPU=$TEST_GPU_CPU \ - -DWITH_STATIC_LIB=$WITH_STATIC_LIB - make -j$(nproc) - word2vec_model=$DATA_DIR'/word2vec/word2vec.inference.model' - if [ -d $word2vec_model ]; then + -DWITH_GPU="$TEST_GPU_CPU" \ + -DWITH_STATIC_LIB="$WITH_STATIC_LIB" + make -j"$(nproc)" + word2vec_model="$DATA_DIR"'/word2vec/word2vec.inference.model' + if [ -d "$word2vec_model" ]; then for use_gpu in $use_gpu_list; do ./simple_on_word2vec \ - --dirname=$DATA_DIR/word2vec/word2vec.inference.model \ - --use_gpu=$use_gpu - if [ $? -ne 0 ]; then + --dirname="$DATA_DIR"/word2vec/word2vec.inference.model \ + --use_gpu="$use_gpu" + EXCODE="$?" 
+ if [ "$EXCODE" -ne 0 ]; then echo "simple_on_word2vec demo runs fail." exit 1 fi done fi # ---------vis_demo on linux/mac--------- - rm -rf * - cmake .. -DPADDLE_LIB=${inference_install_dir} \ - -DWITH_MKL=$TURN_ON_MKL \ + rm -rf ./* + cmake .. -DPADDLE_LIB="${inference_install_dir}" \ + -DWITH_MKL="$TURN_ON_MKL" \ -DDEMO_NAME=vis_demo \ - -DWITH_GPU=$TEST_GPU_CPU \ - -DWITH_STATIC_LIB=$WITH_STATIC_LIB - make -j$(nproc) + -DWITH_GPU="$TEST_GPU_CPU" \ + -DWITH_STATIC_LIB="$WITH_STATIC_LIB" + make -j"$(nproc)" for use_gpu in $use_gpu_list; do for vis_demo_name in $vis_demo_list; do ./vis_demo \ - --modeldir=$DATA_DIR/$vis_demo_name/model \ - --data=$DATA_DIR/$vis_demo_name/data.txt \ - --refer=$DATA_DIR/$vis_demo_name/result.txt \ - --use_gpu=$use_gpu - if [ $? -ne 0 ]; then + --modeldir="$DATA_DIR"/"$vis_demo_name"/model \ + --data="$DATA_DIR"/"$vis_demo_name"/data.txt \ + --refer="$DATA_DIR"/"$vis_demo_name"/result.txt \ + --use_gpu="$use_gpu" + EXCODE="$?" + if [ "$EXCODE" -ne 0 ]; then echo "vis demo $vis_demo_name runs fail." exit 1 fi done done # --------tensorrt mobilenet on linux/mac------ - if [ $USE_TENSORRT == ON -a $TEST_GPU_CPU == ON ]; then - rm -rf * - cmake .. -DPADDLE_LIB=${inference_install_dir} \ - -DWITH_MKL=$TURN_ON_MKL \ + if [ "$USE_TENSORRT" == ON ] && [ "$TEST_GPU_CPU" == ON ]; then + rm -rf ./* + cmake .. 
-DPADDLE_LIB="${inference_install_dir}" \ + -DWITH_MKL="$TURN_ON_MKL" \ -DDEMO_NAME=trt_mobilenet_demo \ - -DWITH_GPU=$TEST_GPU_CPU \ - -DWITH_STATIC_LIB=$WITH_STATIC_LIB \ - -DUSE_TENSORRT=$USE_TENSORRT \ - -DTENSORRT_INCLUDE_DIR=$TENSORRT_INCLUDE_DIR \ - -DTENSORRT_LIB_DIR=$TENSORRT_LIB_DIR - make -j$(nproc) + -DWITH_GPU="$TEST_GPU_CPU" \ + -DWITH_STATIC_LIB="$WITH_STATIC_LIB" \ + -DUSE_TENSORRT="$USE_TENSORRT" \ + -DTENSORRT_INCLUDE_DIR="$TENSORRT_INCLUDE_DIR" \ + -DTENSORRT_LIB_DIR="$TENSORRT_LIB_DIR" + make -j"$(nproc)" ./trt_mobilenet_demo \ - --modeldir=$DATA_DIR/mobilenet/model \ - --data=$DATA_DIR/mobilenet/data.txt \ - --refer=$DATA_DIR/mobilenet/result.txt - if [ $? -ne 0 ]; then + --modeldir="$DATA_DIR"/mobilenet/model \ + --data="$DATA_DIR"/mobilenet/data.txt \ + --refer="$DATA_DIR"/mobilenet/result.txt + EXCODE="$?" + if [ "$EXCODE" != 0 ]; then echo "trt demo trt_mobilenet_demo runs fail." exit 1 fi diff --git a/paddle/fluid/inference/check_symbol.sh b/paddle/fluid/inference/check_symbol.sh index a0f64796576c85bfa4bf3a3af6c1f4489f09b84e..0c66946c4b8a1e9e7ed3370db2c23f0cf95202a5 100755 --- a/paddle/fluid/inference/check_symbol.sh +++ b/paddle/fluid/inference/check_symbol.sh @@ -1,12 +1,12 @@ #!/bin/sh -lib=$1 -if [ $# -ne 1 ]; then echo "No input library"; exit -1 ; fi +lib="$1" +if [ "$#" -ne 1 ]; then echo "No input library"; exit 1 ; fi -num_paddle_syms=$(nm -D ${lib} | grep paddle | wc -l) -num_google_syms=$(nm -D ${lib} | grep google | grep -v paddle | grep "T " | wc -l) +num_paddle_syms=$(nm -D "${lib}" | grep -c paddle ) +num_google_syms=$(nm -D "${lib}" | grep google | grep -v paddle | grep -c "T " ) -if [ $num_paddle_syms -le 0 ]; then echo "Have no paddle symbols"; exit -1 ; fi -if [ $num_google_syms -ge 1 ]; then echo "Have some google symbols"; exit -1 ; fi +if [ "$num_paddle_syms" -le 0 ]; then echo "Have no paddle symbols"; exit 1 ; fi +if [ "$num_google_syms" -ge 1 ]; then echo "Have some google symbols"; exit 1 ; fi exit 0 diff --git 
a/paddle/fluid/train/demo/clean.sh b/paddle/fluid/train/demo/clean.sh index a2064492c08b842ba8779823d0c2631bd8f8dbe5..192bdf8752c1590634e4e3621d4090aaa6926b14 100755 --- a/paddle/fluid/train/demo/clean.sh +++ b/paddle/fluid/train/demo/clean.sh @@ -15,6 +15,6 @@ # limitations under the License. set -x -cd "$(dirname "$0")" +cd "$(dirname "$0")" || exit rm -rf build/ set +x diff --git a/paddle/fluid/train/demo/run.sh b/paddle/fluid/train/demo/run.sh index 2955e7574daa2d2e41bbade95c3c213917d07d4f..a9c0ed4ac68a2aa6dc1fc9c5ebdb53a554e833e1 100755 --- a/paddle/fluid/train/demo/run.sh +++ b/paddle/fluid/train/demo/run.sh @@ -14,14 +14,14 @@ function download() { download # build demo trainer -paddle_install_dir=${PADDLE_ROOT}/build/paddle_install_dir +paddle_install_dir="${PADDLE_ROOT}"/build/paddle_install_dir mkdir -p build -cd build -rm -rf * -cmake .. -DPADDLE_LIB=$paddle_install_dir \ - -DWITH_MKLDNN=$TURN_ON_MKL \ - -DWITH_MKL=$TURN_ON_MKL +cd build || exit +rm -rf ./* +cmake .. -DPADDLE_LIB="$paddle_install_dir" \ + -DWITH_MKLDNN="$TURN_ON_MKL" \ + -DWITH_MKL="$TURN_ON_MKL" make cd .. diff --git a/paddle/fluid/train/imdb_demo/run.sh b/paddle/fluid/train/imdb_demo/run.sh index f71b4bac602a9e6d5c7bea03f3c56043b13547d3..8a585c614e53fe78e57eb09a8f30ceffa9cc72b9 100644 --- a/paddle/fluid/train/imdb_demo/run.sh +++ b/paddle/fluid/train/imdb_demo/run.sh @@ -1,3 +1,3 @@ - +#!/bin/bash set -exu build/demo_trainer --flagfile="train.cfg" diff --git a/paddle/scripts/paddle_docker_build.sh b/paddle/scripts/paddle_docker_build.sh index d6b639d0da2a54e1e31051c44bc05b333e8493ce..fdd0d490a6fdb761078d402e2247159a3588e024 100755 --- a/paddle/scripts/paddle_docker_build.sh +++ b/paddle/scripts/paddle_docker_build.sh @@ -15,14 +15,14 @@ # limitations under the License. function start_build_docker() { - docker pull $IMG + docker pull "$IMG" apt_mirror='s#http://archive.ubuntu.com/ubuntu#mirror://mirrors.ubuntu.com/mirrors.txt#g' DOCKER_ENV=$(cat <\t,,,... 
-PADDLE_ROOT="$(dirname $(readlink -f ${BASH_SOURCE[0]}))/.." +PADDLE_ROOT="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")/.." -find ${PADDLE_ROOT}/python/ -name '*.py' \ +find "${PADDLE_ROOT}"/python/ -name '*.py' \ | xargs grep -v '^#' \ | grep 'DEFINE_ALIAS' \ | perl -ne ' diff --git a/tools/manylinux1/build_scripts/install_nccl2.sh b/tools/manylinux1/build_scripts/install_nccl2.sh index 0c9bf1409d90d85fd80f11fc56b2a84596631a10..c2adf6a79de4bbb03ee70e3e475190c2ecc1945f 100644 --- a/tools/manylinux1/build_scripts/install_nccl2.sh +++ b/tools/manylinux1/build_scripts/install_nccl2.sh @@ -1,4 +1,19 @@ #!/bin/bash + +# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + VERSION=$(nvcc --version | grep release | grep -oEi "release ([0-9]+)\.([0-9])"| sed "s/release //") if [ "$VERSION" == "10.0" ]; then DEB="nccl-repo-ubuntu1604-2.4.7-ga-cuda10.0_1-1_amd64.deb" @@ -24,10 +39,10 @@ wget -q -O $DIR/$DEB $URL cd $DIR && ar x $DEB && tar xf data.tar.xz DEBS=$(find ./var/ -name "*.deb") for sub_deb in $DEBS; do - echo $sub_deb - ar x $sub_deb && tar xf data.tar.xz + echo "$sub_deb" + ar x "$sub_deb" && tar xf data.tar.xz done mv -f usr/include/nccl.h /usr/local/include/ mv -f usr/lib/x86_64-linux-gnu/libnccl* /usr/local/lib/ rm /usr/include/nccl.h -rm -rf $DIR +rm -rf "$DIR"