#!/bin/bash

# Print the top-level help text for the `paddle` wrapper command.
function usage(){
        cat <<'USAGE'
usage: paddle [--help] [<args>]
These are common paddle commands used in various situations:
    train             Start a paddle_trainer
    merge_model       Start a paddle_merge_model
    pserver           Start a paddle_pserver_main
    version           Print paddle version
    dump_config       Dump the trainer config as proto string
    make_diagram      Make Diagram using Graphviz

'paddle train --help' 'paddle merge_model --help', 'paddle pserver --help', list more detailed usage of each command
USAGE
}


# Print the PaddlePaddle version and the feature flags it was compiled with.
# The @...@ placeholders are substituted at build time by CMake configure_file().
# BUGFIX: removed blame-viewer residue lines ("T", "tensor-tang 已提交", "21", ...)
# that had been pasted into the function body and would run as bogus commands.
function version(){
        echo "PaddlePaddle @PADDLE_VERSION@, compiled with"
        echo "    with_avx: @WITH_AVX@"
        echo "    with_gpu: @WITH_GPU@"
        echo "    with_mkldnn: @WITH_MKLDNN@"
        echo "    with_mklml: @WITH_MKLML@"
        echo "    with_double: @WITH_DOUBLE@"
        echo "    with_python: @WITH_PYTHON@"
        echo "    with_rdma: @WITH_RDMA@"
        echo "    with_timer: @WITH_TIMER@"
}

# Convert a version string (e.g. "v1.2.3", "0.9.0rc1") into a fixed-width
# 15-digit number string so two versions can be compared with [ -lt ].
# Pre-release tags map to digits (a -> 0, b -> 1, rc -> 2); a plain x.y.z
# release is padded with 999 999 so it sorts after any of its pre-releases.
# An empty argument yields the all-zero version.
# BUGFIX: removed blame-viewer residue lines that had been pasted inside the
# function body and would run as bogus commands.
function ver2num() {
  set -e   # NOTE(review): flips the global errexit flag; callers use $(...) so it stays in a subshell
  if [ -z "$1" ]; then # empty argument -> all zeros
    printf "%03d%03d%03d%03d%03d" 0
  else
    local VERN
    VERN=$(echo "$1" | sed 's#v##g' | sed 's#\.# #g' \
        | sed 's#a# 0 #g' | sed 's#b# 1 #g' | sed 's#rc# 2 #g')
    if [ $(echo $VERN | wc -w) -eq 3 ] ; then
      # plain release: pad so it ranks above every pre-release of itself
      printf "%03d%03d%03d%03d%03d" $VERN 999 999
    else
      # $VERN intentionally unquoted: word-splitting feeds printf its fields
      printf "%03d%03d%03d%03d%03d" $VERN
    fi
  fi
  set +e
}

# Auto-set KMP_AFFINITY and OMP_DYNAMIC from the Hyper-Threading status
# reported by lscpu. Only applies when the build enabled MKL-DNN or MKLML;
# values the user already exported are never overwritten.
function cpu_config() {
  # BUGFIX: the second test used to read `"OFF"]` (missing space before `]`),
  # which made `[` fail with a syntax error instead of evaluating the guard.
  if [ "@WITH_MKLDNN@" == "OFF" ] && [ "@WITH_MKLML@" == "OFF" ]; then
    return 0
  fi
  # threads per core: 1 means Hyper-Threading is off
  ht=$(lscpu | grep "per core" | awk -F':' '{print $2}' | xargs)
  if [ "$ht" -eq 1 ]; then # HT is OFF
    if [ -z "$KMP_AFFINITY" ]; then
      export KMP_AFFINITY="granularity=fine,compact,0,0"
    fi
    if [ -z "$OMP_DYNAMIC" ]; then
      export OMP_DYNAMIC="FALSE"
    fi
  else # HT is ON
    if [ -z "$KMP_AFFINITY" ]; then
      export KMP_AFFINITY="granularity=fine,compact,1,0"
    fi
    if [ -z "$OMP_DYNAMIC" ]; then
      export OMP_DYNAMIC="True"
    fi
  fi
}

# Auto-set OMP_NUM_THREADS and MKL_NUM_THREADS from the trainer_count found
# in the command line and the total processor count, i.e. one thread pool
# share per trainer (minimum 1). Only applies when the build enabled MKL-DNN
# or MKLML; values the user already exported are never overwritten.
function threads_config() {
  # BUGFIX: the second test used to read `"OFF"]` (missing space before `]`),
  # which made `[` fail with a syntax error instead of evaluating the guard.
  if [ "@WITH_MKLDNN@" == "OFF" ] && [ "@WITH_MKLML@" == "OFF" ]; then
    return 0
  fi
  processors=$(grep "processor" /proc/cpuinfo | sort -u | wc -l)
  # pull the numeric value of any trainer_count=N / trainer_count N argument
  trainers=$(grep -Eo 'trainer_count.[0-9]+' <<< "$@" | grep -Eo '[0-9]+' | xargs)
  if [ -z "$trainers" ]; then
    trainers=1
  fi
  threads=$((processors / trainers))
  if [ $threads -eq 0 ]; then
    threads=1
  fi
  if [ -z "$OMP_NUM_THREADS" ]; then
    export OMP_NUM_THREADS=$threads
  fi
  if [ -z "$MKL_NUM_THREADS" ]; then
    export MKL_NUM_THREADS=$threads
  fi
}

# Usage statistics / update check: POST the local version to
# api.paddlepaddle.org (5 second timeout) and warn if the server reports a
# newer release. Opt out by defining PADDLE_NO_STAT in the environment.
PADDLE_CONF_HOME="$HOME/.config/paddle"
mkdir -p "${PADDLE_CONF_HOME}"

if [ -z "${PADDLE_NO_STAT+x}" ]; then
    SERVER_VER=$(curl -m 5 -X POST --data content="{ \"version\": \"@PADDLE_VERSION@\" }"\
        -b "${PADDLE_CONF_HOME}/paddle.cookie" \
        -c "${PADDLE_CONF_HOME}/paddle.cookie" \
        http://api.paddlepaddle.org/version 2>/dev/null)
    # only compare when curl succeeded; a dead network is silently ignored
    if [ $? -eq 0 ] && [ "$(ver2num @PADDLE_VERSION@)" -lt  $(ver2num $SERVER_VER) ]; then
      echo "Paddle release a new version ${SERVER_VER}, you can get the install package in http://www.paddlepaddle.org"
    fi
fi

# Directory containing this script; the paddle_* binaries live next to it.
PADDLE_BIN_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Optional wrapper command (e.g. gdb, valgrind) prefixed to every binary.
if [ ! -z "${DEBUGGER}" ]; then
    echo "Using debug command ${DEBUGGER}"
fi

# Substituted by CMake: path to the cuDNN the build linked against (may be empty).
CUDNN_LIB_PATH="@CUDNN_LIB_PATH@"

if [ ! -z "${CUDNN_LIB_PATH}" ]; then
    export LD_LIBRARY_PATH=${CUDNN_LIB_PATH}:${LD_LIBRARY_PATH}
fi

# Make the paddle python package importable from the current directory.
export PYTHONPATH=${PWD}:${PYTHONPATH}

# Check that pip exists and look up the installed paddle python package.
pip --help > /dev/null
if [ $? -ne 0 ]; then
    echo "pip should be installed to run paddle."
    exit 1
fi

INSTALLED_VERSION=$(pip freeze 2>/dev/null | grep '^paddle' | sed 's/.*==//g')

if [ -z "${INSTALLED_VERSION}" ]; then
   INSTALLED_VERSION="0.0.0"  # not installed
fi
# Compare installed package version against this build's version.
# NOTE(review): the comparison's exit status is never inspected afterwards,
# so an outdated package currently has no effect — confirm this is intended.
cat <<EOF | python -
from distutils.version import LooseVersion
import sys
if LooseVersion("${INSTALLED_VERSION}") < LooseVersion("@PADDLE_VERSION@"):
  sys.exit(1)
else:
  sys.exit(0)
EOF

cpu_config
# echo $KMP_AFFINITY $OMP_DYNAMIC

# Dispatch on the first argument; everything after it (${@:2}) is forwarded
# to the selected tool. Unknown commands fall through to the usage text.
# BUGFIX: removed blame-viewer residue lines that were interleaved with the
# case arms and would run as bogus commands.
case "$1" in
    "train")
        threads_config $@
        # echo $OMP_NUM_THREADS $MKL_NUM_THREADS
        ${DEBUGGER} $PADDLE_BIN_PATH/paddle_trainer ${@:2}
        ;;
    "merge_model")
        ${DEBUGGER} $PADDLE_BIN_PATH/paddle_merge_model ${@:2}
        ;;
    "pserver")
        ${DEBUGGER} $PADDLE_BIN_PATH/paddle_pserver_main ${@:2}
        ;;
    "dump_config")
        python -m paddle.utils.dump_config ${@:2}
        ;;
    "make_diagram")
        python -m paddle.utils.make_model_diagram ${@:2}
        ;;
    "usage")
        $PADDLE_BIN_PATH/paddle_usage ${@:2}
        ;;
    "version")
        version
        ;;
    "--help")
        usage
        ;;
    *)
        usage
        ;;
esac