#!/bin/bash
# clean up logs from previous runs (-f avoids an error when none exist)
rm -f profile_log*
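# Usage (inferred from how the positional arguments are used below):
#   bash benchmark.sh <server_model_dir> <client_conf_dir>
# $1: model directory passed to paddle_serving_server_gpu.serve; also used to name the logs
# $2: directory containing serving_client_conf.prototxt for the benchmark client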
export CUDA_VISIBLE_DEVICES=0,1,2,3
export FLAGS_profile_server=1
export FLAGS_profile_client=1
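# These FLAGS turn on Paddle Serving's server- and client-side profiling, so that
# benchmark.py emits the timing records summarized by show_profile.py further below.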
# launch the serving server on GPUs 0-3; stderr/stdout are redirected to elog/stdlog
python -m paddle_serving_server_gpu.serve --model $1 --port 9292 --thread 4 --gpu_ids 0,1,2,3 --mem_optim --ir_optim 2> elog > stdlog &

sleep 5  # give the server a moment to start before benchmarking
gpu_id=0
# (re)create the directory for cpu and gpu utilization logs
if [ -d utilization ]; then
    rm -rf utilization
fi
mkdir utilization

# warm up with a single small request before measuring
$PYTHONROOT/bin/python3 benchmark.py --thread 4 --batch_size 1 --model $2/serving_client_conf.prototxt --request rpc > profile 2>&1
echo -e "import psutil\ncpu_utilization=psutil.cpu_percent(1,False)\nprint('CPU_UTILIZATION:', cpu_utilization)\n" > cpu_utilization.py

# sweep over client thread counts and batch sizes
for thread_num in 1 4 8 16
do
for batch_size in 1 4 16 64
do
    job_bt=`date '+%Y%m%d%H%M%S'`
    # sample GPU memory and utilization of card $gpu_id every 100 ms in the background
    nvidia-smi --id=$gpu_id --query-compute-apps=used_memory --format=csv -lms 100 > gpu_use.log 2>&1 &
    gpu_memory_pid=$!
    nvidia-smi --id=$gpu_id --query-gpu=utilization.gpu --format=csv -lms 100 > gpu_utilization.log 2>&1 &
    gpu_utilization_pid=$!
    # run the benchmark client for this thread/batch combination
    $PYTHONROOT/bin/python benchmark.py --thread $thread_num --batch_size $batch_size --model $2/serving_client_conf.prototxt --request rpc > profile 2>&1
    # stop the nvidia-smi samplers
    kill ${gpu_memory_pid}
    kill ${gpu_utilization_pid}
    echo "model name :" $1
    echo "thread num :" $thread_num
    echo "batch size :" $batch_size
    echo "=================Done===================="
    echo "model name :$1" >> profile_log
    echo "batch size :$batch_size" >> profile_log
    job_et=`date '+%Y%m%d%H%M%S'`
    # record CPU utilization with the helper generated above, then extract the peak
    # GPU memory and GPU utilization observed during the run
    $PYTHONROOT/bin/python cpu_utilization.py >> profile_log_$1
    awk 'BEGIN {max = 0} {if(NR>1){if ($1 > max) max=$1}} END {print "MAX_GPU_MEMORY:", max}' gpu_use.log >> profile_log_$1
    awk 'BEGIN {max = 0} {if(NR>1){if ($1 > max) max=$1}} END {print "GPU_UTILIZATION:", max}' gpu_utilization.log >> profile_log_$1
    rm -rf gpu_use.log gpu_utilization.log
    $PYTHONROOT/bin/python ../util/show_profile.py profile $thread_num >> profile_log_$1
    # append the tail of the client profile output to the per-model log
    tail -n 8 profile >> profile_log_$1
    echo "" >> profile_log_$1
done
done

# split the combined log into one file per run (blank-line separated records)
# and collect the pieces under $1_log/
awk 'BEGIN{RS="\n\n"}{i++}{print > "ResNet_log_"i}' profile_log_$1
mkdir $1_log && mv ResNet_log_* $1_log
# shut down the serving server processes
ps -ef | grep 'serving' | grep -v grep | awk '{print $2}' | xargs kill -9