#!/bin/bash
set -ex
# use default values
python -m paddle.distributed.launch multi_process.py

# use paddlecloud
cluster_node_ips="10.0.0.1"
node_ip="10.0.0.1"
export PADDLE_TRAINERS_NUM=2
export POD_IP=127.0.0.1
export PADDLE_TRAINERS=127.0.0.1,127.0.0.2
export PADDLE_TRAINER_ID=0
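# The exports above emulate the environment that paddlecloud sets for a
# two-node job (127.0.0.1 and 127.0.0.2), with this process acting as node 0;
# together with --selected_gpus=0,1 below that gives 4 trainers in total.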

distributed_args="--use_paddlecloud True --cluster_node_ips ${cluster_node_ips} --node_ip ${node_ip} --selected_gpus=0,1 --log_dir testlog"
python -m paddle.distributed.launch ${distributed_args} multi_process.py

str1="selected_gpus:0 worker_endpoints:127.0.0.1:6170,127.0.0.1:6171,127.0.0.2:6170,127.0.0.2:6171 trainers_num:4 current_endpoint:127.0.0.1:6170 trainer_id:0"
str2="selected_gpus:1 worker_endpoints:127.0.0.1:6170,127.0.0.1:6171,127.0.0.2:6170,127.0.0.2:6171 trainers_num:4 current_endpoint:127.0.0.1:6171 trainer_id:1"
file_0="multi_process.check_0.log"
file_1="multi_process.check_1.log"
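# Each launched trainer is expected to dump its settings into
# multi_process.check_<local rank>.log (presumably written by multi_process.py);
# the greps below check that the two local ranks saw the right endpoints.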

echo "paddlecloud params test"
if grep -q "$str1" "$file_0"; then
    echo "find trainer 0"
else
    echo "not find trainer 0"
    exit -1
fi

if grep -q "$str2" "$file_1"; then
    echo "find trainer 1"
else
    echo "trainer 1 not found"
    exit 1
fi
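
# Both greps matched: `set -e` aborts on a failed launch and each unmatched
# grep exits explicitly above, so reaching this line means the test passed.
echo "paddlecloud params test passed"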