From 3fe63d6780eb73806e29acb6b8d4a73dd59ca97c Mon Sep 17 00:00:00 2001
From: danleifeng <52735331+danleifeng@users.noreply.github.com>
Date: Mon, 18 Nov 2019 14:39:46 +0800
Subject: [PATCH] add store_true to use_paddlecloud argument in launch.py (#21168)

---
 python/paddle/distributed/launch.py                | 6 +++---
 python/paddle/fluid/tests/unittests/test_launch.sh | 3 ++-
 2 files changed, 5 insertions(+), 4 deletions(-)

diff --git a/python/paddle/distributed/launch.py b/python/paddle/distributed/launch.py
index 73e91abbd4..3e2f2e59f9 100644
--- a/python/paddle/distributed/launch.py
+++ b/python/paddle/distributed/launch.py
@@ -95,9 +95,9 @@ POD_IP (current node ip address, not needed for local training)
         help="The current node ip. ")
     parser.add_argument(
         "--use_paddlecloud",
-        type=bool,
-        default="False",
-        help="wheter to use paddlecloud platform to run your multi-process job.")
+        action='store_true',
+        help="whether to use paddlecloud platform to run your multi-process job. If false, no need to set this argument."
+    )
     parser.add_argument(
         "--started_port",
         type=int,
diff --git a/python/paddle/fluid/tests/unittests/test_launch.sh b/python/paddle/fluid/tests/unittests/test_launch.sh
index 1419ba7335..d3b8d34e49 100644
--- a/python/paddle/fluid/tests/unittests/test_launch.sh
+++ b/python/paddle/fluid/tests/unittests/test_launch.sh
@@ -11,7 +11,8 @@ export POD_IP=127.0.0.1
 export PADDLE_TRAINERS=127.0.0.1,127.0.0.2
 export PADDLE_TRAINER_ID=0
 
-distributed_args="--use_paddlecloud True --cluster_node_ips ${cluster_node_ips} --node_ip ${node_ip} --selected_gpus=0,1 --log_dir testlog"
+distributed_args="--use_paddlecloud --cluster_node_ips=${cluster_node_ips} --node_ip=${node_ip}
+--selected_gpus=0,1 --log_dir=testlog"
 python -m paddle.distributed.launch ${distributed_args} multi_process.py
 
 str1="selected_gpus:0 worker_endpoints:127.0.0.1:6170,127.0.0.1:6171,127.0.0.2:6170,127.0.0.2:6171 trainers_num:4 current_endpoint:127.0.0.1:6170 trainer_id:0"
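
For context, the switch from type=bool to action='store_true' matters because argparse applies bool() to the raw command-line string, and bool() of any non-empty string, including "False", is True; the old string default "False" was likewise truthy. The option therefore could not actually be turned off from the command line. A minimal illustrative sketch of the two behaviours (standalone, not part of the patch):

    import argparse

    # Old style: argparse calls bool() on the raw string, and bool("False") is True,
    # so passing "--use_paddlecloud False" still yields True.
    old = argparse.ArgumentParser()
    old.add_argument("--use_paddlecloud", type=bool, default=False)
    print(old.parse_args(["--use_paddlecloud", "False"]).use_paddlecloud)   # True

    # New style: a plain flag, False unless the option is present at all.
    new = argparse.ArgumentParser()
    new.add_argument("--use_paddlecloud", action='store_true')
    print(new.parse_args([]).use_paddlecloud)                               # False
    print(new.parse_args(["--use_paddlecloud"]).use_paddlecloud)            # True

Accordingly, the test script now passes the bare flag --use_paddlecloud instead of --use_paddlecloud True.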