Unverified commit 278dd003, authored by tangwei12, committed by GitHub

paddle cloud role maker fix (#19646)

* optimize cloud rolemaker, test=develop
Parent 4155e625
...
@@ -334,45 +334,48 @@ class PaddleCloudRoleMaker(RoleMakerBase):
     def generate_role(self):
         if not self._role_is_generated:
             if not self._is_collective:
-                self.port = os.getenv("PADDLE_PORT",
-                                      "6174")  # port of current server
-                self.pserver_ips = os.getenv("PADDLE_PSERVERS",
-                                             "")  # ip of server
-
-                if "," in self.port:
-                    ports = self.port.split(",")
-                else:
-                    ports = [self.port for i in self.pserver_ips.split(",")]
-                eplist = []
-                # note that, we usually assign the same port to different ips
-                # if we run parameter server training in local mode
-                # port should be different in environment variables
-                for i, ip in enumerate(self.pserver_ips.split(",")):
-                    eplist.append(':'.join([ip, ports[i]]))
-                self.endpoints = ",".join(eplist)
-                self._trainers_num = int(os.getenv("PADDLE_TRAINERS_NUM", "1"))
-                # ip of current node, either a worker or a pserver
-                current_ip = os.getenv("POD_IP", "")
-                if current_ip == "":
-                    self._current_endpoint = os.getenv("CURRENT_ENDPOINT")
-                else:
-                    self._current_endpoint = current_ip + ports[0]
-                self.role = os.getenv("PADDLE_TRAINING_ROLE", "TRAINER")
-                # for trainer, only POD_IP and current trainer id is needed
-                # we usually do not need to know other trainer ips
-                self.trainer_id = int(os.getenv("PADDLE_TRAINER_ID", "0"))
-                self.eplist = eplist
-                self.endpoints = self.endpoints.split(",")
-                self._server_endpoints = self.endpoints
-                self._worker_endpoints = self.endpoints
-                if self.role.upper() == "PSERVER":
-                    # current endpoint index among all pservers
-                    self._current_id = self.endpoints.index(
-                        self._current_endpoint)
-                    self._role = Role.SERVER
-                else:
-                    self._current_id = self.trainer_id
-                    self._role = Role.WORKER
+                try:
+                    port = os.environ["PADDLE_PORT"]
+                    pserver_ips = os.environ["PADDLE_PSERVERS"].split(",")
+                    if "," in port:
+                        ports = port.split(",")
+                    else:
+                        ports = [port] * len(pserver_ips)
+                    eplist = []
+                    # note that, we usually assign the same port to different ips
+                    # if we run parameter server training in local mode
+                    # port should be different in environment variables
+                    for i, ip in enumerate(pserver_ips):
+                        eplist.append(':'.join([ip, ports[i]]))
+
+                    trainers_num = int(os.environ["PADDLE_TRAINERS_NUM"])
+                    training_role = os.environ["TRAINING_ROLE"]
+
+                    if training_role not in ["TRAINER", "PSERVER"]:
+                        raise ValueError(
+                            "TRAINING_ROLE must be PSERVER or TRAINER")
+
+                    if training_role == "TRAINER":
+                        role = Role.WORKER
+                        current_id = int(os.environ["PADDLE_TRAINER_ID"])
+                    elif training_role == "PSERVER":
+                        role = Role.SERVER
+                        cur_ip = os.environ["POD_IP"]
+                        cur_idx = pserver_ips.index(cur_ip)
+                        current_id = eplist.index(":".join(
+                            [cur_ip, ports[cur_idx]]))
+                    else:
+                        raise ValueError(
+                            "TRAINING_ROLE must be PSERVER or TRAINER")
+                except ValueError as ve:
+                    raise ValueError(
+                        "something wrong with PaddleCloud, please check environment"
+                    )
+
+                self._trainers_num = trainers_num
+                self._server_endpoints = eplist
+                self._role = role
+                self._current_id = current_id
             else:
                 self._current_id = int(os.getenv("PADDLE_TRAINER_ID", "0"))
                 self._training_role = os.getenv("PADDLE_TRAINING_ROLE",
...
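
For context, here is a small standalone sketch (not part of the commit) that mirrors how the rewritten branch builds the pserver endpoint list and picks the current id from the environment; the variable values below are illustrative only:

import os

# Illustrative PaddleCloud-style settings; on the platform these are set for you.
os.environ["PADDLE_PORT"] = "36001"
os.environ["PADDLE_PSERVERS"] = "127.0.0.1,127.0.0.2"
os.environ["TRAINING_ROLE"] = "PSERVER"
os.environ["POD_IP"] = "127.0.0.2"

port = os.environ["PADDLE_PORT"]
pserver_ips = os.environ["PADDLE_PSERVERS"].split(",")
# A single PADDLE_PORT is reused for every pserver ip; a comma-separated
# value would instead be split positionally, one port per ip.
ports = port.split(",") if "," in port else [port] * len(pserver_ips)
eplist = [":".join([ip, ports[i]]) for i, ip in enumerate(pserver_ips)]

# A PSERVER identifies itself by the position of POD_IP among the pserver ips.
cur_ip = os.environ["POD_IP"]
current_id = eplist.index(":".join([cur_ip, ports[pserver_ips.index(cur_ip)]]))

print(eplist)        # ['127.0.0.1:36001', '127.0.0.2:36001']
print(current_id)    # 1

Note that the refactored code reads the variables with os.environ[...] rather than os.getenv(...) with defaults, and wraps the parsing in a try/except that re-raises any ValueError as a single "please check environment" error; the new unit test below covers the TRAINER, PSERVER, and invalid-role paths.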
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function

import os
import unittest

import paddle.fluid.incubate.fleet.base.role_maker as role_maker


class TestCloudRoleMaker(unittest.TestCase):
    def setUp(self):
        # Common PaddleCloud environment shared by all cases.
        os.environ["PADDLE_PORT"] = "36001"
        os.environ["PADDLE_PSERVERS"] = "127.0.0.1,127.0.0.2"
        os.environ["PADDLE_TRAINERS_NUM"] = "2"

    def test_tr_rolemaker(self):
        os.environ["TRAINING_ROLE"] = "TRAINER"
        os.environ["PADDLE_TRAINER_ID"] = "0"

        ro = role_maker.PaddleCloudRoleMaker(is_collective=False)
        ro.generate_role()
        self.assertTrue(ro.is_worker())
        self.assertFalse(ro.is_server())
        self.assertEqual(ro.worker_num(), 2)

    def test_ps_rolemaker(self):
        os.environ["TRAINING_ROLE"] = "PSERVER"
        os.environ["POD_IP"] = "127.0.0.1"

        ro = role_maker.PaddleCloudRoleMaker(is_collective=False)
        ro.generate_role()
        self.assertFalse(ro.is_worker())
        self.assertTrue(ro.is_server())
        self.assertEqual(ro.worker_num(), 2)

    def test_training_role(self):
        # An unknown TRAINING_ROLE must be rejected with ValueError.
        os.environ["TRAINING_ROLE"] = "TEST"

        ro = role_maker.PaddleCloudRoleMaker(is_collective=False)
        self.assertRaises(ValueError, ro.generate_role)


if __name__ == "__main__":
    unittest.main()