Unverified commit 21fc789e, authored by Chengmo, committed by GitHub

[Cherry pick] ddd60444, fix judge pslib & transpiler (#23855)

* Fix judge pslib transpiler cherry-pick (#23720)
Parent 432b5df8
python/paddle/fluid/device_worker.py
@@ -13,6 +13,10 @@
# limitations under the License.
"""Defination of device workers."""
from __future__ import print_function
from paddle.fluid.incubate.fleet.parameter_server import version
__all__ = [
'DeviceWorker', 'Hogwild', 'DownpourSGD', 'Section', 'DownpourSGDOPT'
]
@@ -33,7 +37,7 @@ class DeviceWorker(object):
def _set_infer(self, infer=False):
"""
set inference flag for current device worker
Args:
infer(bool): whether to do inference
"""
@@ -101,6 +105,10 @@ class Hogwild(DeviceWorker):
if not opt_info:
return
# when running in pslib mode, opt_info has fleet_desc
if version.is_transpiler() and "fleet_desc" not in opt_info:
return
program_configs = opt_info["program_configs"]
downpour = trainer_desc.downpour_param
hogwild = trainer_desc.hogwild_param
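This guard is the substance of the fix: a TRANSPILER build never places a fleet_desc into opt_info, so Hogwild._gen_worker_desc has to bail out before the pslib-specific descriptor setup that follows. A minimal standalone sketch of the dispatch (is_transpiler is stubbed here in place of the generated version module; the return strings are illustrative):

```python
def is_transpiler():
    # Stub for paddle.fluid.incubate.fleet.parameter_server.version.is_transpiler(),
    # which reports the distributed-training mode baked in at build time.
    return True

def gen_worker_desc(opt_info):
    if not opt_info:
        return "skipped: no opt_info"
    # pslib mode populates opt_info["fleet_desc"]; transpiler mode does not,
    # and everything below this point assumes fleet_desc-derived fields exist.
    if is_transpiler() and "fleet_desc" not in opt_info:
        return "skipped: transpiler mode without fleet_desc"
    return "pslib-specific descriptor setup runs"

print(gen_worker_desc({}))                 # skipped: no opt_info
print(gen_worker_desc({"use_cvm": True}))  # skipped: transpiler mode without fleet_desc
```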
@@ -149,7 +157,7 @@ class Hogwild(DeviceWorker):
i].slot_gradient)
sparse_table.fea_dim = \
self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
i].accessor.fea_dim
# not use emb_dim
sparse_table.emb_dim = -1
# not use hard code click
@@ -246,12 +254,12 @@ class DownpourSGD(DeviceWorker):
"no_cvm"] == True:
sparse_table.emb_dim = \
self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
i].accessor.fea_dim
sparse_table.fea_dim = sparse_table.emb_dim
else:
sparse_table.emb_dim = \
self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
i].accessor.fea_dim - 2
sparse_table.fea_dim = sparse_table.emb_dim + 2
# TODO(guru4elephant): hard code here, need to improve
sparse_table.label_var_name = "click"
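The ±2 bookkeeping above (repeated in DownpourSGDOPT below) follows from the sparse-table layout: unless no_cvm is set, two slots of each sparse value are reserved alongside the embedding, so the embedding width is the accessor's fea_dim minus those two slots. A worked example of the arithmetic, with an assumed fea_dim of 11:

```python
# fea_dim = 11 is illustrative; reading the two extra slots as the CVM
# (show/click) statistics is an assumption, not stated in the diff.
table_fea_dim = 11

for no_cvm in (True, False):
    emb_dim = table_fea_dim if no_cvm else table_fea_dim - 2
    fea_dim = emb_dim if no_cvm else emb_dim + 2
    print(no_cvm, emb_dim, fea_dim)  # True 11 11 / False 9 11
```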
@@ -346,12 +354,12 @@ class DownpourSGDOPT(DeviceWorker):
"no_cvm"] == True:
sparse_table.emb_dim = \
self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
i].accessor.fea_dim
sparse_table.fea_dim = sparse_table.emb_dim
else:
sparse_table.emb_dim = \
self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
i].accessor.fea_dim - 2
sparse_table.fea_dim = sparse_table.emb_dim + 2
# TODO(guru4elephant): hard code here, need to improve
sparse_table.label_var_name = "click"
@@ -406,7 +414,7 @@ class Section(DeviceWorker):
cfg.program_desc.ParseFromString(program["program"]._get_desc()
.serialize_to_string())
# TODO: figure out why this does not work
# cfg.program_desc.CopyFrom(program.program._get_desc())
place = pipeline_opt["place_list"][i]
if isinstance(place, core.CPUPlace):
cfg.place = cfg.CPUPlace
python/paddle/fluid/incubate/fleet/base/fleet_base.py
@@ -20,19 +20,15 @@ import paddle.fluid as fluid
from paddle.fluid.executor import Executor
from paddle.fluid.optimizer import SGD
from paddle.fluid.incubate.fleet.base.mode import Mode
from paddle.fluid.incubate.fleet.base.role_maker import MPISymetricRoleMaker
from paddle.fluid.incubate.fleet.base.role_maker import RoleMakerBase
from paddle.fluid.incubate.fleet.base.role_maker import UserDefinedRoleMaker
from paddle.fluid.contrib.mixed_precision.decorator import OptimizerWithMixedPrecision
from . import mode
class Mode:
"""
There are various mode for fleet, each of them is designed for different model.
"""
TRANSPILER = 1
PSLIB = 2
COLLECTIVE = 3
__all__ = ['Fleet', 'DistributedOptimizer']
__all__ += mode.__all__
class Fleet(object):
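The net effect of this hunk: the inline Mode class moves out of fleet_base.py into its own module (shown further down), presumably so the build-generated version.py can import Mode without pulling in Executor, SGD, and the rest of fleet_base; `__all__ += mode.__all__` then keeps the public surface unchanged. A runnable sketch of that re-export pattern, using a stand-in module rather than the real package:

```python
import types

# Stand-in for the new paddle.fluid.incubate.fleet.base.mode module.
mode = types.ModuleType("mode")
mode.Mode = type("Mode", (), {"TRANSPILER": 1, "PSLIB": 2, "COLLECTIVE": 3})
mode.__all__ = ['Mode']

# fleet_base appends the extracted module's exports to its own __all__;
# the name must also be bound locally for `from fleet_base import *` to resolve.
Mode = mode.Mode
__all__ = ['Fleet', 'DistributedOptimizer']
__all__ += mode.__all__
print(__all__)  # ['Fleet', 'DistributedOptimizer', 'Mode']
```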
@@ -275,7 +271,7 @@ class DistributedOptimizer(object):
def __init__(self, optimizer, strategy=None):
if not isinstance(optimizer, SGD.__bases__) \
and not isinstance(optimizer, OptimizerWithMixedPrecision):
raise TypeError("optimizer must be an instance of Optimizer")
self._optimizer = optimizer
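The isinstance test accepts anything derived from SGD's base class: `SGD.__bases__` is the tuple of SGD's direct base classes (the Optimizer base in paddle), and isinstance accepts a tuple of classes, so every Optimizer subclass passes without this file importing Optimizer itself. The idiom in isolation, with stand-in classes:

```python
class Base:          # stands in for paddle's Optimizer base class
    pass

class Sgd(Base):     # stands in for SGD
    pass

class Adam(Base):    # a sibling optimizer type
    pass

print(Sgd.__bases__)                        # (<class '__main__.Base'>,)
print(isinstance(Adam(), Sgd.__bases__))    # True: Adam derives from Base
print(isinstance(object(), Sgd.__bases__))  # False: unrelated types are rejected
```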
python/paddle/fluid/incubate/fleet/base/mode.py (new file)
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
__all__ = ['Mode']
class Mode:
"""
There are various modes for fleet; each of them is designed for a different model.
"""
TRANSPILER = 1
PSLIB = 2
COLLECTIVE = 3
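Note that Mode is a plain namespace class rather than an enum.Enum, so its members are bare ints that compare and embed anywhere with no extra imports, for example:

```python
class Mode:
    TRANSPILER = 1
    PSLIB = 2
    COLLECTIVE = 3

assert Mode.TRANSPILER == 1           # members are ordinary ints
assert Mode.PSLIB != Mode.COLLECTIVE  # distinct modes compare unequal
```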
python/setup.py.in
@@ -5,6 +5,7 @@ import re
import shutil
import sys
import fnmatch
import errno
from contextlib import contextmanager
from setuptools import Command
@@ -103,6 +104,34 @@ def mkl():
write_version_py(filename='@PADDLE_BINARY_DIR@/python/paddle/version.py')
def write_distributed_training_mode_py(filename='paddle/fluid/incubate/fleet/parameter_server/version.py'):
    cnt = '''from __future__ import print_function
# THIS FILE IS GENERATED FROM PADDLEPADDLE SETUP.PY
from paddle.fluid.incubate.fleet.base.mode import Mode
BUILD_MODE=Mode.%(mode)s
def is_transpiler():
    return Mode.TRANSPILER == BUILD_MODE
'''
    dirname = os.path.dirname(filename)
    try:
        os.makedirs(dirname)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
    with open(filename, 'w') as f:
        f.write(cnt % {
            'mode': 'PSLIB' if '${WITH_PSLIB}' == 'ON' else 'TRANSPILER'
        })

write_distributed_training_mode_py(filename='@PADDLE_BINARY_DIR@/python/paddle/fluid/incubate/fleet/parameter_server/version.py')
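For concreteness, substituting %(mode)s shows what the generated paddle/fluid/incubate/fleet/parameter_server/version.py contains when ${WITH_PSLIB} is not 'ON':

```python
from __future__ import print_function
# THIS FILE IS GENERATED FROM PADDLEPADDLE SETUP.PY
from paddle.fluid.incubate.fleet.base.mode import Mode
BUILD_MODE=Mode.TRANSPILER
def is_transpiler():
    return Mode.TRANSPILER == BUILD_MODE
```

The try/except on errno.EEXIST around os.makedirs is the Python 2 compatible spelling of os.makedirs(dirname, exist_ok=True): the directory may already exist from a previous build, and only that error is swallowed.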
packages=['paddle',
'paddle.libs',