未验证 提交 ddd60444 编写于 作者: C Chengmo 提交者: GitHub

Fix judge pslib transpiler (#23720)

* fix judge pslib & transpiler
上级 de3e299d
...@@ -13,6 +13,10 @@ ...@@ -13,6 +13,10 @@
# limitations under the License. # limitations under the License.
"""Defination of device workers.""" """Defination of device workers."""
from __future__ import print_function
from paddle.fluid.incubate.fleet.parameter_server import version
__all__ = [ __all__ = [
'DeviceWorker', 'Hogwild', 'DownpourSGD', 'Section', 'DownpourSGDOPT' 'DeviceWorker', 'Hogwild', 'DownpourSGD', 'Section', 'DownpourSGDOPT'
] ]
...@@ -33,7 +37,7 @@ class DeviceWorker(object): ...@@ -33,7 +37,7 @@ class DeviceWorker(object):
def _set_infer(self, infer=False): def _set_infer(self, infer=False):
""" """
set inference flag for current device worker set inference flag for current device worker
Args: Args:
infer(bool): whether to do inference infer(bool): whether to do inference
""" """
...@@ -101,6 +105,9 @@ class Hogwild(DeviceWorker): ...@@ -101,6 +105,9 @@ class Hogwild(DeviceWorker):
if not opt_info: if not opt_info:
return return
if version.is_transpiler() and "fleet_desc" not in opt_info:
return
program_configs = opt_info["program_configs"] program_configs = opt_info["program_configs"]
downpour = trainer_desc.downpour_param downpour = trainer_desc.downpour_param
hogwild = trainer_desc.hogwild_param hogwild = trainer_desc.hogwild_param
...@@ -149,7 +156,7 @@ class Hogwild(DeviceWorker): ...@@ -149,7 +156,7 @@ class Hogwild(DeviceWorker):
i].slot_gradient) i].slot_gradient)
sparse_table.fea_dim = \ sparse_table.fea_dim = \
self._fleet_desc.server_param.downpour_server_param.downpour_table_param[ self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
i].accessor.fea_dim i].accessor.fea_dim
# not use emb_dim # not use emb_dim
sparse_table.emb_dim = -1 sparse_table.emb_dim = -1
# not use hard code click # not use hard code click
...@@ -246,12 +253,12 @@ class DownpourSGD(DeviceWorker): ...@@ -246,12 +253,12 @@ class DownpourSGD(DeviceWorker):
"no_cvm"] == True: "no_cvm"] == True:
sparse_table.emb_dim = \ sparse_table.emb_dim = \
self._fleet_desc.server_param.downpour_server_param.downpour_table_param[ self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
i].accessor.fea_dim i].accessor.fea_dim
sparse_table.fea_dim = sparse_table.emb_dim sparse_table.fea_dim = sparse_table.emb_dim
else: else:
sparse_table.emb_dim = \ sparse_table.emb_dim = \
self._fleet_desc.server_param.downpour_server_param.downpour_table_param[ self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
i].accessor.fea_dim - 2 i].accessor.fea_dim - 2
sparse_table.fea_dim = sparse_table.emb_dim + 2 sparse_table.fea_dim = sparse_table.emb_dim + 2
# TODO(guru4elephant): hard code here, need to improve # TODO(guru4elephant): hard code here, need to improve
sparse_table.label_var_name = "click" sparse_table.label_var_name = "click"
...@@ -346,12 +353,12 @@ class DownpourSGDOPT(DeviceWorker): ...@@ -346,12 +353,12 @@ class DownpourSGDOPT(DeviceWorker):
"no_cvm"] == True: "no_cvm"] == True:
sparse_table.emb_dim = \ sparse_table.emb_dim = \
self._fleet_desc.server_param.downpour_server_param.downpour_table_param[ self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
i].accessor.fea_dim i].accessor.fea_dim
sparse_table.fea_dim = sparse_table.emb_dim sparse_table.fea_dim = sparse_table.emb_dim
else: else:
sparse_table.emb_dim = \ sparse_table.emb_dim = \
self._fleet_desc.server_param.downpour_server_param.downpour_table_param[ self._fleet_desc.server_param.downpour_server_param.downpour_table_param[
i].accessor.fea_dim - 2 i].accessor.fea_dim - 2
sparse_table.fea_dim = sparse_table.emb_dim + 2 sparse_table.fea_dim = sparse_table.emb_dim + 2
# TODO(guru4elephant): hard code here, need to improve # TODO(guru4elephant): hard code here, need to improve
sparse_table.label_var_name = "click" sparse_table.label_var_name = "click"
...@@ -406,7 +413,7 @@ class Section(DeviceWorker): ...@@ -406,7 +413,7 @@ class Section(DeviceWorker):
cfg.program_desc.ParseFromString(program["program"]._get_desc() cfg.program_desc.ParseFromString(program["program"]._get_desc()
.serialize_to_string()) .serialize_to_string())
# TODO: why does not work # TODO: why does not work
#cfg.program_desc.CopyFrom(program.program._get_desc()) # cfg.program_desc.CopyFrom(program.program._get_desc())
place = pipeline_opt["place_list"][i] place = pipeline_opt["place_list"][i]
if isinstance(place, core.CPUPlace): if isinstance(place, core.CPUPlace):
cfg.place = cfg.CPUPlace cfg.place = cfg.CPUPlace
......
...@@ -20,19 +20,15 @@ import paddle.fluid as fluid ...@@ -20,19 +20,15 @@ import paddle.fluid as fluid
from paddle.fluid.executor import Executor from paddle.fluid.executor import Executor
from paddle.fluid.optimizer import SGD from paddle.fluid.optimizer import SGD
from paddle.fluid.incubate.fleet.base.mode import Mode
from paddle.fluid.incubate.fleet.base.role_maker import MPISymetricRoleMaker from paddle.fluid.incubate.fleet.base.role_maker import MPISymetricRoleMaker
from paddle.fluid.incubate.fleet.base.role_maker import RoleMakerBase from paddle.fluid.incubate.fleet.base.role_maker import RoleMakerBase
from paddle.fluid.incubate.fleet.base.role_maker import UserDefinedRoleMaker from paddle.fluid.incubate.fleet.base.role_maker import UserDefinedRoleMaker
from paddle.fluid.contrib.mixed_precision.decorator import OptimizerWithMixedPrecision from paddle.fluid.contrib.mixed_precision.decorator import OptimizerWithMixedPrecision
from . import mode
__all__ = ['Fleet', 'DistributedOptimizer']
class Mode: __all__ += mode.__all__
"""
There are various mode for fleet, each of them is designed for different model.
"""
TRANSPILER = 1
PSLIB = 2
COLLECTIVE = 3
class Fleet(object): class Fleet(object):
...@@ -275,7 +271,7 @@ class DistributedOptimizer(object): ...@@ -275,7 +271,7 @@ class DistributedOptimizer(object):
def __init__(self, optimizer, strategy=None): def __init__(self, optimizer, strategy=None):
if not isinstance(optimizer, SGD.__bases__) \ if not isinstance(optimizer, SGD.__bases__) \
and not isinstance(optimizer, OptimizerWithMixedPrecision): and not isinstance(optimizer, OptimizerWithMixedPrecision):
raise TypeError("optimizer must be an instance of Optimizer") raise TypeError("optimizer must be an instance of Optimizer")
self._optimizer = optimizer self._optimizer = optimizer
......
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
__all__ = ['Mode']
class Mode:
    """
    Enumeration of fleet distributed-training modes.

    Each mode selects a different distributed-training implementation;
    the build system records one of them as ``BUILD_MODE`` in the
    generated fleet ``version`` module.
    """
    # NOTE: values are fixed integers relied on by generated code
    # (e.g. ``Mode.TRANSPILER == BUILD_MODE`` checks) — do not renumber.
    TRANSPILER, PSLIB, COLLECTIVE = 1, 2, 3
...@@ -5,6 +5,7 @@ import re ...@@ -5,6 +5,7 @@ import re
import shutil import shutil
import sys import sys
import fnmatch import fnmatch
import errno
from contextlib import contextmanager from contextlib import contextmanager
from setuptools import Command from setuptools import Command
...@@ -103,6 +104,34 @@ def mkl(): ...@@ -103,6 +104,34 @@ def mkl():
write_version_py(filename='@PADDLE_BINARY_DIR@/python/paddle/version.py') write_version_py(filename='@PADDLE_BINARY_DIR@/python/paddle/version.py')
def write_distributed_training_mode_py(filename='paddle/fluid/incubate/fleet/parameter_server/version.py'):
    """Generate the fleet ``version.py`` module recording the build mode.

    The generated module defines ``BUILD_MODE`` (``Mode.PSLIB`` when the
    build was configured with WITH_PSLIB=ON, otherwise ``Mode.TRANSPILER``)
    and an ``is_transpiler()`` helper that runtime code uses to pick the
    transpiler or PSLIB code path.

    Args:
        filename(str): path of the version module to write; missing parent
            directories are created.
    """
    cnt = '''from __future__ import print_function
# THIS FILE IS GENERATED FROM PADDLEPADDLE SETUP.PY
from paddle.fluid.incubate.fleet.base.mode import Mode
BUILD_MODE=Mode.%(mode)s
def is_transpiler():
    return Mode.TRANSPILER == BUILD_MODE
'''
    dirname = os.path.dirname(filename)
    # A bare file name has no directory part; os.makedirs('') would raise,
    # so only create directories when there is something to create.
    if dirname:
        try:
            os.makedirs(dirname)
        except OSError as e:
            # The directory already existing (e.g. a rebuild) is fine;
            # any other failure is a real error and must propagate.
            if e.errno != errno.EEXIST:
                raise
    with open(filename, 'w') as f:
        # '${WITH_PSLIB}' is substituted by CMake before this script runs.
        f.write(cnt % {
            'mode': 'PSLIB' if '${WITH_PSLIB}' == 'ON' else 'TRANSPILER'
        })
write_distributed_training_mode_py(filename='@PADDLE_BINARY_DIR@/python/paddle/fluid/incubate/fleet/parameter_server/version.py')
packages=['paddle', packages=['paddle',
'paddle.libs', 'paddle.libs',
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册