Unverified · Commit 2dce4320 authored by Nyakku Shigure, committed by GitHub

[CodeStyle][py2] use new syntax for metaclass declaration (PEP 3115) (#47730)

Parent 433e67bd
@@ -30,7 +30,7 @@ __all__ = ['Fleet', 'DistributedOptimizer']
 __all__ += mode.__all__
-class Fleet:
+class Fleet(metaclass=abc.ABCMeta):
     """
     Fleet is the base class, transpiler and pslib are implementation of Fleet.
@@ -41,8 +41,6 @@ class Fleet:
         None
     """
-    __metaclass__ = abc.ABCMeta
     def __init__(self, mode):
         self._is_initialized = False
         self._mode = mode
@@ -268,7 +266,7 @@ class Fleet:
         pass
-class DistributedOptimizer:
+class DistributedOptimizer(metaclass=abc.ABCMeta):
     """
     DistributedOptimizer is a wrapper for paddle.fluid.optimizer
     A user should pass a paddle.fluid.optimizer to DistributedOptimizer
@@ -287,8 +285,6 @@ class DistributedOptimizer:
     """
-    __metaclass__ = abc.ABCMeta
     def __init__(self, optimizer, strategy=None):
         if (
            not isinstance(optimizer, SGD.__bases__)
...
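The commit swaps the Python 2-era `__metaclass__` class attribute for the PEP 3115 keyword syntax used above. Below is a minimal sketch, not part of the PaddlePaddle sources (the class and method names are made up for illustration), of why this matters under Python 3, where the `__metaclass__` attribute is silently ignored and therefore `abc.ABCMeta` never takes effect:

import abc

class OldStyle:
    # Python 2 style: in Python 3 this attribute is ignored, so the class
    # stays a plain class and its abstract methods are not enforced.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def run(self):
        pass

class NewStyle(metaclass=abc.ABCMeta):
    # PEP 3115 keyword syntax: ABCMeta is actually applied.
    @abc.abstractmethod
    def run(self):
        pass

OldStyle()                   # no error: the metaclass never took effect
try:
    NewStyle()
except TypeError as exc:     # abstract class cannot be instantiated
    print(exc)

With the keyword form, subclasses of Fleet and DistributedOptimizer that forget to implement an abstract method now fail at instantiation time instead of silently passing the check.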