提交 36ec3434 编写于 作者: S sneaxiy

fix_api_kwargs

上级 5da4ece4
@@ -56,6 +56,8 @@ for _OP in set(__all__):
 # e.g.: test_program_code.py, test_dist_train.py
 globals()['_scale'] = generate_layer_fn('scale')
+globals()['_elementwise_div'] = generate_layer_fn('elementwise_div')
 __all__ += __activations_noattr__
 for _OP in set(__activations_noattr__):
...
@@ -26,6 +26,7 @@ from .layer_helper import LayerHelper
 from .regularizer import append_regularization_ops
 from .clip import append_gradient_clip_ops, error_clip_callback
 from contextlib import contextmanager
+from .layers import ops
 __all__ = [
     'SGD', 'Momentum', 'Adagrad', 'Adam', 'Adamax', 'DecayedAdagrad', 'Ftrl',
@@ -1301,7 +1302,7 @@ class ModelAverage(Optimizer):
             x=tmp, dtype='float32' if self._dtype == None else self._dtype)
         sum = layers.cast(
             x=sum, dtype='float32' if self._dtype == None else self._dtype)
-        layers.elementwise_div(x=sum, y=tmp, out=param)
+        ops._elementwise_div(x=sum, y=tmp, out=param)
     def _add_average_restore_op(self, block, param_grad):
         param = block._clone_variable(param_grad[0])
...
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册