Unverified commit ce16400d, authored by Yang Yang(Tony), committed via GitHub

make append activation in place by default (#9417)

Parent: a7c48d5d
@@ -398,7 +398,6 @@ class LayerHelper(object):
             return input_var
         if isinstance(act, basestring):
            act = {'type': act}
-        tmp = self.create_tmp_variable(dtype=input_var.dtype)
         if 'use_mkldnn' in self.kwargs:
             act['use_mkldnn'] = self.kwargs.get('use_mkldnn')
@@ -408,9 +407,9 @@ class LayerHelper(object):
         self.append_op(
             type=act_type,
             inputs={"X": [input_var]},
-            outputs={"Out": [tmp]},
+            outputs={"Out": [input_var]},
             attrs=act)
-        return tmp
+        return input_var

    def _get_default_initializer(self, dtype):
        if dtype is None or dtype_is_floating(dtype) is True:
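To illustrate the effect of the change, here is a minimal, self-contained sketch. `FakeHelper` and its methods are hypothetical stand-ins, not Paddle's real `LayerHelper` API: the out-of-place variant returns a freshly created temporary variable as the activation output, while the in-place variant names the op's input as its own output, so no extra buffer is created and the caller gets back the same variable it passed in.

class FakeHelper:
    """Hypothetical stand-in for LayerHelper, for illustration only."""

    def __init__(self):
        self.ops = []

    def append_op(self, type, inputs, outputs, attrs):
        # Record the op instead of building a real program.
        self.ops.append((type, inputs, outputs, attrs))

    def append_activation_out_of_place(self, input_var, act_type):
        # Old behaviour: allocate a new output variable for the activation.
        tmp = object()  # stands in for create_tmp_variable(...)
        self.append_op(type=act_type,
                       inputs={"X": [input_var]},
                       outputs={"Out": [tmp]},
                       attrs={"type": act_type})
        return tmp  # caller receives a new variable

    def append_activation_in_place(self, input_var, act_type):
        # New behaviour: the op writes its result back into its own input.
        self.append_op(type=act_type,
                       inputs={"X": [input_var]},
                       outputs={"Out": [input_var]},  # reuse the input buffer
                       attrs={"type": act_type})
        return input_var  # caller receives the same variable


helper = FakeHelper()
x = object()
y = helper.append_activation_in_place(x, "relu")
assert y is x  # the activation result lives in the same variable

The design trade-off is memory versus reusability: reusing the input buffer saves one temporary per activation, but it assumes no other op still needs the pre-activation value of that variable.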