Commit b7634a8d authored by wanghaoshuang, committed by guosheng

Fix relu and log op.

Parent 9cd020ca
@@ -4941,9 +4941,9 @@ def log(input):
             output = fluid.layers.log(input)
     """
     helper = LayerHelper('log', **locals())
-    dtype = helper.input_dtype()
+    dtype = helper.input_dtype(input_param_name='x')
     out = helper.create_tmp_variable(dtype)
-    helper.append_op(type="log", inputs={"X": input}, outputs={"Out": out})
+    helper.append_op(type="log", inputs={"X": x}, outputs={"Out": out})
     return out
@@ -4970,9 +4970,9 @@ def relu(input):
             output = fluid.layers.relu(input)
     """
     helper = LayerHelper('relu', **locals())
-    dtype = helper.input_dtype()
+    dtype = helper.input_dtype(input_param_name='x')
     out = helper.create_tmp_variable(dtype)
-    helper.append_op(type="relu", inputs={"X": input}, outputs={"Out": out})
+    helper.append_op(type="relu", inputs={"X": x}, outputs={"Out": out})
     return out
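
After this change, both layer bodies consistently refer to their tensor argument as x (matching input_param_name='x'), which is what LayerHelper needs to resolve the operator input and its dtype. The snippet below is a minimal usage sketch, not part of the commit; it assumes the fluid API of this era (fluid.layers.data, CPUPlace, Executor) and simply exercises the two fixed layers end to end.

import numpy as np
import paddle.fluid as fluid

# Usage sketch (assumed era-appropriate fluid API, not part of the commit):
# build a small program that feeds a positive tensor through log() and relu().
x = fluid.layers.data(name='x', shape=[3], dtype='float32')
y = fluid.layers.relu(fluid.layers.log(x))

place = fluid.CPUPlace()
exe = fluid.Executor(place)
exe.run(fluid.default_startup_program())

# Feed a single positive row so log() is well defined, then fetch the result.
out, = exe.run(feed={'x': np.array([[1.0, 2.0, 3.0]], dtype='float32')},
               fetch_list=[y])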