Commit 6f6642ed authored by W wanghaoshuang, committed by Qingsheng Li

Fix relu and log ops: pass the `x` argument to `append_op` instead of the undefined name `input`, and give `input_dtype()` the correct `input_param_name='x'`.

Parent 9b3f48d7
@@ -4938,9 +4938,9 @@ def log(x):
         output = fluid.layers.log(x)
     """
     helper = LayerHelper('log', **locals())
-    dtype = helper.input_dtype()
+    dtype = helper.input_dtype(input_param_name='x')
     out = helper.create_tmp_variable(dtype)
-    helper.append_op(type="log", inputs={"X": input}, outputs={"Out": out})
+    helper.append_op(type="log", inputs={"X": x}, outputs={"Out": out})
     return out
@@ -4967,9 +4967,9 @@ def relu(x):
         output = fluid.layers.relu(x)
     """
     helper = LayerHelper('relu', **locals())
-    dtype = helper.input_dtype()
+    dtype = helper.input_dtype(input_param_name='x')
     out = helper.create_tmp_variable(dtype)
-    helper.append_op(type="relu", inputs={"X": input}, outputs={"Out": out})
+    helper.append_op(type="relu", inputs={"X": x}, outputs={"Out": out})
     return out
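
For context, a minimal usage sketch of the two fixed layers. The `fluid.layers.data` call, tensor shape, and variable names below are illustrative assumptions and not part of this commit; before the fix, both calls raised a NameError because `append_op` referenced the undefined name `input`.

import paddle.fluid as fluid

# Hypothetical input variable; shape/name chosen only for illustration.
x = fluid.layers.data(name="x", shape=[32], dtype="float32")

# After this commit, the layers wire {"X": x} into the underlying op
# and resolve the dtype from the 'x' parameter.
log_out = fluid.layers.log(x)
relu_out = fluid.layers.relu(x)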