Commit f9223c5f authored by zhoukunsheng, committed by Zeng Jinle

Logical compare (#16513)

* test=develop
update API.spec with hash, resolve conflict

* remove unused parameter
Parent 0114f1d6
...@@ -286,7 +286,11 @@ paddle.fluid.layers.increment (ArgSpec(args=['x', 'value', 'in_place'], varargs=
paddle.fluid.layers.array_write (ArgSpec(args=['x', 'i', 'array'], varargs=None, keywords=None, defaults=(None,)), ('document', '40b6d15f4c86b2b09df340d7778ad713'))
paddle.fluid.layers.create_array (ArgSpec(args=['dtype'], varargs=None, keywords=None, defaults=None), ('document', '2d4f20087080ba5105b55205ad5c5b6a'))
paddle.fluid.layers.less_than (ArgSpec(args=['x', 'y', 'force_cpu', 'cond'], varargs=None, keywords=None, defaults=(None, None)), ('document', '067bbc799c66289ca8b8924c26b6673f'))
paddle.fluid.layers.less_equal (ArgSpec(args=['x', 'y', 'cond'], varargs=None, keywords=None, defaults=(None,)), ('document', 'd6b173ae1a149e0bdfe7b8bf69285957'))
paddle.fluid.layers.greater_than (ArgSpec(args=['x', 'y', 'cond'], varargs=None, keywords=None, defaults=(None,)), ('document', '2c9bd414caa6c615539018d27001b44c'))
paddle.fluid.layers.greater_equal (ArgSpec(args=['x', 'y', 'cond'], varargs=None, keywords=None, defaults=(None,)), ('document', '62c667d24e7b07e166b47a53b61b2ff4'))
paddle.fluid.layers.equal (ArgSpec(args=['x', 'y', 'cond'], varargs=None, keywords=None, defaults=(None,)), ('document', '80c29b1dc64718f0116de90d1ac88a77'))
paddle.fluid.layers.not_equal (ArgSpec(args=['x', 'y', 'cond'], varargs=None, keywords=None, defaults=(None,)), ('document', '56148fb1024687a08e96af79bdc5c929'))
paddle.fluid.layers.array_read (ArgSpec(args=['array', 'i'], varargs=None, keywords=None, defaults=None), ('document', 'dd68bead34dfbaf6b0a163fc1cc3c385'))
paddle.fluid.layers.array_length (ArgSpec(args=['array'], varargs=None, keywords=None, defaults=None), ('document', 'ffb8b9578ec66db565b223d313aa82a2'))
paddle.fluid.layers.IfElse.__init__ (ArgSpec(args=['self', 'cond', 'name'], varargs=None, keywords=None, defaults=(None,)), ('document', '6adf97f83acf6453d4a6a4b1070f3754'))
...
...@@ -29,7 +29,8 @@ from functools import reduce
__all__ = [
    'While', 'Switch', 'increment', 'array_write', 'create_array', 'less_than',
    'less_equal', 'greater_than', 'greater_equal', 'equal', 'not_equal',
    'array_read', 'array_length', 'IfElse', 'DynamicRNN', 'StaticRNN',
    'reorder_lod_tensor_by_rank', 'Print', 'is_empty'
]
...@@ -972,6 +973,114 @@ def less_than(x, y, force_cpu=None, cond=None):
    return cond

@templatedoc()
def less_equal(x, y, cond=None):
    """
    This layer returns the truth value of :math:`x <= y` elementwise, which is equivalent to the overloaded operator `<=`.

    Args:
        x(Variable): First operand of *less_equal*
        y(Variable): Second operand of *less_equal*
        cond(Variable|None): Optional output variable to store the result of *less_equal*

    Returns:
        Variable: The tensor variable storing the output of *less_equal*.

    Examples:
        .. code-block:: python

          out = fluid.layers.less_equal(x=label, y=limit)
    """
    helper = LayerHelper("less_equal", **locals())
    if cond is None:
        cond = helper.create_variable_for_type_inference(dtype='bool')
        cond.stop_gradient = True

    attrs = dict()
    if force_init_on_cpu():
        attrs['force_cpu'] = force_init_on_cpu()

    helper.append_op(
        type='less_equal',
        inputs={'X': [x],
                'Y': [y]},
        outputs={'Out': [cond]},
        attrs=attrs)
    return cond
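
As a side note (not part of this diff): a minimal end-to-end sketch of how the new `less_equal` layer could be exercised, assuming a 1.x-style `fluid` program driven by an `Executor`. The input names and values below are illustrative only.

```python
import numpy as np
import paddle.fluid as fluid

# Two inputs of the same shape; append_batch_size=False keeps the declared
# shape as-is instead of prepending a batch dimension.
x = fluid.layers.data(name='x', shape=[3], dtype='float32', append_batch_size=False)
y = fluid.layers.data(name='y', shape=[3], dtype='float32', append_batch_size=False)

# Elementwise x <= y; the result is a bool tensor of the same shape.
le = fluid.layers.less_equal(x=x, y=y)

exe = fluid.Executor(fluid.CPUPlace())
exe.run(fluid.default_startup_program())
out, = exe.run(fluid.default_main_program(),
               feed={'x': np.array([1., 2., 3.], dtype='float32'),
                     'y': np.array([2., 2., 2.], dtype='float32')},
               fetch_list=[le])
print(out)  # [ True  True False]
```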
@templatedoc()
def greater_than(x, y, cond=None):
    """
    This layer returns the truth value of :math:`x > y` elementwise, which is equivalent to the overloaded operator `>`.

    Args:
        x(Variable): First operand of *greater_than*
        y(Variable): Second operand of *greater_than*
        cond(Variable|None): Optional output variable to store the result of *greater_than*

    Returns:
        Variable: The tensor variable storing the output of *greater_than*.

    Examples:
        .. code-block:: python

          out = fluid.layers.greater_than(x=label, y=limit)
    """
    helper = LayerHelper("greater_than", **locals())
    if cond is None:
        cond = helper.create_variable_for_type_inference(dtype='bool')
        cond.stop_gradient = True

    attrs = dict()
    if force_init_on_cpu():
        attrs['force_cpu'] = force_init_on_cpu()

    helper.append_op(
        type='greater_than',
        inputs={'X': [x],
                'Y': [y]},
        outputs={'Out': [cond]},
        attrs=attrs)
    return cond
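
The `cond` argument lets the result be written into an existing bool variable, which is how these layers typically drive control flow. A hedged sketch of that pattern with the new `greater_than`, mirroring the classic `less_than` + `While` idiom; the counter and limit below are made up for illustration.

```python
import paddle.fluid as fluid

i = fluid.layers.fill_constant(shape=[1], dtype='int64', value=0)
limit = fluid.layers.fill_constant(shape=[1], dtype='int64', value=5)

# Loop while limit > i.  The first call creates the bool condition variable.
cond = fluid.layers.greater_than(x=limit, y=i)
while_op = fluid.layers.While(cond=cond)
with while_op.block():
    i = fluid.layers.increment(x=i, in_place=True)
    # Re-evaluate the condition into the same variable so While sees the update.
    fluid.layers.greater_than(x=limit, y=i, cond=cond)
```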
@templatedoc()
def greater_equal(x, y, cond=None):
    """
    This layer returns the truth value of :math:`x >= y` elementwise, which is equivalent to the overloaded operator `>=`.

    Args:
        x(Variable): First operand of *greater_equal*
        y(Variable): Second operand of *greater_equal*
        cond(Variable|None): Optional output variable to store the result of *greater_equal*

    Returns:
        Variable: The tensor variable storing the output of *greater_equal*.

    Examples:
        .. code-block:: python

          out = fluid.layers.greater_equal(x=label, y=limit)
    """
    helper = LayerHelper("greater_equal", **locals())
    if cond is None:
        cond = helper.create_variable_for_type_inference(dtype='bool')
        cond.stop_gradient = True

    attrs = dict()
    if force_init_on_cpu():
        attrs['force_cpu'] = force_init_on_cpu()

    helper.append_op(
        type='greater_equal',
        inputs={'X': [x],
                'Y': [y]},
        outputs={'Out': [cond]},
        attrs=attrs)
    return cond
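
Worth noting about the three additions above: like `less_than`, they tag the op with `force_cpu` whenever `force_init_on_cpu()` is active, so the comparison is pinned to CPU inside an `init_on_cpu()` scope. A small sketch of that interaction (the constants are placeholders):

```python
import paddle.fluid as fluid
from paddle.fluid.initializer import init_on_cpu

x = fluid.layers.fill_constant(shape=[1], dtype='float32', value=1.0)
y = fluid.layers.fill_constant(shape=[1], dtype='float32', value=2.0)

# Inside this scope force_init_on_cpu() returns True, so greater_equal is
# appended with force_cpu=True and runs on CPU even in a CUDA program.
with init_on_cpu():
    ge = fluid.layers.greater_equal(x=x, y=y)
```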
def equal(x, y, cond=None):
    """
    This layer returns the truth value of :math:`x == y` elementwise.
...@@ -1000,6 +1109,34 @@ def equal(x, y, cond=None):
    return cond

def not_equal(x, y, cond=None):
    """
    This layer returns the truth value of :math:`x != y` elementwise, which is equivalent to the overloaded operator `!=`.

    Args:
        x(Variable): First operand of *not_equal*
        y(Variable): Second operand of *not_equal*
        cond(Variable|None): Optional output variable to store the result of *not_equal*

    Returns:
        Variable: The tensor variable storing the output of *not_equal*.

    Examples:
        .. code-block:: python

          out = fluid.layers.not_equal(x=label, y=limit)
    """
    helper = LayerHelper("not_equal", **locals())
    if cond is None:
        cond = helper.create_variable_for_type_inference(dtype='bool')
        cond.stop_gradient = True

    helper.append_op(
        type='not_equal', inputs={'X': [x],
                                  'Y': [y]}, outputs={'Out': [cond]})
    return cond
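
A possible downstream use of `not_equal` (again a sketch, not part of the change): since the output is a bool tensor, it can be cast and reduced, for example to count how many predictions disagree with the labels. `pred` and `label` are hypothetical int64 tensors of matching shape.

```python
import paddle.fluid as fluid

pred = fluid.layers.data(name='pred', shape=[1], dtype='int64')
label = fluid.layers.data(name='label', shape=[1], dtype='int64')

# Elementwise mask of mismatches, then cast bool -> float32 and sum it
# to get the number of wrong predictions in the batch.
diff = fluid.layers.not_equal(x=pred, y=label)
num_wrong = fluid.layers.reduce_sum(fluid.layers.cast(diff, dtype='float32'))
```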
def array_read(array, i):
    """
    This function performs the operation to read the data in as an
...