From 1a92098a4173b6949859498cc91d551b5091ac79 Mon Sep 17 00:00:00 2001 From: 201716010711 <87008376+201716010711@users.noreply.github.com> Date: Mon, 28 Nov 2022 10:28:28 +0800 Subject: [PATCH] clean fluid task: transfer logical_and api (#48341) --- .../dygraph_to_static/convert_operators.py | 3 +- python/paddle/fluid/layers/control_flow.py | 5 +-- python/paddle/fluid/layers/nn.py | 41 ------------------- python/paddle/fluid/layers/rnn.py | 2 +- .../tests/book/test_machine_translation.py | 2 +- .../fluid/tests/unittests/dist_transformer.py | 2 +- .../unittests/dygraph_to_static/test_bmn.py | 4 +- .../unittests/ipu/test_logical_x_op_ipu.py | 2 +- 8 files changed, 9 insertions(+), 52 deletions(-) diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py b/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py index 34788b754e8..91084b3ff40 100644 --- a/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py +++ b/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py @@ -36,7 +36,6 @@ from paddle.fluid.layers import ( from paddle.fluid.layers import ( cast, control_flow, - logical_and, nn, ) from paddle.fluid.layers.control_flow import ( @@ -233,7 +232,7 @@ def convert_logical_and(x_func, y_func): def _run_paddle_logical_and(x, y): x = cast_bool_if_necessary(x) y = cast_bool_if_necessary(y) - return logical_and(x, y) + return paddle.logical_and(x, y) def _run_py_logical_and(x_func, y_func): diff --git a/python/paddle/fluid/layers/control_flow.py b/python/paddle/fluid/layers/control_flow.py index 0242af9cb2d..2e456e7aa31 100755 --- a/python/paddle/fluid/layers/control_flow.py +++ b/python/paddle/fluid/layers/control_flow.py @@ -27,7 +27,6 @@ from ..framework import ( in_dygraph_mode, ) from ..layer_helper import LayerHelper, unique_name -from .nn import logical_and from .utils import ( assert_same_structure, map_structure, @@ -3278,12 +3277,12 @@ class Switch: else: pre_cond_num = len(self.pre_not_conditions) pre_not_cond = self.pre_not_conditions[pre_cond_num - 1] - new_not_cond = logical_and( + new_not_cond = paddle.logical_and( x=pre_not_cond, y=paddle.logical_not(x=condition) ) self.pre_not_conditions.append(new_not_cond) cond_block = ConditionalBlock( - [logical_and(x=pre_not_cond, y=condition)], + [paddle.logical_and(x=pre_not_cond, y=condition)], is_scalar_condition=True, ) diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py index 763ee1366ab..c28a7a35384 100644 --- a/python/paddle/fluid/layers/nn.py +++ b/python/paddle/fluid/layers/nn.py @@ -124,7 +124,6 @@ __all__ = [ 'strided_slice', 'shape', 'size', - 'logical_and', 'clip', 'clip_by_norm', 'mean', @@ -8435,46 +8434,6 @@ def _logical_op(op_name, x, y, out=None, name=None, binary_op=True): return out -def logical_and(x, y, out=None, name=None): - r""" - - ``logical_and`` operator computes element-wise logical AND on ``x`` and ``y``, and returns ``out``. ``out`` is N-dim boolean ``Tensor``. - Each element of ``out`` is calculated by - - .. math:: - - out = x \&\& y - - .. note:: - ``paddle.logical_and`` supports broadcasting. If you want know more about broadcasting, please refer to :ref:`user_guide_broadcasting`. - - Args: - x (Tensor): the input tensor, it's data type should be one of bool, int8, int16, in32, in64, float32, float64. - y (Tensor): the input tensor, it's data type should be one of bool, int8, int16, in32, in64, float32, float64. 
- out(Tensor): The ``Tensor`` that specifies the output of the operator, which can be any ``Tensor`` that has been created in the program. The default value is None, and a new ``Tensor`` will be created to save the output. - name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`. - - Returns: - N-D Tensor. A location into which the result is stored. It's dimension equals with ``x``. - - Examples: - .. code-block:: python - - import paddle - - x = paddle.to_tensor([True]) - y = paddle.to_tensor([True, False, True, False]) - res = paddle.logical_and(x, y) - print(res) # [True False True False] - """ - if in_dygraph_mode(): - return _C_ops.logical_and(x, y) - - return _logical_op( - op_name="logical_and", x=x, y=y, name=name, out=out, binary_op=True - ) - - @templatedoc() def clip(x, min, max, name=None): """ diff --git a/python/paddle/fluid/layers/rnn.py b/python/paddle/fluid/layers/rnn.py index d457f43c664..6ffa3512b79 100644 --- a/python/paddle/fluid/layers/rnn.py +++ b/python/paddle/fluid/layers/rnn.py @@ -1720,7 +1720,7 @@ def _dynamic_decode_declarative( states_arrays, ) if max_step_num is not None: - control_flow.logical_and( + paddle.logical_and( paddle.logical_not(nn.reduce_all(global_finished)), control_flow.less_equal(step_idx, max_step_num), cond, diff --git a/python/paddle/fluid/tests/book/test_machine_translation.py b/python/paddle/fluid/tests/book/test_machine_translation.py index b585c34adad..58426433ef2 100644 --- a/python/paddle/fluid/tests/book/test_machine_translation.py +++ b/python/paddle/fluid/tests/book/test_machine_translation.py @@ -166,7 +166,7 @@ def decoder_decode(context, is_sparse): # source sentences have ended. length_cond = pd.less_than(x=counter, y=array_len) finish_cond = paddle.logical_not(pd.is_empty(x=selected_ids)) - pd.logical_and(x=length_cond, y=finish_cond, out=cond) + paddle.logical_and(x=length_cond, y=finish_cond, out=cond) translation_ids, translation_scores = pd.beam_search_decode( ids=ids_array, scores=scores_array, beam_size=beam_size, end_id=10 diff --git a/python/paddle/fluid/tests/unittests/dist_transformer.py b/python/paddle/fluid/tests/unittests/dist_transformer.py index b9f32fd6c99..4d12648354a 100644 --- a/python/paddle/fluid/tests/unittests/dist_transformer.py +++ b/python/paddle/fluid/tests/unittests/dist_transformer.py @@ -1863,7 +1863,7 @@ def fast_decode( layers.assign(pre_caches[i]["v"], caches[i]["v"]) length_cond = layers.less_than(x=step_idx, y=max_len) finish_cond = paddle.logical_not(layers.is_empty(x=selected_ids)) - layers.logical_and(x=length_cond, y=finish_cond, out=cond) + paddle.logical_and(x=length_cond, y=finish_cond, out=cond) finished_ids, finished_scores = layers.beam_search_decode( ids, scores, beam_size=beam_size, end_id=eos_idx diff --git a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_bmn.py b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_bmn.py index 730f17dc2b7..f5343386fdb 100644 --- a/python/paddle/fluid/tests/unittests/dygraph_to_static/test_bmn.py +++ b/python/paddle/fluid/tests/unittests/dygraph_to_static/test_bmn.py @@ -351,9 +351,9 @@ def bmn_loss_func( gt_iou_map = fluid.layers.elementwise_mul(gt_iou_map, mask) u_hmask = fluid.layers.cast(x=gt_iou_map > 0.7, dtype=DATATYPE) - u_mmask = fluid.layers.logical_and(gt_iou_map <= 0.7, gt_iou_map > 0.3) + u_mmask = paddle.logical_and(gt_iou_map <= 0.7, gt_iou_map > 0.3) u_mmask = fluid.layers.cast(x=u_mmask, dtype=DATATYPE) - u_lmask = 
fluid.layers.logical_and(gt_iou_map <= 0.3, gt_iou_map >= 0.0) + u_lmask = paddle.logical_and(gt_iou_map <= 0.3, gt_iou_map >= 0.0) u_lmask = fluid.layers.cast(x=u_lmask, dtype=DATATYPE) u_lmask = fluid.layers.elementwise_mul(u_lmask, mask) diff --git a/python/paddle/fluid/tests/unittests/ipu/test_logical_x_op_ipu.py b/python/paddle/fluid/tests/unittests/ipu/test_logical_x_op_ipu.py index affa117682b..bc4c1f179e9 100644 --- a/python/paddle/fluid/tests/unittests/ipu/test_logical_x_op_ipu.py +++ b/python/paddle/fluid/tests/unittests/ipu/test_logical_x_op_ipu.py @@ -32,7 +32,7 @@ class TestLogicalAnd(IPUOpTest): return False def set_test_op(self): - self.op = paddle.fluid.layers.logical_and + self.op = paddle.logical_and def set_op_attrs(self): self.attrs = {} -- GitLab
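
Migration sketch for code that still calls the removed fluid API (a minimal example, assuming Paddle 2.x where paddle.logical_and is the public op; the tensor values are taken from the docstring deleted above in nn.py):

import paddle

x = paddle.to_tensor([True])
y = paddle.to_tensor([True, False, True, False])

# Before this patch: fluid.layers.logical_and(x, y)
# After this patch:  paddle.logical_and(x, y)
res = paddle.logical_and(x, y)
print(res)  # [True, False, True, False]  (x is broadcast against y)

In the static-graph call sites touched above (test_machine_translation.py, dist_transformer.py), the result is written into an existing condition variable via the out argument, e.g. paddle.logical_and(x=length_cond, y=finish_cond, out=cond), so the enclosing while_op keeps referring to the same Variable.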