From 1e8346fedd12fd8b65c5591ffba230c2d5feee05 Mon Sep 17 00:00:00 2001
From: 201716010711 <87008376+201716010711@users.noreply.github.com>
Date: Tue, 22 Nov 2022 13:34:55 +0800
Subject: [PATCH] delete logical_not api (#48078)

---
 .../dygraph_to_static/convert_operators.py    |  3 +-
 python/paddle/fluid/layers/control_flow.py    |  9 ++---
 python/paddle/fluid/layers/nn.py              | 36 -------------------
 python/paddle/fluid/layers/rnn.py             | 16 ++++-----
 .../tests/book/test_machine_translation.py    |  2 +-
 .../fluid/tests/unittests/dist_transformer.py |  3 +-
 .../unittests/ipu/test_logical_not_op_ipu.py  |  2 +-
 7 files changed, 18 insertions(+), 53 deletions(-)

diff --git a/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py b/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py
index fc91a3a797..6de6d79267 100644
--- a/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py
+++ b/python/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py
@@ -37,7 +37,6 @@ from paddle.fluid.layers import (
     cast,
     control_flow,
     logical_and,
-    logical_not,
     logical_or,
     nn,
 )
@@ -318,7 +317,7 @@ def convert_logical_not(x):
 
 def _run_paddle_logical_not(x):
     x = cast_bool_if_necessary(x)
-    return logical_not(x)
+    return paddle.logical_not(x)
 
 
 def _run_py_logical_not(x):
diff --git a/python/paddle/fluid/layers/control_flow.py b/python/paddle/fluid/layers/control_flow.py
index 658941ad44..ee53f23684 100755
--- a/python/paddle/fluid/layers/control_flow.py
+++ b/python/paddle/fluid/layers/control_flow.py
@@ -27,7 +27,7 @@ from ..framework import (
     in_dygraph_mode,
 )
 from ..layer_helper import LayerHelper, unique_name
-from .nn import logical_and, logical_not, logical_or
+from .nn import logical_and, logical_or
 from .utils import (
     assert_same_structure,
     map_structure,
@@ -49,6 +49,7 @@ from ..data_feeder import (
     check_dtype,
 )
 from ..backward import _infer_var_data_type_shape_
+import paddle
 from paddle import _C_ops, _legacy_C_ops
 
 __all__ = [
@@ -2807,7 +2808,7 @@ def cond(pred, true_fn=None, false_fn=None, name=None, return_names=None):
                 )
             )
         false_cond_block = ConditionalBlock(
-            [logical_not(pred)], is_scalar_condition=True
+            [paddle.logical_not(pred)], is_scalar_condition=True
         )
         with false_cond_block.block():
             origin_false_output = false_fn()
@@ -3260,13 +3261,13 @@ class Switch:
 
         if len(self.pre_not_conditions) == 0:
             cond_block = ConditionalBlock([condition], is_scalar_condition=True)
-            not_cond = logical_not(x=condition)
+            not_cond = paddle.logical_not(x=condition)
             self.pre_not_conditions.append(not_cond)
         else:
             pre_cond_num = len(self.pre_not_conditions)
             pre_not_cond = self.pre_not_conditions[pre_cond_num - 1]
             new_not_cond = logical_and(
-                x=pre_not_cond, y=logical_not(x=condition)
+                x=pre_not_cond, y=paddle.logical_not(x=condition)
             )
             self.pre_not_conditions.append(new_not_cond)
             cond_block = ConditionalBlock(
diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py
index d6b2a4bbbc..b699de304e 100644
--- a/python/paddle/fluid/layers/nn.py
+++ b/python/paddle/fluid/layers/nn.py
@@ -151,7 +151,6 @@ __all__ = [
     'size',
     'logical_and',
     'logical_or',
-    'logical_not',
     'clip',
     'clip_by_norm',
     'mean',
@@ -11549,41 +11548,6 @@ def logical_or(x, y, out=None, name=None):
     )
 
 
-@templatedoc()
-def logical_not(x, out=None, name=None):
-    """
-
-    ``logical_not`` operator computes element-wise logical NOT on ``x``, and returns ``out``. ``out`` is N-dim boolean ``Variable``.
-    Each element of ``out`` is calculated by
-
-    .. math::
-
-        out = !x
-
-    Args:
-        x(Tensor): Operand of logical_not operator. Must be a Tensor of type bool, int8, int16, in32, in64, float32, or float64.
-        out(Tensor): The ``Tensor`` that specifies the output of the operator, which can be any ``Tensor`` that has been created in the program. The default value is None, and a new ``Tensor` will be created to save the output.
-        name(str|None): The default value is None. Normally there is no need for users to set this property. For more information, please refer to :ref:`api_guide_Name`.
-
-    Returns:
-        Tensor: ${out_comment}
-
-    Examples:
-        .. code-block:: python
-
-            import paddle
-
-            x = paddle.to_tensor([True, False, True, False])
-            res = paddle.logical_not(x)
-            print(res) # [False True False True]
-    """
-    if in_dygraph_mode():
-        return _C_ops.logical_not(x)
-    return _logical_op(
-        op_name="logical_not", x=x, y=None, name=name, out=out, binary_op=False
-    )
-
-
 @templatedoc()
 def clip(x, min, max, name=None):
     """
diff --git a/python/paddle/fluid/layers/rnn.py b/python/paddle/fluid/layers/rnn.py
index 82da847bbc..3401fe4687 100644
--- a/python/paddle/fluid/layers/rnn.py
+++ b/python/paddle/fluid/layers/rnn.py
@@ -1332,7 +1332,7 @@ class BeamSearchDecoder(Decoder):
             beam_state.lengths, beam_indices, self.batch_size
         )
         next_lengths = next_lengths + tensor.cast(
-            nn.logical_not(next_finished), beam_state.lengths.dtype
+            paddle.logical_not(next_finished), beam_state.lengths.dtype
         )
         next_finished = control_flow.logical_or(
             next_finished,
@@ -1481,7 +1481,7 @@ def _dynamic_decode_imperative(
         initial_states,
         initial_finished,
     )
-    cond = control_flow.logical_not((nn.reduce_all(initial_finished)))
+    cond = paddle.logical_not((nn.reduce_all(initial_finished)))
     sequence_lengths = tensor.cast(tensor.zeros_like(initial_finished), "int64")
 
     outputs = None
@@ -1505,7 +1505,7 @@ def _dynamic_decode_imperative(
             next_sequence_lengths = nn.elementwise_add(
                 sequence_lengths,
                 tensor.cast(
-                    control_flow.logical_not(finished), sequence_lengths.dtype
+                    paddle.logical_not(finished), sequence_lengths.dtype
                 ),
             )
             if impute_finished:  # rectify the states for the finished.
@@ -1539,7 +1539,7 @@ def _dynamic_decode_imperative(
         control_flow.increment(x=step_idx_tensor, value=1.0, in_place=True)
         step_idx += 1
 
-        cond = control_flow.logical_not(nn.reduce_all(finished))
+        cond = paddle.logical_not(nn.reduce_all(finished))
         if max_step_num is not None and step_idx > max_step_num:
             break
 
@@ -1587,7 +1587,7 @@ def _dynamic_decode_declarative(
     global_finished.stop_gradient = True
     step_idx = tensor.fill_constant(shape=[1], dtype="int64", value=0)
 
-    cond = control_flow.logical_not((nn.reduce_all(initial_finished)))
+    cond = paddle.logical_not((nn.reduce_all(initial_finished)))
     if max_step_num is not None:
         max_step_num = tensor.fill_constant(
             shape=[1], dtype="int64", value=max_step_num
@@ -1665,7 +1665,7 @@ def _dynamic_decode_declarative(
             next_sequence_lengths = nn.elementwise_add(
                 sequence_lengths,
                 tensor.cast(
-                    control_flow.logical_not(global_finished),
+                    paddle.logical_not(global_finished),
                     sequence_lengths.dtype,
                 ),
             )
@@ -1720,12 +1720,12 @@ def _dynamic_decode_declarative(
         )
         if max_step_num is not None:
             control_flow.logical_and(
-                control_flow.logical_not(nn.reduce_all(global_finished)),
+                paddle.logical_not(nn.reduce_all(global_finished)),
                 control_flow.less_equal(step_idx, max_step_num),
                 cond,
             )
         else:
-            control_flow.logical_not(nn.reduce_all(global_finished), cond)
+            paddle.logical_not(nn.reduce_all(global_finished), cond)
 
     final_outputs = map_structure(
         lambda array: tensor.tensor_array_to_tensor(
diff --git a/python/paddle/fluid/tests/book/test_machine_translation.py b/python/paddle/fluid/tests/book/test_machine_translation.py
index 4ae6462f02..27da08ea00 100644
--- a/python/paddle/fluid/tests/book/test_machine_translation.py
+++ b/python/paddle/fluid/tests/book/test_machine_translation.py
@@ -162,7 +162,7 @@ def decoder_decode(context, is_sparse):
         # update the break condition: up to the max length or all candidates of
         # source sentences have ended.
         length_cond = pd.less_than(x=counter, y=array_len)
-        finish_cond = pd.logical_not(pd.is_empty(x=selected_ids))
+        finish_cond = paddle.logical_not(pd.is_empty(x=selected_ids))
         pd.logical_and(x=length_cond, y=finish_cond, out=cond)
 
     translation_ids, translation_scores = pd.beam_search_decode(
diff --git a/python/paddle/fluid/tests/unittests/dist_transformer.py b/python/paddle/fluid/tests/unittests/dist_transformer.py
index 514fcf4b86..88ec3188c9 100644
--- a/python/paddle/fluid/tests/unittests/dist_transformer.py
+++ b/python/paddle/fluid/tests/unittests/dist_transformer.py
@@ -26,6 +26,7 @@ import tarfile
 import paddle.fluid as fluid
 import paddle.fluid.layers as layers
 from test_dist_base import TestDistRunnerBase, runtime_main, RUN_STEP
+import paddle
 
 const_para_attr = fluid.ParamAttr(initializer=fluid.initializer.Constant(0.001))
 const_bias_attr = const_para_attr
@@ -1860,7 +1861,7 @@ def fast_decode(
             layers.assign(pre_caches[i]["k"], caches[i]["k"])
             layers.assign(pre_caches[i]["v"], caches[i]["v"])
         length_cond = layers.less_than(x=step_idx, y=max_len)
-        finish_cond = layers.logical_not(layers.is_empty(x=selected_ids))
+        finish_cond = paddle.logical_not(layers.is_empty(x=selected_ids))
         layers.logical_and(x=length_cond, y=finish_cond, out=cond)
 
     finished_ids, finished_scores = layers.beam_search_decode(
diff --git a/python/paddle/fluid/tests/unittests/ipu/test_logical_not_op_ipu.py b/python/paddle/fluid/tests/unittests/ipu/test_logical_not_op_ipu.py
index c75f6faa65..26c63cc583 100644
--- a/python/paddle/fluid/tests/unittests/ipu/test_logical_not_op_ipu.py
+++ b/python/paddle/fluid/tests/unittests/ipu/test_logical_not_op_ipu.py
@@ -42,7 +42,7 @@ class TestBase(IPUOpTest):
         x = paddle.static.data(
             name=self.feed_list[0], shape=self.feed_shape[0], dtype="bool"
         )
-        out = paddle.fluid.layers.logical_not(x)
+        out = paddle.logical_not(x)
         self.fetch_list = [out.name]
 
     def run_model(self, exec_mode):
-- 
GitLab
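
For reference, a minimal usage sketch of `paddle.logical_not`, the public API that every call site above is switched to; it mirrors the example kept in the removed `fluid.layers.logical_not` docstring (the comment shows the expected element values, while the exact Tensor repr depends on the Paddle version):

    import paddle

    # element-wise logical NOT on a boolean tensor
    x = paddle.to_tensor([True, False, True, False])
    res = paddle.logical_not(x)
    print(res)  # expected values: [False, True, False, True]

`paddle.logical_not(x, out=None, name=None)` also accepts a pre-created `out` tensor, which is how the static-graph call sites in rnn.py reuse an existing `cond` Variable, e.g. `paddle.logical_not(nn.reduce_all(global_finished), cond)`.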