From 2ca53fa65fe5f5bd42ead699b97f1816bda0174f Mon Sep 17 00:00:00 2001
From: chentianyu03
Date: Sat, 17 Oct 2020 16:05:35 +0800
Subject: [PATCH] change paddle.fluid.layers.reduce_sum to paddle.sum in sample codes (#27998) (#28017)

* change paddle.fluid.layers.reduce_sum to paddle.sum in sample codes

* format codes
---
 paddle/fluid/pybind/imperative.cc   |  2 +-
 python/paddle/nn/functional/loss.py | 14 +++++++++-----
 2 files changed, 10 insertions(+), 6 deletions(-)

diff --git a/paddle/fluid/pybind/imperative.cc b/paddle/fluid/pybind/imperative.cc
index b5dad4034a5..4c46af3199e 100644
--- a/paddle/fluid/pybind/imperative.cc
+++ b/paddle/fluid/pybind/imperative.cc
@@ -712,7 +712,7 @@ void BindImperative(py::module *m_ptr) {
                     tmp.stop_gradient=False
                     inputs.append(tmp)
                 ret = paddle.sums(inputs2)
-                loss = paddle.fluid.layers.reduce_sum(ret)
+                loss = paddle.sum(ret)
                 loss.backward()
                 print("Before clear_gradient {}".format(loss.grad))
                 loss.clear_gradient()
diff --git a/python/paddle/nn/functional/loss.py b/python/paddle/nn/functional/loss.py
index 2ca2015b729..b056029fb5a 100644
--- a/python/paddle/nn/functional/loss.py
+++ b/python/paddle/nn/functional/loss.py
@@ -305,13 +305,15 @@ def binary_cross_entropy_with_logits(logit,
     out = paddle.fluid.layers.sigmoid_cross_entropy_with_logits(
         logit, label, name=sigmoid_name)
 
-    one = paddle.fluid.layers.fill_constant(shape=[1], value=1.0, dtype=logit.dtype)
+    one = paddle.fluid.layers.fill_constant(
+        shape=[1], value=1.0, dtype=logit.dtype)
     if pos_weight is not None:
         fluid.data_feeder.check_variable_and_dtype(
             pos_weight, 'pos_weight', ['float32', 'float64'],
             'binary_cross_entropy_with_logits')
         log_weight = paddle.add(
-            paddle.multiply(label, paddle.fluid.layers.elementwise_sub(pos_weight, one)),
+            paddle.multiply(
+                label, paddle.fluid.layers.elementwise_sub(pos_weight, one)),
             one)
         pos_weight_name = name if reduction == 'none' and weight is None else None
         out = paddle.multiply(out, log_weight, name=pos_weight_name)
@@ -618,7 +620,8 @@ def margin_ranking_loss(input,
 
     if margin != 0.0:
         margin_var = out.block.create_var(dtype=out.dtype)
-        paddle.fluid.layers.fill_constant([1], out.dtype, margin, out=margin_var)
+        paddle.fluid.layers.fill_constant(
+            [1], out.dtype, margin, out=margin_var)
         out = paddle.add(out, margin_var)
 
     result_out = helper.create_variable_for_type_inference(input.dtype)
@@ -729,7 +732,8 @@ def l1_loss(input, label, reduction='mean', name=None):
         unreduced = paddle.fluid.layers.elementwise_sub(input, label, act='abs')
         return paddle.mean(unreduced, name=name)
     else:
-        return paddle.fluid.layers.elementwise_sub(input, label, act='abs', name=name)
+        return paddle.fluid.layers.elementwise_sub(
+            input, label, act='abs', name=name)
 
 
 def nll_loss(input,
@@ -1342,7 +1346,7 @@ def sigmoid_focal_loss(logit,
             label = paddle.to_tensor([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0]], dtype='float32')
             one = paddle.to_tensor([1.], dtype='float32')
             fg_label = paddle.greater_equal(label, one)
-            fg_num = paddle.fluid.layers.reduce_sum(paddle.cast(fg_label, dtype='float32'))
+            fg_num = paddle.sum(paddle.cast(fg_label, dtype='float32'))
             output = paddle.nn.functional.sigmoid_focal_loss(logit, label, normalizer=fg_num)
             print(output.numpy())  # [0.65782464]

--
GitLab
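
Note on the change itself: the patch swaps the legacy fluid reduction op for the Paddle 2.x tensor API in the sample code only; behavior is unchanged. A minimal sketch of the replacement, assuming Paddle 2.x in dynamic-graph mode (the tensor values here are illustrative, not taken from the patch):

    import paddle

    x = paddle.to_tensor([[1.0, 2.0], [3.0, 4.0]])

    # old style, routed through the fluid namespace (what the patch removes):
    # total = paddle.fluid.layers.reduce_sum(x)

    # new style used by the patch:
    total = paddle.sum(x)              # sum over all elements -> 10.0
    per_row = paddle.sum(x, axis=1)    # reduce along one axis -> [3.0, 7.0]
    print(float(total), per_row.numpy())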