From b1fc23869417d3bf6c1f647042c8ecfea58043b4 Mon Sep 17 00:00:00 2001
From: guochaorong
Date: Thu, 23 Aug 2018 22:36:53 +0800
Subject: [PATCH] Revert "Disable in_place in batch_norm API. (#12736)"

This reverts commit f5d5d7b2d989e8aa5b5e637fd04318566b23f2fe.
---
 paddle/fluid/operators/batch_norm_op.cc                   | 2 +-
 python/paddle/fluid/layers/nn.py                          | 9 ++-------
 python/paddle/fluid/nets.py                               | 2 +-
 .../paddle/fluid/tests/book/test_image_classification.py  | 5 +----
 4 files changed, 5 insertions(+), 13 deletions(-)

diff --git a/paddle/fluid/operators/batch_norm_op.cc b/paddle/fluid/operators/batch_norm_op.cc
index 969f75544f..5912a1a17c 100644
--- a/paddle/fluid/operators/batch_norm_op.cc
+++ b/paddle/fluid/operators/batch_norm_op.cc
@@ -135,7 +135,7 @@ class BatchNormOpMaker : public framework::OpProtoAndCheckerMaker {
     AddInput("Variance",
              "The global variance (for training) "
              "or estimated Variance (for testing)");
-    AddOutput("Y", "result after normalization");
+    AddOutput("Y", "result after normalization").Reuse("X");
     AddOutput("MeanOut",
               "Share memory with Mean. "
               "Store the global mean when training")
diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py
index 83250f65e4..4bd260a005 100644
--- a/python/paddle/fluid/layers/nn.py
+++ b/python/paddle/fluid/layers/nn.py
@@ -27,7 +27,6 @@ from . import utils
 import random
 from .. import unique_name
 from functools import reduce
-import warnings
 
 __all__ = [
     'fc',
@@ -2048,7 +2047,7 @@ def batch_norm(input,
         param_attr(ParamAttr): The parameter attribute for Parameter `scale`.
         bias_attr(ParamAttr): The parameter attribute for Parameter `bias`.
         data_layout(string, default NCHW): NCHW|NHWC
-        in_place(bool, Default False): This argument is deprecated since 0.15.0.
+        in_place(bool, Default False): Make the input and output of batch norm reuse memory.
         use_mkldnn(bool, Default false): ${use_mkldnn_comment}
         name(string, Default None): A name for this layer(optional). If set None, the layer
             will be named automatically.
@@ -2070,10 +2069,6 @@ def batch_norm(input,
     helper = LayerHelper('batch_norm', **locals())
     dtype = helper.input_dtype()
 
-    if in_place:
-        raise warnings.warn("The argument in_place is deprecated since 0.15.0, "
-                            "please do not set it True.")
-
     input_shape = input.shape
     if data_layout == 'NCHW':
         channel_num = input_shape[1]
@@ -2123,7 +2118,7 @@ def batch_norm(input,
     saved_mean = helper.create_tmp_variable(dtype=dtype, stop_gradient=True)
     saved_variance = helper.create_tmp_variable(
         dtype=dtype, stop_gradient=True)
-    batch_norm_out = helper.create_tmp_variable(dtype)
+    batch_norm_out = input if in_place else helper.create_tmp_variable(dtype)
 
     helper.append_op(
         type="batch_norm",
diff --git a/python/paddle/fluid/nets.py b/python/paddle/fluid/nets.py
index 01563cbbb7..051fe84364 100644
--- a/python/paddle/fluid/nets.py
+++ b/python/paddle/fluid/nets.py
@@ -229,7 +229,7 @@ def img_conv_group(input,
             use_mkldnn=use_mkldnn)
 
         if conv_with_batchnorm[i]:
-            tmp = layers.batch_norm(input=tmp, act=conv_act)
+            tmp = layers.batch_norm(input=tmp, act=conv_act, in_place=True)
             drop_rate = conv_batchnorm_drop_rate[i]
             if abs(drop_rate) > 1e-5:
                 tmp = layers.dropout(x=tmp, dropout_prob=drop_rate)
diff --git a/python/paddle/fluid/tests/book/test_image_classification.py b/python/paddle/fluid/tests/book/test_image_classification.py
index cd1e8cd682..9fe361425c 100644
--- a/python/paddle/fluid/tests/book/test_image_classification.py
+++ b/python/paddle/fluid/tests/book/test_image_classification.py
@@ -256,10 +256,7 @@ def main(net_type, use_cuda, is_local=True):
     save_dirname = "image_classification_" + net_type + ".inference.model"
 
     train(net_type, use_cuda, save_dirname, is_local)
-
-    # There is bug in fluid.InferenceTranspiler for VGG.
-    if net_type == "resnet":
-        infer(use_cuda, save_dirname)
+    infer(use_cuda, save_dirname)
 
 
 class TestImageClassification(unittest.TestCase):
-- 
GitLab
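
Note (not part of the patch): a minimal usage sketch of the behavior this revert restores, assuming the Fluid-era API shown in the hunks above; the layer names and shapes here are illustrative only.

    import paddle.fluid as fluid

    # Input feature map in NCHW layout.
    data = fluid.layers.data(name='pixel', shape=[3, 48, 48], dtype='float32')
    conv = fluid.layers.conv2d(input=data, num_filters=64, filter_size=3)
    # With the revert applied, in_place=True lets the batch_norm output "Y"
    # reuse the memory of its input "X" (see Reuse("X") in batch_norm_op.cc),
    # which is what img_conv_group in nets.py relies on again.
    bn = fluid.layers.batch_norm(input=conv, act='relu', in_place=True)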