diff --git a/python/paddle/fluid/tests/unittests/test_sync_batch_norm_op.py b/python/paddle/fluid/tests/unittests/test_sync_batch_norm_op.py
index 4fa64bef32fff0312e5076e854026226754e5b1c..baac0af5d61afe570f46cd65d292262f98e219af 100644
--- a/python/paddle/fluid/tests/unittests/test_sync_batch_norm_op.py
+++ b/python/paddle/fluid/tests/unittests/test_sync_batch_norm_op.py
@@ -25,6 +25,7 @@ import six
 import paddle
 import paddle.fluid.core as core
 import paddle.fluid as fluid
+import paddle.nn as nn
 from paddle.fluid import compiler
 from paddle.fluid import Program, program_guard
 
@@ -244,5 +245,34 @@ class TestConvertSyncBatchNorm(unittest.TestCase):
                     isinstance(model[idx], paddle.nn.SyncBatchNorm), True)
 
 
+class TestConvertSyncBatchNormCase2(unittest.TestCase):
+    def test_convert(self):
+        if not core.is_compiled_with_cuda():
+            return
+
+        class Net(nn.Layer):
+            def __init__(self):
+                super(Net, self).__init__()
+                self.conv1 = nn.Conv2D(3, 5, 3)
+                self.bn = []
+                bn = self.add_sublayer('bn', nn.BatchNorm2D(5))
+                self.bn.append(bn)
+
+            def forward(self, x):
+                x = self.conv1(x)
+                for bn in self.bn:
+                    x = bn(x)
+                return x
+
+        model = nn.Sequential()
+        model.add_sublayer('net1', Net())
+        model.add_sublayer('net2', Net())
+        compare_model = nn.Sequential()
+        compare_model.add_sublayer('net1', Net())
+        compare_model.add_sublayer('net2', Net())
+        model = nn.SyncBatchNorm.convert_sync_batchnorm(model)
+        self.assertEqual(len(compare_model.sublayers()), len(model.sublayers()))
+
+
 if __name__ == '__main__':
     unittest.main()
diff --git a/python/paddle/nn/layer/norm.py b/python/paddle/nn/layer/norm.py
index d8a4066cf0311e8425c537585d73434709eabd63..256694c7af67dd23a5056d929b8788676cddcb56 100644
--- a/python/paddle/nn/layer/norm.py
+++ b/python/paddle/nn/layer/norm.py
@@ -1121,7 +1121,7 @@ class SyncBatchNorm(_BatchNormBase):
             layer_output._mean = layer._mean
             layer_output._variance = layer._variance
 
-        for name, sublayer in layer.named_sublayers():
+        for name, sublayer in layer.named_children():
             layer_output.add_sublayer(name,
                                       cls.convert_sync_batchnorm(sublayer))
         del layer