diff --git a/doc/api/v2/config/layer.rst b/doc/api/v2/config/layer.rst index db33a20487e579cda67a01c52ee646829df0f4e6..8a8774bd696d60945c87487acbd3c2dee5a0c0bd 100644 --- a/doc/api/v2/config/layer.rst +++ b/doc/api/v2/config/layer.rst @@ -109,6 +109,12 @@ sum_to_one_norm :members: sum_to_one_norm :noindex: +cross_channel_norm +------------------ +.. automodule:: paddle.v2.layer + :members: cross_channel_norm + :noindex: + Recurrent Layers ================ diff --git a/paddle/gserver/layers/CrossChannelNormLayer.cpp b/paddle/gserver/layers/CrossChannelNormLayer.cpp index dd3612c49da2b922de28104360c5bdd67d9cbedc..0bc90966e2bfe03293175e69353b274d63718580 100644 --- a/paddle/gserver/layers/CrossChannelNormLayer.cpp +++ b/paddle/gserver/layers/CrossChannelNormLayer.cpp @@ -40,7 +40,6 @@ void CrossChannelNormLayer::forward(PassType passType) { normBuffer_->addScalar(*normBuffer_, 1e-6); inV->square2(*dataBuffer_); for (size_t i = 0; i < batchSize; i++) { - spatialBuffer_->zeroMem(); MatrixPtr inTmp = Matrix::create( inV->getData() + i * dataDim, channels_, spatialDim, false, useGpu_); MatrixPtr dataTmp = Matrix::create(dataBuffer_->getData() + i * dataDim, @@ -80,7 +79,6 @@ void CrossChannelNormLayer::backward(const UpdateCallback& callback) { scaleDiff_->zeroMem(); for (size_t i = 0; i < batchSize; i++) { spatialBuffer_->zeroMem(); - channelBuffer_->zeroMem(); // propagate to param. MatrixPtr dataBufferTmp = Matrix::create(dataBuffer_->getData() + i * dataDim, diff --git a/paddle/gserver/layers/NormLayer.h b/paddle/gserver/layers/NormLayer.h index 59ba226dfe5f5f96ce0d5a97c05851c60cace287..d896abbd757b89694e0cd6ec25adaa100ce4443e 100644 --- a/paddle/gserver/layers/NormLayer.h +++ b/paddle/gserver/layers/NormLayer.h @@ -66,11 +66,10 @@ public: }; /** - * This layer applys normalize across the channels of each sample to a - * conv layer's output and scale the output by a group of trainable factors - * which dimensions equal to the channel's number. 
- * - Input: One and only one input layer are accepted. The input layer must be - * be a data output layer. + * This layer applies normalization across the channels of each sample to a + * conv layer's output, and scales the output by a group of trainable factors + * whose number is equal to the number of channels. + * - Input: One and only one input layer is accepted. * - Output: The normalized data of the input data. * Reference: * Wei Liu, Dragomir Anguelov, Dumitru Erhan, Christian Szegedy, Scott Reed, diff --git a/python/paddle/trainer_config_helpers/layers.py b/python/paddle/trainer_config_helpers/layers.py index cac71539f59f392e65ac6023da71999816d4d3bb..df91a6d8cf8a9d114a1fdce0f624c6a9c8ba22d0 100755 --- a/python/paddle/trainer_config_helpers/layers.py +++ b/python/paddle/trainer_config_helpers/layers.py @@ -1015,6 +1015,7 @@ def cross_channel_norm_layer(input, name=None, param_attr=None): This layer applys normalize across the channels of each sample to a conv layer's output and scale the output by a group of trainable factors which dimensions equal to the channel's number. + :param name: The name of this layer. :type name: basestring :param input: The input layer.