From 17e16c2548e95c84bffc4bed5c8199b8836841fb Mon Sep 17 00:00:00 2001
From: guosheng
Date: Wed, 2 Aug 2017 19:43:04 +0800
Subject: [PATCH] Refine RowL2NormLayer and add python unit test for it

---
 doc/api/v2/config/layer.rst                   |  5 ++++
 paddle/gserver/layers/RowL2NormLayer.cpp      | 25 +++++++++--------
 python/paddle/trainer/config_parser.py        |  8 +++---
 .../tests/configs/file_list.sh                |  2 +-
 .../protostr/test_row_l2_norm_layer.protostr  | 27 +++++++++++++++++++
 .../tests/configs/test_row_l2_norm_layer.py   |  6 +++++
 6 files changed, 55 insertions(+), 18 deletions(-)
 create mode 100644 python/paddle/trainer_config_helpers/tests/configs/protostr/test_row_l2_norm_layer.protostr
 create mode 100644 python/paddle/trainer_config_helpers/tests/configs/test_row_l2_norm_layer.py

diff --git a/doc/api/v2/config/layer.rst b/doc/api/v2/config/layer.rst
index daee55b7f..9a317d416 100644
--- a/doc/api/v2/config/layer.rst
+++ b/doc/api/v2/config/layer.rst
@@ -104,6 +104,11 @@ cross_channel_norm
 ------------------
 ..  autoclass:: paddle.v2.layer.cross_channel_norm
     :noindex:
+
+row_l2_norm
+-----------
+..  autoclass:: paddle.v2.layer.row_l2_norm
+    :noindex:
 
 Recurrent Layers
 ================
diff --git a/paddle/gserver/layers/RowL2NormLayer.cpp b/paddle/gserver/layers/RowL2NormLayer.cpp
index 1362c6ef1..0d609be43 100644
--- a/paddle/gserver/layers/RowL2NormLayer.cpp
+++ b/paddle/gserver/layers/RowL2NormLayer.cpp
@@ -13,7 +13,6 @@ See the License for the specific language governing permissions and
 limitations under the License. */
 
 #include "Layer.h"
-#include "paddle/math/Matrix.h"
 
 namespace paddle {
 
@@ -29,7 +28,7 @@ namespace paddle {
 class RowL2NormLayer : public Layer {
 protected:
   MatrixPtr inSquare_;
-  MatrixPtr reciSqrtRowSquareSum_;
+  MatrixPtr l2NormReciprocal_;
   MatrixPtr dotSum_;
 
 public:
@@ -67,11 +66,11 @@ void RowL2NormLayer::forward(PassType passType) {
 
   Matrix::resizeOrCreate(inSquare_, batchSize, dataDim, false, useGpu_);
   inV->square2(*inSquare_);
-  Matrix::resizeOrCreate(reciSqrtRowSquareSum_, batchSize, 1, false, useGpu_);
-  inSquare_->rowSum(*reciSqrtRowSquareSum_);
-  reciSqrtRowSquareSum_->sqrt2(*reciSqrtRowSquareSum_);
-  reciSqrtRowSquareSum_->scalarDiv(*reciSqrtRowSquareSum_, 1.0);
-  outV->rowScale(0, *inV, *reciSqrtRowSquareSum_);
+  Matrix::resizeOrCreate(l2NormReciprocal_, batchSize, 1, false, useGpu_);
+  inSquare_->rowSum(*l2NormReciprocal_);
+  l2NormReciprocal_->sqrt2(*l2NormReciprocal_);
+  l2NormReciprocal_->scalarDiv(*l2NormReciprocal_, 1.0);
+  outV->rowScale(0, *inV, *l2NormReciprocal_);
 }
 
 void RowL2NormLayer::backward(const UpdateCallback& callback) {
@@ -81,18 +80,18 @@ void RowL2NormLayer::backward(const UpdateCallback& callback) {
   MatrixPtr outG = getOutputGrad();
   size_t batchSize = inV->getHeight();
 
-  // inG[ij] += outG[ij] / reciSqrtRowSquareSum
-  // inG[ij] += -inV[ij] * reciSqrtRowSquareSum * reciSqrtRowSquareSum *
-  // DotMul(outG[i], inV[i])
+  // inG[ij] += outG[ij] * l2NormReciprocal
+  // inG[ij] += -inV[ij] * l2NormReciprocal * l2NormReciprocal *
+  // DotMul(outG[i], outV[i])
   if (inG) {
     Matrix::resizeOrCreate(dotSum_, batchSize, 1, false, useGpu_);
     dotSum_->zeroMem();
     dotSum_->rowDotMul(0, *outG, *outV);
-    dotSum_->dotMul(*dotSum_, *reciSqrtRowSquareSum_);
-    dotSum_->dotMul(*dotSum_, *reciSqrtRowSquareSum_);
+    dotSum_->dotMul(*dotSum_, *l2NormReciprocal_);
+    dotSum_->dotMul(*dotSum_, *l2NormReciprocal_);
     inSquare_->rowScale(0, *inV, *dotSum_);
     inG->sub(*inSquare_);
-    inG->addRowScale(0, *outG, *reciSqrtRowSquareSum_);
+    inG->addRowScale(0, *outG, *l2NormReciprocal_);
   }
 }
 
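Reviewer aside (not part of the patch): with y = x * r and r = l2NormReciprocal_ = 1 / ||x||_2 per row, the backward pass above computes inG += outG * r - inV * r^3 * dot(outG, inV); the r^3 factor arises because dotSum_ already carries one factor of r through outV before being multiplied by l2NormReciprocal_ twice. Below is a minimal NumPy sketch of the same math with a numerical-gradient check; the names forward, backward, x, and out_grad are illustrative only, not Paddle APIs.

import numpy as np

def forward(x):
    # y[i] = x[i] / ||x[i]||_2 per row, as in RowL2NormLayer::forward
    r = 1.0 / np.sqrt((x ** 2).sum(axis=1, keepdims=True))
    return x * r

def backward(x, out_grad):
    # Mirrors RowL2NormLayer::backward:
    #   inG += outG * r - x * r^3 * dot(outG, x),  r = 1 / ||x||_2
    r = 1.0 / np.sqrt((x ** 2).sum(axis=1, keepdims=True))
    dot = (out_grad * x).sum(axis=1, keepdims=True)
    return out_grad * r - x * dot * r ** 3

# Check the analytic gradient against central differences.
rng = np.random.RandomState(0)
x = rng.rand(4, 6) + 0.1   # keep rows away from the zero vector
g = rng.rand(4, 6)         # upstream gradient (outG)
eps = 1e-6
num = np.zeros_like(x)
for i in range(x.shape[0]):
    for j in range(x.shape[1]):
        xp, xm = x.copy(), x.copy()
        xp[i, j] += eps
        xm[i, j] -= eps
        num[i, j] = ((forward(xp) - forward(xm)) * g).sum() / (2 * eps)
assert np.allclose(num, backward(x, g), atol=1e-5)
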
diff --git a/python/paddle/trainer/config_parser.py b/python/paddle/trainer/config_parser.py
index c5e56e59d..3587ea175 100644
--- a/python/paddle/trainer/config_parser.py
+++ b/python/paddle/trainer/config_parser.py
@@ -2727,12 +2727,12 @@ class SumToOneNormLayer(LayerBase):
 
 @config_layer('row_l2_norm')
 class RowL2NormLayer(LayerBase):
-    def __init__(self, name, inputs, device=None):
+    def __init__(self, name, inputs, **xargs):
         super(RowL2NormLayer, self).__init__(
-            name, 'row_l2_norm', 0, inputs=inputs, device=device)
+            name, 'row_l2_norm', 0, inputs=inputs, **xargs)
         config_assert(len(self.inputs) == 1, 'RowL2NormLayer must have 1 input')
-        input_layer0 = self.get_input_layer(0)
-        self.set_layer_size(input_layer0.size)
+        input_layer = self.get_input_layer(0)
+        self.set_layer_size(input_layer.size)
 
 
 @config_layer('cos_vm')
diff --git a/python/paddle/trainer_config_helpers/tests/configs/file_list.sh b/python/paddle/trainer_config_helpers/tests/configs/file_list.sh
index cdf9b2eab..5b7ad22a1 100755
--- a/python/paddle/trainer_config_helpers/tests/configs/file_list.sh
+++ b/python/paddle/trainer_config_helpers/tests/configs/file_list.sh
@@ -7,6 +7,6 @@ test_rnn_group shared_fc shared_lstm shared_gru test_cost_layers_with_weight
 test_spp_layer test_bilinear_interp test_maxout test_bi_grumemory math_ops
 test_seq_concat_reshape test_pad test_smooth_l1 test_multiplex_layer
 test_prelu_layer test_row_conv test_detection_output_layer test_multibox_loss_layer
-test_recursive_topology test_gated_unit_layer)
+test_recursive_topology test_gated_unit_layer test_row_l2_norm_layer)
 
 export whole_configs=(test_split_datasource)
diff --git a/python/paddle/trainer_config_helpers/tests/configs/protostr/test_row_l2_norm_layer.protostr b/python/paddle/trainer_config_helpers/tests/configs/protostr/test_row_l2_norm_layer.protostr
new file mode 100644
index 000000000..c2786ff55
--- /dev/null
+++ b/python/paddle/trainer_config_helpers/tests/configs/protostr/test_row_l2_norm_layer.protostr
@@ -0,0 +1,27 @@
+type: "nn"
+layers {
+  name: "input"
+  type: "data"
+  size: 300
+  active_type: ""
+}
+layers {
+  name: "__row_l2_norm_layer_0__"
+  type: "row_l2_norm"
+  size: 300
+  active_type: ""
+  inputs {
+    input_layer_name: "input"
+  }
+}
+input_layer_names: "input"
+output_layer_names: "__row_l2_norm_layer_0__"
+sub_models {
+  name: "root"
+  layer_names: "input"
+  layer_names: "__row_l2_norm_layer_0__"
+  input_layer_names: "input"
+  output_layer_names: "__row_l2_norm_layer_0__"
+  is_recurrent_layer_group: false
+}
+
diff --git a/python/paddle/trainer_config_helpers/tests/configs/test_row_l2_norm_layer.py b/python/paddle/trainer_config_helpers/tests/configs/test_row_l2_norm_layer.py
new file mode 100644
index 000000000..ac8badb26
--- /dev/null
+++ b/python/paddle/trainer_config_helpers/tests/configs/test_row_l2_norm_layer.py
@@ -0,0 +1,6 @@
+from paddle.trainer_config_helpers import *
+
+data = data_layer(name='input', size=300)
+row_l2_norm = row_l2_norm_layer(input=data)
+
+outputs(row_l2_norm)
--
GitLab
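Reviewer aside (not part of the patch): the test config above exercises the layer through trainer_config_helpers, and the generated network is expected to match the .protostr file added alongside it. Through the v2 API documented in layer.rst, usage would look roughly like the sketch below; this is illustrative only and assumes a PaddlePaddle build that includes this patch.

import paddle.v2 as paddle

paddle.init(use_gpu=False, trainer_count=1)
# A 300-dimensional dense input, matching the test config above.
data = paddle.layer.data(name='input',
                         type=paddle.data_type.dense_vector(300))
# Scales each row (sample) of the batch to unit L2 norm; size stays 300.
norm = paddle.layer.row_l2_norm(input=data)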