From 1e6c917ecc76905fa0f33090d757f2b6376e437f Mon Sep 17 00:00:00 2001
From: hedaoyuan
Date: Tue, 10 Jan 2017 15:30:24 +0800
Subject: [PATCH] fix unit test of paramRelu

---
 paddle/math/Matrix.cpp                   | 24 ++++++++++++++++++------
 paddle/math/tests/test_Matrix.cpp        |  5 +++--
 paddle/math/tests/test_matrixCompare.cpp |  5 +++--
 3 files changed, 24 insertions(+), 10 deletions(-)

diff --git a/paddle/math/Matrix.cpp b/paddle/math/Matrix.cpp
index 90813a8996..3ae237bc7d 100644
--- a/paddle/math/Matrix.cpp
+++ b/paddle/math/Matrix.cpp
@@ -1311,7 +1311,9 @@ void GpuMatrix::paramReluForward(Matrix& data, Matrix& W) {
   real* w = W.getData();
   size_t numElements = data.getWidth();
   size_t numSamples = data.getHeight();
-  size_t partial_sum = numElements / (W.getHeight() * W.getWidth());
+  size_t paraSize = W.getHeight() * W.getWidth();
+  CHECK(!(numElements % paraSize));  // this check from ParameterReluLayer::init
+  size_t partial_sum = numElements / paraSize;
   real* output = getData();
   hl_param_relu_forward(output, input, w, numElements, numSamples, partial_sum);
 }
@@ -1324,7 +1326,9 @@ void GpuMatrix::paramReluBackwardW(Matrix& oGrad, Matrix& data) {
   real* wgrad = data_;
   size_t numElements = data.getWidth();
   size_t numSamples = data.getHeight();
-  size_t partial_sum = numElements / (this->getHeight() * this->getWidth());
+  size_t paraSize = this->getHeight() * this->getWidth();
+  CHECK(!(numElements % paraSize));  // this check from ParameterReluLayer::init
+  size_t partial_sum = numElements / paraSize;
   hl_param_relu_backward_w(
       wgrad, ograd, input, numElements, numSamples, partial_sum);
 }
@@ -1336,7 +1340,9 @@ void GpuMatrix::paramReluBackwardDiff(Matrix& oGrad, Matrix& data, Matrix& W) {
   real* w = W.getData();
   size_t numElements = data.getWidth();
   size_t numSamples = data.getHeight();
-  size_t partial_sum = numElements / (W.getHeight() * W.getWidth());
+  size_t paraSize = W.getHeight() * W.getWidth();
+  CHECK(!(numElements % paraSize));  // this check from ParameterReluLayer::init
+  size_t partial_sum = numElements / paraSize;
   hl_param_relu_backward_diff(
       ograd, input, w, diff, numElements, numSamples, partial_sum);
 }
@@ -3764,7 +3770,9 @@ void CpuMatrix::paramReluForward(Matrix& data, Matrix& W) {
   real* w = W.getData();
   size_t numElements = data.getWidth();
   size_t numSamples = data.getHeight();
-  size_t partial_sum = numElements / (W.getHeight() * W.getWidth());
+  size_t paraSize = W.getHeight() * W.getWidth();
+  CHECK(!(numElements % paraSize));  // this check from ParameterReluLayer::init
+  size_t partial_sum = numElements / paraSize;
   for (size_t n = 0, k = 0; n < numSamples; ++n) {
     for (size_t i = 0; i < numElements; ++i, ++k) {
       data_[k] = input[k] > 0 ? input[k] : input[k] * w[i / partial_sum];
@@ -3778,7 +3786,9 @@ void CpuMatrix::paramReluBackwardW(Matrix& oGrad, Matrix& data) {
   real* wgrad = data_;
   size_t numElements = data.getWidth();
   size_t numSamples = data.getHeight();
-  size_t partial_sum = numElements / (this->getHeight() * this->getWidth());
+  size_t paraSize = this->getHeight() * this->getWidth();
+  CHECK(!(numElements % paraSize));  // this check from ParameterReluLayer::init
+  size_t partial_sum = numElements / paraSize;
   for (size_t n = 0, k = 0; n < numSamples; ++n) {
     for (size_t i = 0; i < numElements; ++i, ++k) {
       wgrad[i / partial_sum] += ograd[k] * (input[k] > 0 ? 0 : input[k]);
@@ -3793,7 +3803,9 @@ void CpuMatrix::paramReluBackwardDiff(Matrix& oGrad, Matrix& data, Matrix& W) {
   real* w = W.getData();
   size_t numElements = data.getWidth();
   size_t numSamples = data.getHeight();
-  size_t partial_sum = numElements / (W.getHeight() * W.getWidth());
+  size_t paraSize = W.getHeight() * W.getWidth();
+  CHECK(!(numElements % paraSize));  // this check from ParameterReluLayer::init
+  size_t partial_sum = numElements / paraSize;
   for (size_t n = 0, k = 0; n < numSamples; ++n) {
     for (size_t i = 0; i < numElements; ++i, ++k) {
       diff[k] += ograd[k] * (input[k] > 0 ? 1 : w[i / partial_sum]);
diff --git a/paddle/math/tests/test_Matrix.cpp b/paddle/math/tests/test_Matrix.cpp
index 6899769144..a4084bdf7c 100644
--- a/paddle/math/tests/test_Matrix.cpp
+++ b/paddle/math/tests/test_Matrix.cpp
@@ -224,10 +224,11 @@ void testParamReluBackwardW(int height, int width, int w_height, int w_width) {
 }
 
 TEST(Matrix, paramRelu) {
-  for (auto height : {10, 100}) {
-    for (auto width : {10, 100}) {
+  for (auto height : {10, 40, 100}) {
+    for (auto width : {10, 40, 100}) {
       for (auto w_height : {1, 2}) {
         for (auto w_width : {1, 2}) {
+          if (width % (w_height * w_width)) continue;
           testParamReluForward(height, width, w_height, w_width);
           testParamReluBackwardW(height, width, w_height, w_width);
         }
diff --git a/paddle/math/tests/test_matrixCompare.cpp b/paddle/math/tests/test_matrixCompare.cpp
index 3a780d26c0..f0c49791d7 100644
--- a/paddle/math/tests/test_matrixCompare.cpp
+++ b/paddle/math/tests/test_matrixCompare.cpp
@@ -773,10 +773,11 @@ void testParamReluBackwardDiff(int height,
 }
 
 TEST(Matrix, paramReluBackwardDiff) {
-  for (auto height : {10, 100}) {
-    for (auto width : {10, 100}) {
+  for (auto height : {10, 40, 100}) {
+    for (auto width : {10, 40, 100}) {
       for (auto w_height : {1, 2}) {
         for (auto w_width : {1, 2}) {
+          if (width % (w_height * w_width)) continue;
           testParamReluBackwardDiff(height, width, w_height, w_width);
         }
       }
--
GitLab
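
Note: the divisibility requirement enforced by the new CHECK(!(numElements % paraSize)) is the same one the updated test loops respect via "if (width % (w_height * w_width)) continue;". Below is a minimal standalone sketch of the grouped PReLU forward pass that partial_sum implements; it is illustrative only (not the Paddle Matrix API), and the function and variable names are chosen for the example.

// Standalone sketch of grouped PReLU: each sample has numElements features,
// the weight vector holds paraSize slopes, and every group of
// (numElements / paraSize) consecutive features shares one slope.
// numElements must be divisible by paraSize, which is what the patch's
// CHECK enforces and what the test loops now guarantee.
#include <cassert>
#include <cstdio>
#include <vector>

void paramReluForwardSketch(const std::vector<float>& input,
                            const std::vector<float>& w,
                            std::vector<float>& output,
                            size_t numSamples,
                            size_t numElements) {
  size_t paraSize = w.size();
  assert(numElements % paraSize == 0);  // mirrors CHECK(!(numElements % paraSize))
  size_t partial_sum = numElements / paraSize;
  for (size_t n = 0, k = 0; n < numSamples; ++n) {
    for (size_t i = 0; i < numElements; ++i, ++k) {
      // Negative inputs are scaled by the slope of the group they fall into.
      output[k] = input[k] > 0 ? input[k] : input[k] * w[i / partial_sum];
    }
  }
}

int main() {
  // width = 40 is divisible by paraSize = 4 (w_height * w_width = 2 * 2),
  // matching one of the configurations the updated tests keep.
  size_t numSamples = 2, numElements = 40;
  std::vector<float> input(numSamples * numElements, -1.0f);
  std::vector<float> w = {0.1f, 0.2f, 0.3f, 0.4f};
  std::vector<float> output(input.size());
  paramReluForwardSketch(input, w, output, numSamples, numElements);
  std::printf("output[0]=%g output[39]=%g\n", output[0], output[39]);
  return 0;
}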