From c15d4208b9e5f651d26dfdcab249bc8023769dc9 Mon Sep 17 00:00:00 2001
From: Kaipeng Deng
Date: Mon, 30 Dec 2019 11:13:08 +0800
Subject: [PATCH] fix no grad check for pool_max & spectral_norm (#21971)

* fix no grad check for pool_max & spectral_norm. test=develop
---
 .../paddle/fluid/tests/unittests/test_pool_max_op.py    |  9 +++++----
 .../fluid/tests/unittests/test_spectral_norm_op.py      | 10 +++++++++-
 2 files changed, 14 insertions(+), 5 deletions(-)

diff --git a/python/paddle/fluid/tests/unittests/test_pool_max_op.py b/python/paddle/fluid/tests/unittests/test_pool_max_op.py
index 8905c900dc..4b3c777ccf 100644
--- a/python/paddle/fluid/tests/unittests/test_pool_max_op.py
+++ b/python/paddle/fluid/tests/unittests/test_pool_max_op.py
@@ -141,6 +141,7 @@ class TestMaxPoolWithIndex_Op(OpTest):
         self.init_adaptive()
 
         input = np.random.random(self.shape).astype("float64")
+        input = np.round(input * 100., 2)
         output, mask = self.pool_forward_naive(input, self.ksize, self.strides,
                                                self.paddings, self.global_pool,
                                                self.adaptive)
@@ -161,15 +162,15 @@ class TestMaxPoolWithIndex_Op(OpTest):
     def test_check_output(self):
         self.check_output()
 
-    # def test_check_grad(self):
-    #     self.check_grad(set(['X']), ['Out'], max_relative_error=0.07)
+    def test_check_grad(self):
+        self.check_grad(set(['X']), ['Out'])
 
     def init_test_case(self):
         self.op_type = "max_pool3d_with_index"
         self.pool_forward_naive = max_pool3D_forward_naive
-        self.shape = [2, 3, 5, 5, 5]
+        self.shape = [2, 3, 7, 7, 7]
         self.ksize = [3, 3, 3]
-        self.strides = [1, 1, 1]
+        self.strides = [2, 2, 2]
         self.paddings = [1, 1, 1]
 
     def init_global(self):
diff --git a/python/paddle/fluid/tests/unittests/test_spectral_norm_op.py b/python/paddle/fluid/tests/unittests/test_spectral_norm_op.py
index 104df2efed..cf19aa9b85 100644
--- a/python/paddle/fluid/tests/unittests/test_spectral_norm_op.py
+++ b/python/paddle/fluid/tests/unittests/test_spectral_norm_op.py
@@ -16,7 +16,7 @@ from __future__ import division
 
 import unittest
 import numpy as np
-from op_test import OpTest
+from op_test import OpTest, skip_check_grad_ci
 from paddle.fluid import core
 
 
@@ -45,6 +45,10 @@ def spectral_norm(weight, u, v, dim, power_iters, eps):
     return weight / sigma
 
 
+@skip_check_grad_ci(
+    reason="Spectral norm do not check grad when power_iters > 0 "
+    "because grad is not calculated in power iterations, "
+    "which cannot be checked by python grad unittests")
 class TestSpectralNormOpNoGrad(OpTest):
     def setUp(self):
         self.initTestCase()
@@ -81,6 +85,10 @@ class TestSpectralNormOpNoGrad(OpTest):
         self.eps = 1e-12
 
 
+@skip_check_grad_ci(
+    reason="Spectral norm do not check grad when power_iters > 0 "
+    "because grad is not calculated in power iterations, "
+    "which cannot be checked by python grad unittests")
 class TestSpectralNormOpNoGrad2(TestSpectralNormOpNoGrad):
    def initTestCase(self):
        self.weight_shape = (2, 3, 3, 3)
-- 
GitLab
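
Note (not part of the patch): a plausible reading of why the pool_max test now rounds its scaled inputs and re-enables test_check_grad is that finite-difference gradient checks are fragile for max pooling when two inputs in the same window are nearly tied, since the perturbation used by the checker can flip the argmax. The standalone NumPy sketch below illustrates the effect on a global 1-D max; max_pool_1d and numeric_grad are hypothetical helpers for illustration, not PaddlePaddle APIs.

import numpy as np

def max_pool_1d(x):
    # Global 1-D max pool: forward value plus the analytic gradient,
    # which is 1 at the argmax and 0 elsewhere.
    grad = np.zeros_like(x)
    grad[np.argmax(x)] = 1.0
    return x.max(), grad

def numeric_grad(x, delta=5e-3):
    # Central finite differences, the scheme a typical grad checker uses.
    g = np.zeros_like(x)
    for i in range(x.size):
        xp, xm = x.copy(), x.copy()
        xp[i] += delta
        xm[i] -= delta
        g[i] = (max_pool_1d(xp)[0] - max_pool_1d(xm)[0]) / (2 * delta)
    return g

# Near-tie: the two largest entries differ by less than delta, so the
# perturbation flips the argmax and the numeric gradient splits between them.
x_tie = np.array([0.1, 0.5000, 0.5001])
print(numeric_grad(x_tie))       # ~[0., 0.49, 0.51]
print(max_pool_1d(x_tie)[1])     # [0., 0., 1.]

# After scaling and rounding (as the patched test does), the entries here are
# spaced 0.01 apart, so a 5e-3 perturbation can no longer flip the argmax and
# the numeric and analytic gradients agree.
x_sep = np.round(x_tie * 100., 2)
print(numeric_grad(x_sep))       # [0., 0., 1.]
print(max_pool_1d(x_sep)[1])     # [0., 0., 1.]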