diff --git a/python/paddle/fluid/tests/unittests/mkldnn/mkldnn_op_test.py b/python/paddle/fluid/tests/unittests/mkldnn/mkldnn_op_test.py
index 57a5714fc7853905703e9db31bc143fb5cabfacb..c47115c466fc97548f5152cbca14d29aec9f675a 100644
--- a/python/paddle/fluid/tests/unittests/mkldnn/mkldnn_op_test.py
+++ b/python/paddle/fluid/tests/unittests/mkldnn/mkldnn_op_test.py
@@ -19,13 +19,14 @@ import paddle.fluid.core as core
 import paddle.fluid as fluid
 
 
+def __assert_close(test_case, tensor, np_array, msg, atol=1e-4):
+    test_case.assertTrue(
+        np.allclose(
+            np.array(tensor), np_array, atol=atol), msg)
+
+
 def check_if_mkldnn_primitives_exist_in_bwd(test_case, op_type, x, out,
                                             out_grad, x_grad):
-    def __assert_close(tensor, np_array, msg, atol=1e-4):
-        test_case.assertTrue(
-            np.allclose(
-                np.array(tensor), np_array, atol=atol), msg)
-
     place = core.CPUPlace()
 
     var_dict = {'x': x, 'out': out, 'out@GRAD': out_grad, 'x@GRAD': x_grad}
@@ -69,7 +70,81 @@ def check_if_mkldnn_primitives_exist_in_bwd(test_case, op_type, x, out,
                       for name in ['x', 'out@GRAD']},
                 fetch_list=['x@GRAD', 'out'])
 
-        __assert_close(x_grad, out[0], 'x@GRAD')
+        __assert_close(test_case, x_grad, out[0], 'x@GRAD')
+
+
+def check_if_mkldnn_batchnorm_primitives_exist_in_bwd(
+        test_case, var_dict, place, shape, data_layout):
+
+    var_names = [
+        'x', 'scale', 'bias', 'mean', 'variance', 'y', 'saved_mean',
+        'saved_variance'
+    ]
+    ground_truth = {name: var_dict[name] for name in var_names}
+    program = fluid.Program()
+    with fluid.program_guard(program):
+        block = program.global_block()
+        for name in ground_truth:
+            block.create_var(
+                name=name, dtype='float32', shape=ground_truth[name].shape)
+        bn_op = block.append_op(
+            type="batch_norm",
+            inputs={
+                "X": block.var('x'),
+                "Scale": block.var('scale'),
+                "Bias": block.var('bias'),
+                "Mean": block.var('mean'),
+                "Variance": block.var('variance')
+            },
+            outputs={
+                "Y": block.var('y'),
+                "MeanOut": block.var('mean'),  # share memory
+                "VarianceOut": block.var('variance'),  # share memory
+                "SavedMean": block.var('saved_mean'),
+                "SavedVariance": block.var('saved_variance')
+            },
+            attrs={
+                "momentum": test_case.momentum,
+                "epsilon": test_case.epsilon,
+                "is_test": False,
+                "data_layout": data_layout,
+                "use_mkldnn": test_case.use_mkldnn,
+                "fuse_with_relu": test_case.fuse_with_relu,
+                "use_global_stats": test_case.use_global_stats
+            })
+        block.create_var(
+            name='y@GRAD', dtype='float32', shape=var_dict['y'].shape)
+
+        # generate backward op_desc
+        grad_op_desc_list, op_grad_to_var = core.get_grad_op_desc(
+            bn_op.desc, test_case.no_grad_set, [])
+        grad_op_desc = grad_op_desc_list[0]
+        new_op_desc = block.desc.append_op()
+        new_op_desc.copy_from(grad_op_desc)
+        for var_name in grad_op_desc.output_arg_names():
+            block.desc.var(var_name.encode("ascii"))
+        grad_op_desc.infer_var_type(block.desc)
+        grad_op_desc.infer_shape(block.desc)
+        for arg in grad_op_desc.output_arg_names():
+            grad_var = block.desc.find_var(arg.encode("ascii"))
+            grad_var.set_dtype(core.VarDesc.VarType.FP32)
+
+        exe = fluid.Executor(place)
+
+        # Do at least 2 iterations
+        for i in range(2):
+            out = exe.run(
+                program,
+                feed={
+                    name: var_dict[name]
+                    for name in
+                    ['x', 'scale', 'bias', 'mean', 'variance', 'y@GRAD']
+                },
+                fetch_list=test_case.fetch_list)
+            for id, name in enumerate(test_case.fetch_list):
+                __assert_close(test_case, var_dict[name], out[id], name)
+
+        print("MKLDNN op test forward passed: ", str(place), data_layout)
 
 
 def format_reorder(out, size):
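Reviewer note: the new `check_if_mkldnn_batchnorm_primitives_exist_in_bwd` helper exercises MKL-DNN primitive reuse by running the same program at least twice and comparing every fetched tensor against precomputed ground truth. A minimal, framework-free sketch of that pattern (all names here are illustrative stand-ins, not Paddle API):

```python
import unittest

import numpy as np


def assert_close(test_case, tensor, np_array, msg, atol=1e-4):
    # Same comparison the shared __assert_close helper performs.
    test_case.assertTrue(np.allclose(np.array(tensor), np_array, atol=atol), msg)


class TwoIterationPattern(unittest.TestCase):
    def test_reused_primitive_is_stable(self):
        x = np.random.random_sample([2, 3]).astype(np.float32)
        reference = x * 2.0  # stand-in for the ground-truth output
        # At least 2 iterations: a stale cached primitive would produce a
        # mismatch on the second run even if the first run passed.
        for i in range(2):
            out = x * 2.0  # stand-in for exe.run(...) with use_mkldnn=True
            assert_close(self, out, reference, 'iteration %d' % i)


if __name__ == '__main__':
    unittest.main()
```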
diff --git a/python/paddle/fluid/tests/unittests/mkldnn/test_batch_norm_mkldnn_op.py b/python/paddle/fluid/tests/unittests/mkldnn/test_batch_norm_mkldnn_op.py
index 5fce90372d9beda9b04ab68d0a8ac5ef5c124421..eb12470789ab9a6e416e829832986a11cd576474 100644
--- a/python/paddle/fluid/tests/unittests/mkldnn/test_batch_norm_mkldnn_op.py
+++ b/python/paddle/fluid/tests/unittests/mkldnn/test_batch_norm_mkldnn_op.py
@@ -22,6 +22,7 @@ import paddle.fluid as fluid
 from paddle.fluid.tests.unittests.op_test import OpTest
 from paddle.fluid.framework import grad_var_name
 from paddle.fluid.tests.unittests.test_batch_norm_op import TestBatchNormOpInference, TestBatchNormOpTraining, _reference_training, _reference_grad
+from mkldnn_op_test import check_if_mkldnn_batchnorm_primitives_exist_in_bwd
 
 
 class TestMKLDNNBatchNormOpTraining(TestBatchNormOpTraining):
@@ -43,6 +44,36 @@ class TestMKLDNNBatchNormOpTraining(TestBatchNormOpTraining):
         return y, mean_out, variance_out, saved_mean, saved_variance, x_grad, scale_grad, bias_grad
 
 
+class TestMKLDNNBatchNormOpExistedPrimitives(TestMKLDNNBatchNormOpTraining):
+    def init_test_case(self):
+        TestMKLDNNBatchNormOpTraining.init_test_case(self)
+        self.fetch_list = ['y', 'x@GRAD']
+
+    def test_forward_backward(self):
+        place = core.CPUPlace()
+        shape = [2, 3, 4, 5]
+        scale_shape = [3]
+        data_layout = "NCHW"
+        # initialize the ground-truth
+        np.random.seed(123)
+        x = np.random.random_sample(shape).astype(np.float32)
+        scale = np.random.random_sample(scale_shape).astype(np.float32)
+        bias = np.random.random_sample(scale_shape).astype(np.float32)
+        mean, variance = self.set_mean_variance(scale_shape, x, data_layout)
+        y_grad = np.random.random_sample(shape).astype(np.float32)
+
+        y, mean_out, variance_out, saved_mean, saved_variance, x_grad, scale_grad, bias_grad = self.ref_forward_backward(
+            x, y_grad, scale, bias, mean, variance, self.epsilon, self.momentum,
+            shape, data_layout)
+        var_dict = locals()
+        var_dict['y@GRAD'] = y_grad
+        var_dict['x@GRAD'] = x_grad
+        var_dict['scale@GRAD'] = scale_grad
+        var_dict['bias@GRAD'] = bias_grad
+        check_if_mkldnn_batchnorm_primitives_exist_in_bwd(self, var_dict, place,
+                                                          shape, data_layout)
+
+
 class TestMKLDNNBatchNormOpInference(TestBatchNormOpInference):
     def init_kernel_type(self):
         self.use_mkldnn = True
@@ -50,7 +81,6 @@ class TestMKLDNNBatchNormOpInference(TestBatchNormOpInference):
     def test_check_output(self):
         place = core.CPUPlace()
         data_format = "NCHW"
-
         self.check_with_place(place, data_format, self.dtype, [2, 3, 4, 5])
 
 
@@ -62,7 +92,6 @@
     def test_check_output(self):
         place = core.CPUPlace()
         data_format = "NCHW"
-
         self.check_with_place(place, data_format, self.dtype, [2, 3, 4, 5])
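For context, `ref_forward_backward` builds the ground truth from the reference implementations imported above (`_reference_training`, `_reference_grad`). A hedged NumPy sketch of what the NCHW forward reference computes, using the same shapes as the new test (illustrative only, not the exact Paddle reference code; the epsilon value is an assumption):

```python
import numpy as np


def reference_batch_norm_nchw(x, scale, bias, epsilon):
    # Per-channel statistics over the N, H, W axes of an NCHW tensor.
    mean = x.mean(axis=(0, 2, 3))
    var = x.var(axis=(0, 2, 3))
    x_hat = (x - mean.reshape(1, -1, 1, 1)) / np.sqrt(
        var.reshape(1, -1, 1, 1) + epsilon)
    # Scale and shift per channel.
    y = scale.reshape(1, -1, 1, 1) * x_hat + bias.reshape(1, -1, 1, 1)
    return y, mean, var


np.random.seed(123)
x = np.random.random_sample([2, 3, 4, 5]).astype(np.float32)
scale = np.random.random_sample([3]).astype(np.float32)
bias = np.random.random_sample([3]).astype(np.float32)
y, mean, var = reference_batch_norm_nchw(x, scale, bias, epsilon=1e-5)
print(y.shape, mean.shape)  # (2, 3, 4, 5) (3,)
```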
diff --git a/python/paddle/fluid/tests/unittests/mkldnn/test_gaussian_random_mkldnn_op.py b/python/paddle/fluid/tests/unittests/mkldnn/test_gaussian_random_mkldnn_op.py
index c18bd77bd3e6de08283f3ac3a31c73453f3c9129..5ecf8cc80f7eb12b7ecd3a2238d92b2e71ceaa6d 100644
--- a/python/paddle/fluid/tests/unittests/mkldnn/test_gaussian_random_mkldnn_op.py
+++ b/python/paddle/fluid/tests/unittests/mkldnn/test_gaussian_random_mkldnn_op.py
@@ -19,7 +19,22 @@ import unittest
 from paddle.fluid.tests.unittests.test_gaussian_random_op import TestGaussianRandomOp
 
 
-class TestMKLDNN(TestGaussianRandomOp):
+class TestMKLDNNGaussianRandomOpSeed10(TestGaussianRandomOp):
+    def init_kernel_type(self):
+        self.use_mkldnn = True
+
+
+class TestMKLDNNGaussianRandomOpSeed0(TestGaussianRandomOp):
+    def setUp(self):
+        TestGaussianRandomOp.setUp(self)
+        self.attrs = {
+            "shape": [1000, 784],
+            "mean": .0,
+            "std": 1.,
+            "seed": 0,
+            "use_mkldnn": self.use_mkldnn
+        }
+
     def init_kernel_type(self):
         self.use_mkldnn = True
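The split into `Seed10` and `Seed0` variants covers both the reproducible path (the fixed seed inherited from the base test) and `seed: 0`, which leaves seeding to the engine. A test like this can only assert on distribution statistics, not exact values; a hedged NumPy sketch of that kind of check (the tolerances here are assumptions, not taken from TestGaussianRandomOp):

```python
import numpy as np

# With 1000 * 784 samples, the empirical moments of N(0, 1) sit close enough
# to the requested mean=0.0 and std=1.0 to assert against, whether or not
# the seed is fixed.
samples = np.random.normal(loc=0.0, scale=1.0, size=(1000, 784))
assert abs(samples.mean() - 0.0) < 0.05
assert abs(samples.std() - 1.0) < 0.05
print("empirical mean/std:", samples.mean(), samples.std())
```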