diff --git a/paddle/operators/dropout_op.cc b/paddle/operators/dropout_op.cc
index 60ad2efbe973b33485189f708e67da85d454330b..dc416f6194296fbc4f2473878839ed6cecaba63a 100644
--- a/paddle/operators/dropout_op.cc
+++ b/paddle/operators/dropout_op.cc
@@ -62,7 +62,7 @@ class DropoutOpGrad : public framework::OperatorWithKernel {
     auto mask_dims = ctx.Input("Mask")->dims();
     auto out_dims = ctx.Input(framework::GradVarName("Out"))->dims();
     PADDLE_ENFORCE_EQ(x_dims, out_dims,
-                      "Dimensions of Input(X) and Out must be the same.");
+                      "Dimensions of Input(X) and Out@Grad must be the same.");
     PADDLE_ENFORCE_EQ(x_dims, mask_dims,
                       "Dimensions of Input(X) and Mask must be the same.");

diff --git a/python/paddle/v2/framework/tests/test_dropout_op.py b/python/paddle/v2/framework/tests/test_dropout_op.py
index 3f4738f6145187f06b093fdaf7ee3aa6ef9410d0..c5ff55f74c5c892e962fd351172c78fa61982fa4 100644
--- a/python/paddle/v2/framework/tests/test_dropout_op.py
+++ b/python/paddle/v2/framework/tests/test_dropout_op.py
@@ -14,7 +14,7 @@ class TestDropoutOpProbZero(unittest.TestCase):
         self.outputs = {'Out': self.inputs['X'], 'Mask': np.ones((32, 64))}


-class TestDropoutOpAllProbOne(unittest.TestCase):
+class TestDropoutOpProbOne(unittest.TestCase):
     __metaclass__ = OpTestMeta

     def setUp(self):
@@ -24,7 +24,7 @@ class TestDropoutOpAllProbOne(unittest.TestCase):
         self.outputs = {'Out': np.zeros((32, 64)), 'Mask': np.zeros((32, 64))}


-class DropoutGradOpTest(GradientChecker):
+class TestDropoutGradOp(GradientChecker):
     def test_dropout_2d(self):
         op = create_op("dropout")
         inputs = {'X': np.random.random((10, 5)).astype("float32")}
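
For context, here is a minimal NumPy sketch (not part of this change, and not the C++ kernel) of the dropout semantics the renamed tests encode: with `dropout_prob = 0` the mask is all ones and `Out` equals `X`; with `dropout_prob = 1` both `Mask` and `Out` are all zeros. The `dropout_forward` helper below is a hypothetical name used only for illustration.

```python
import numpy as np

def dropout_forward(x, dropout_prob):
    # Illustrative only: zero each element with probability `dropout_prob`
    # and return the binary keep-mask alongside the output, matching the
    # expected 'Out'/'Mask' values asserted in the prob-0 and prob-1 tests.
    mask = (np.random.uniform(size=x.shape) >= dropout_prob).astype(x.dtype)
    return x * mask, mask

x = np.random.random((32, 64)).astype("float32")

# dropout_prob = 0.0: nothing is dropped, so Out == X and Mask == ones.
out, mask = dropout_forward(x, 0.0)
assert np.allclose(out, x) and np.all(mask == 1)

# dropout_prob = 1.0: everything is dropped, so Out == zeros and Mask == zeros.
out, mask = dropout_forward(x, 1.0)
assert np.all(out == 0) and np.all(mask == 0)
```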