From 734a9eeaa464510de1374c196e40325efd2f8edb Mon Sep 17 00:00:00 2001
From: Liu Yiqun
Date: Thu, 7 Sep 2017 09:29:23 +0000
Subject: [PATCH] Correct the definition of Operator in TestFCGradOp, and
 rename the output name of identity to Y.

---
 paddle/operators/fc_op.cc                     | 24 +++++++++----------
 paddle/operators/identity_op.cc               |  4 ++--
 paddle/operators/minus_op.cc                  |  2 +-
 .../paddle/v2/framework/tests/CMakeLists.txt  |  1 +
 .../v2/framework/tests/gradient_checker.py    |  4 ----
 .../paddle/v2/framework/tests/test_fc_op.py   | 17 +++++++++----
 .../v2/framework/tests/test_minus_op.py       |  4 ++--
 .../tests/test_scale_and_identity_op.py       |  4 ++--
 8 files changed, 32 insertions(+), 28 deletions(-)

diff --git a/paddle/operators/fc_op.cc b/paddle/operators/fc_op.cc
index 60bf6e9da..40b5128bf 100644
--- a/paddle/operators/fc_op.cc
+++ b/paddle/operators/fc_op.cc
@@ -24,30 +24,30 @@ class FCOp : public NetOp {
        const framework::VariableNameMap &outputs,
        const framework::AttributeMap &attrs)
       : NetOp(type, inputs, outputs, attrs) {
+    // mul_out = X * W
     AppendOp(framework::OpRegistry::CreateOp(
         "mul", {{"X", {Input("X")}}, {"Y", {Input("W")}}},
         {{"Out", {Output("mul_out")}}}, {}));
+
+    std::string add_out_name = "mul_out";
     auto b = Input("b");
     if (b != framework::kEmptyVarName) {
+      // add_out = mul_out + b
       AppendOp(framework::OpRegistry::CreateOp(
           "rowwise_add", {{"X", {Output("mul_out")}}, {"b", {Input("b")}}},
           {{"Out", {Output("add_out")}}}, {}));
+      add_out_name = "add_out";
     } else {
-      AppendOp(framework::OpRegistry::CreateOp(
-          "identity", {{"X", {Output("mul_out")}}},
-          {{"Out", {Output("add_out")}}}, {}));
+      auto add_out = Output("add_out");
+      if (add_out != framework::kEmptyVarName) {
+        this->Rename(add_out, framework::kEmptyVarName);
+      }
     }
 
     auto activation = GetAttr<std::string>("activation");
-    if (activation == "identity") {
-      AppendOp(framework::OpRegistry::CreateOp(activation,
-                                               {{"X", {Output("add_out")}}},
-                                               {{"Out", {Output("Out")}}}, {}));
-    } else {
-      AppendOp(framework::OpRegistry::CreateOp(activation,
-                                               {{"X", {Output("add_out")}}},
-                                               {{"Y", {Output("Out")}}}, {}));
-    }
+    AppendOp(framework::OpRegistry::CreateOp(activation,
+                                             {{"X", {Output(add_out_name)}}},
+                                             {{"Y", {Output("Out")}}}, {}));
     CompleteAddOp(false);
   }
 };
diff --git a/paddle/operators/identity_op.cc b/paddle/operators/identity_op.cc
index b67240fb9..b9f0a450f 100644
--- a/paddle/operators/identity_op.cc
+++ b/paddle/operators/identity_op.cc
@@ -27,7 +27,7 @@ class IdentityOpMaker : public framework::OpProtoAndCheckerMaker {
                   framework::OpAttrChecker *op_checker)
       : OpProtoAndCheckerMaker(proto, op_checker) {
     AddInput("X", "input tensor of identity op");
-    AddOutput("Out", "output tensor of identity op");
+    AddOutput("Y", "output tensor of identity op");
     AddComment("identity operator. Just an alias of the scale op with scale = 1.0");
   }
 };
@@ -40,7 +40,7 @@ class IdentityOp : public NetOp {
              const framework::AttributeMap &attrs)
       : NetOp(type, inputs, outputs, attrs) {
     AppendOp(framework::OpRegistry::CreateOp(
-        "scale", {{"X", {Input("X")}}}, {{"Out", {Output("Out")}}},
+        "scale", {{"X", {Input("X")}}}, {{"Out", {Output("Y")}}},
         {{"scale", static_cast<AttrType>(1)}}));
     CompleteAddOp(false);
   }
diff --git a/paddle/operators/minus_op.cc b/paddle/operators/minus_op.cc
index 069fb5e1a..1d7276a19 100644
--- a/paddle/operators/minus_op.cc
+++ b/paddle/operators/minus_op.cc
@@ -65,7 +65,7 @@ class MinusGradOp : public NetOp {
 
     // x_grad = out_grad
     AppendOp(framework::OpRegistry::CreateOp("identity", {{"X", {out_grad}}},
-                                             {{"Out", {x_grad}}}, {}));
+                                             {{"Y", {x_grad}}}, {}));
 
     framework::AttributeMap scale_attr;
     scale_attr["scale"] = static_cast<AttrType>(-1);
diff --git a/python/paddle/v2/framework/tests/CMakeLists.txt b/python/paddle/v2/framework/tests/CMakeLists.txt
index f9c787a44..60ee996e4 100644
--- a/python/paddle/v2/framework/tests/CMakeLists.txt
+++ b/python/paddle/v2/framework/tests/CMakeLists.txt
@@ -18,6 +18,7 @@ py_test(test_gather_op SRCS test_gather_op.py)
 py_test(test_scatter_op SRCS test_scatter_op.py)
 py_test(test_fill_zeros_like_op SRCS test_fill_zeros_like_op.py)
 py_test(test_fc_op SRCS test_fc_op.py)
+py_test(test_minus_op SRCS test_minus_op.py)
 
 py_test(gradient_checker SRCS gradient_checker.py)
 
diff --git a/python/paddle/v2/framework/tests/gradient_checker.py b/python/paddle/v2/framework/tests/gradient_checker.py
index fdb06b798..0607275a4 100644
--- a/python/paddle/v2/framework/tests/gradient_checker.py
+++ b/python/paddle/v2/framework/tests/gradient_checker.py
@@ -277,10 +277,6 @@ class GradientChecker(unittest.TestCase):
         if no_grad_set is None:
             no_grad_set = set()
 
-        no_tmp_out = forward_op.no_intermediate_outputs()
-        if len(no_tmp_out) != 1:
-            raise ValueError("non temp out_names should be 1")
-
         inputs = forward_op.inputs()
         in_names = [item for k in inputs for item in inputs[k]]
         for no_grad in no_grad_set:
diff --git a/python/paddle/v2/framework/tests/test_fc_op.py b/python/paddle/v2/framework/tests/test_fc_op.py
index 140442db9..76b68ad61 100644
--- a/python/paddle/v2/framework/tests/test_fc_op.py
+++ b/python/paddle/v2/framework/tests/test_fc_op.py
@@ -29,13 +29,20 @@ class TestFCOp(unittest.TestCase):
 class TestFCGradOp(GradientChecker):
     def test_normal(self):
         self.inputs = {
-            "X": np.random.random((4, 4)).astype("float32"),
-            "W": np.random.random((4, 4)).astype("float32"),
-            "b": np.random.random(4).astype("float32")
+            "X": np.random.random((32, 256)).astype("float32"),
+            "W": np.random.random((256, 100)).astype("float32"),
+            "b": np.random.random(100).astype("float32")
         }
         op = Operator(
-            "fc", X="X", W="W", b="b", Out="Out", activation="sigmoid")
-        #self.check_grad(op, self.inputs, ["X", "W", "b"], "Out")
+            "fc",
+            X="X",
+            W="W",
+            b="b",
+            Out="Out",
+            mul_out="mul_out",
+            add_out="add_out",
+            activation="sigmoid")
+        self.check_grad(op, self.inputs, ["X", "W", "b"], "Out")
 
 
 if __name__ == '__main__':
diff --git a/python/paddle/v2/framework/tests/test_minus_op.py b/python/paddle/v2/framework/tests/test_minus_op.py
index 5abdd4a69..aa05d87ba 100644
--- a/python/paddle/v2/framework/tests/test_minus_op.py
+++ b/python/paddle/v2/framework/tests/test_minus_op.py
@@ -4,7 +4,7 @@ from gradient_checker import GradientChecker, create_op
 from op_test_util import OpTestMeta
 
 
-class MinusOpTest(unittest.TestCase):
+class TestMinusOp(unittest.TestCase):
     __metaclass__ = OpTestMeta
 
     def setUp(self):
@@ -16,7 +16,7 @@ class MinusOpTest(unittest.TestCase):
         self.outputs = {'Out': (self.inputs['X'] - self.inputs['Y'])}
 
 
-class MinusGradTest(GradientChecker):
+class TestMinusGrad(GradientChecker):
     def test_left(self):
         op = create_op("minus")
         inputs = {
diff --git a/python/paddle/v2/framework/tests/test_scale_and_identity_op.py b/python/paddle/v2/framework/tests/test_scale_and_identity_op.py
index 69b301c37..4c1d48499 100644
--- a/python/paddle/v2/framework/tests/test_scale_and_identity_op.py
+++ b/python/paddle/v2/framework/tests/test_scale_and_identity_op.py
@@ -11,14 +11,14 @@ class IdentityTest(unittest.TestCase):
     def setUp(self):
         self.type = "identity"
         self.inputs = {'X': np.random.random((32, 784)).astype("float32")}
-        self.outputs = {'Out': self.inputs['X']}
+        self.outputs = {'Y': self.inputs['X']}
 
 
 class IdentityGradOpTest(GradientChecker):
     def test_normal(self):
         op = create_op("identity")
         inputs = {"X": np.random.random((10, 10)).astype("float32")}
-        self.check_grad(op, inputs, set("X"), "Out")
+        self.check_grad(op, inputs, set("X"), "Y")
 
 
 class ScaleTest(unittest.TestCase):