Commit 2594a502 authored by Y Yu Yang

Polish code

Parent c4effc7d
...@@ -203,7 +203,6 @@ static std::unique_ptr<OperatorBase> BackwardRecursive( ...@@ -203,7 +203,6 @@ static std::unique_ptr<OperatorBase> BackwardRecursive(
} }
} else { } else {
std::unique_ptr<OperatorBase> grad_op(CreateGradOp(forwardOp)); std::unique_ptr<OperatorBase> grad_op(CreateGradOp(forwardOp));
PADDLE_ENFORCE(grad_op != nullptr);
ForEachVarName(grad_op->Inputs(), [&no_grad_names, &net, &grad_op]( ForEachVarName(grad_op->Inputs(), [&no_grad_names, &net, &grad_op](
const std::string& grad_input) { const std::string& grad_input) {
......
...@@ -171,17 +171,6 @@ REGISTER_OP_WITHOUT_GRADIENT(fc, f::FcOp, f::FcOpMaker); ...@@ -171,17 +171,6 @@ REGISTER_OP_WITHOUT_GRADIENT(fc, f::FcOp, f::FcOpMaker);
REGISTER_OP(many_output_op, f::NOP, f::ManyOutputOpMaker, many_output_op_grad, REGISTER_OP(many_output_op, f::NOP, f::ManyOutputOpMaker, many_output_op_grad,
f::NOP); f::NOP);
// TEST(Backward, simple_op_grad) {
// auto fwd = f::OpRegistry::CreateOp(
// "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
// ASSERT_NE(fwd, nullptr);
// auto gop = f::OpRegistry::CreateGradOp(*fwd);
// ASSERT_EQ(1UL, gop->Inputs().size());
// ASSERT_EQ("rowwise_add_grad", gop->Type());
// ASSERT_EQ(f::GradVarName("x"), gop->Output(f::GradVarName("X")));
// ASSERT_EQ(f::GradVarName("b"), gop->Output(f::GradVarName("b")));
//}
TEST(Backward, simple_op_not_need_grad) { TEST(Backward, simple_op_not_need_grad) {
auto fwd = f::OpRegistry::CreateOp( auto fwd = f::OpRegistry::CreateOp(
"rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {}); "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
...@@ -390,7 +379,6 @@ TEST(Backward, linear_net_intermediate_variable_has_no_grad) { ...@@ -390,7 +379,6 @@ TEST(Backward, linear_net_intermediate_variable_has_no_grad) {
+ 1UL /* external output number*/ + 1UL /* external output number*/
+ 1UL /* number of gradient of external output*/ + 1UL /* number of gradient of external output*/
+ 2U /* internal variable number*/); + 2U /* internal variable number*/);
EXPECT_EQ(grad_fc.Outputs(all).size(), EXPECT_EQ(grad_fc.Outputs(all).size(),
2UL /* input number of mul*/ 2UL /* input number of mul*/
+ 2UL /* input number of rowwise_add + 2UL /* input number of rowwise_add
......
...@@ -23,8 +23,6 @@ ...@@ -23,8 +23,6 @@
#include "paddle/framework/type_defs.h" #include "paddle/framework/type_defs.h"
#include "paddle/platform/macros.h" #include "paddle/platform/macros.h"
#include "glog/logging.h"
namespace paddle { namespace paddle {
namespace framework { namespace framework {
......
...@@ -55,7 +55,6 @@ std::unique_ptr<OperatorBase> OpRegistry::CreateOp(const OpDesc& op_desc) { ...@@ -55,7 +55,6 @@ std::unique_ptr<OperatorBase> OpRegistry::CreateOp(const OpDesc& op_desc) {
} }
std::unique_ptr<OperatorBase> OpRegistry::CreateOp(OpDescBind* op_desc) { std::unique_ptr<OperatorBase> OpRegistry::CreateOp(OpDescBind* op_desc) {
op_desc->Sync();
return CreateOp(op_desc->Type(), op_desc->Inputs(), op_desc->Outputs(), return CreateOp(op_desc->Type(), op_desc->Inputs(), op_desc->Outputs(),
op_desc->GetAttrMap()); op_desc->GetAttrMap());
} }
......
...@@ -71,7 +71,6 @@ class MeanGradMaker : public framework::SingleGradOpDescMaker { ...@@ -71,7 +71,6 @@ class MeanGradMaker : public framework::SingleGradOpDescMaker {
} // namespace paddle } // namespace paddle
namespace ops = paddle::operators; namespace ops = paddle::operators;
REGISTER_OPERATOR(mean, ops::MeanOp, ops::MeanOpMaker, ops::MeanGradMaker); REGISTER_OPERATOR(mean, ops::MeanOp, ops::MeanOpMaker, ops::MeanGradMaker);
REGISTER_OPERATOR(mean_grad, ops::MeanGradOp); REGISTER_OPERATOR(mean_grad, ops::MeanGradOp);
REGISTER_OP_CPU_KERNEL(mean, REGISTER_OP_CPU_KERNEL(mean,
......
...@@ -16,11 +16,6 @@ ...@@ -16,11 +16,6 @@
#include <paddle/function/TensorType.h> #include <paddle/function/TensorType.h>
#include <iostream> #include <iostream>
#define DBG_LINE() \
do { \
std::cerr << "Run at " << __LINE__ << std::endl; \
} while (false)
namespace paddle { namespace paddle {
namespace operators { namespace operators {
......