提交 f4491fa4 编写于 作者: F fengjiayi

Fix bug

上级 b0d22358
...@@ -158,8 +158,7 @@ static std::unique_ptr<OperatorBase> BackwardRecursive( ...@@ -158,8 +158,7 @@ static std::unique_ptr<OperatorBase> BackwardRecursive(
} }
insert_position.push_back( insert_position.push_back(
{dup_op.back(), {dup_op.back(),
OpRegistry::CreateOp( OpRegistry::CreateOp("sum", {{"X", {insert_add_x, insert_add_y}}},
"sum", {{"X", {insert_add_x}}, {"X", {insert_add_y}}},
{{"Out", {insert_add_out}}}, {})}); {{"Out", {insert_add_out}}}, {})});
} }
} }
...@@ -200,7 +199,8 @@ static std::unique_ptr<OperatorBase> BackwardRecursive( ...@@ -200,7 +199,8 @@ static std::unique_ptr<OperatorBase> BackwardRecursive(
// process recurrent gradient op as a special operator. // process recurrent gradient op as a special operator.
if (forwardOp.Type() == "recurrent") { if (forwardOp.Type() == "recurrent") {
// NOTE clean up cycle call somewhere (RNN's stepnet contains itself), or // NOTE clean up cycle call somewhere (RNN's stepnet contains itself),
// or
// this will result in infinite loop. // this will result in infinite loop.
const auto& rnnop = const auto& rnnop =
*static_cast<const operators::RecurrentOp*>(&forwardOp); *static_cast<const operators::RecurrentOp*>(&forwardOp);
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册