diff --git a/paddle/fluid/framework/details/memory_optimize_pass.cc b/paddle/fluid/framework/details/memory_optimize_pass.cc
index aa6641d3f26ae44dc0f81926f35dc8c8ca51e261..b35b967c72dcec7c427f6921c094f7a87d536f25 100644
--- a/paddle/fluid/framework/details/memory_optimize_pass.cc
+++ b/paddle/fluid/framework/details/memory_optimize_pass.cc
@@ -46,7 +46,6 @@ namespace details {
 std::unique_ptr<ir::Graph> MemoryOptimizePass::ApplyImpl(
     std::unique_ptr<ir::Graph> graph) const {
   auto nodes = graph->Nodes();
-  ClearControlDepVars(graph.get());
   CollectSkipVarsSet(nodes);
 
   cfg_.reset(new details::ControlFlowGraph(*graph));
@@ -79,7 +78,7 @@ std::unique_ptr<ir::Graph> MemoryOptimizePass::ApplyImpl(
       ir::Node* cache = pool_.FindBestFitNode(var);
       while (cache != nullptr && var->Name() == cache->Name()) {
         VLOG(3) << "The same cache variable is cascade reused. "
-                << var->Name() << " is re-filled to the pool after"
+                << cache->Name() << " is re-filled to the pool after "
                 << "the reused op is finished. Current op can not "
                 << "replace it again. Skip this candidate.";
         cache = pool_.FindNextBestFitNode(var, cache);
@@ -325,32 +324,6 @@ void MemoryOptimizePass::RenameVarInGraphNode(const std::string& var,
   }
 }
 
-void MemoryOptimizePass::ClearControlDepVars(ir::Graph* graph) const {
-  for (auto& op : graph->Nodes()) {
-    if (!op->IsOp()) continue;
-    {
-      auto& nodes = op->inputs;
-      nodes.erase(
-          std::remove_if(nodes.begin(), nodes.end(),
-                         [&](ir::Node* var) { return var->IsCtrlVar(); }),
-          nodes.end());
-    }
-    {
-      auto& nodes = op->outputs;
-      nodes.erase(
-          std::remove_if(nodes.begin(), nodes.end(),
-                         [&](ir::Node* var) { return var->IsCtrlVar(); }),
-          nodes.end());
-    }
-  }
-
-  for (auto& node : graph->Nodes()) {
-    if (node->IsCtrlVar()) {
-      graph->RemoveNode(node);
-    }
-  }
-}
-
 }  // namespace details
 }  // namespace framework
 }  // namespace paddle
diff --git a/paddle/fluid/framework/details/memory_optimize_pass.h b/paddle/fluid/framework/details/memory_optimize_pass.h
index f5d188101ffe60fb190702f15753491535f68a3a..593ffc10fc99d26b1ee9174ceef081581126e7e8 100644
--- a/paddle/fluid/framework/details/memory_optimize_pass.h
+++ b/paddle/fluid/framework/details/memory_optimize_pass.h
@@ -48,7 +48,6 @@ class MemoryOptimizePass : public ir::Pass {
   void RenameVarInGraphNode(const std::string& var,
                             const std::string& cache_var, size_t idx,
                             ir::Graph* graph) const;
-  void ClearControlDepVars(ir::Graph* graph) const;
 
   void SubGraphOptimize(OpDesc* op_desc) const;
   // 1. scan op with subblock and collect the output/input vars.
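Note (not part of the patch): the deleted `ClearControlDepVars()` above filtered control-dependency variables out of each op's `inputs`/`outputs` with the erase-remove_if idiom. The following is a minimal standalone sketch of that idiom for readers unfamiliar with it; `Node`, `is_ctrl_var`, and `RemoveCtrlVars` are simplified stand-ins invented for this example, not the real `paddle::framework::ir::Node` API.

```cpp
// Minimal sketch of the erase-remove_if idiom, assuming a simplified Node type.
#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

struct Node {
  std::string name;
  bool is_ctrl_var;  // hypothetical stand-in for ir::Node::IsCtrlVar()
};

// Drops every control-dependency variable from a node list in place,
// analogous to the filtering the removed pass code did on op->inputs/outputs.
void RemoveCtrlVars(std::vector<Node*>* nodes) {
  nodes->erase(std::remove_if(nodes->begin(), nodes->end(),
                              [](const Node* n) { return n->is_ctrl_var; }),
               nodes->end());
}

int main() {
  Node a{"fc_0.w", false}, dep{"ctrl_dep@0", true}, b{"fc_0.b", false};
  std::vector<Node*> inputs{&a, &dep, &b};
  RemoveCtrlVars(&inputs);
  for (const Node* n : inputs) std::cout << n->name << "\n";  // fc_0.w, fc_0.b
}
```

`std::remove_if` compacts the elements to keep at the front and returns an iterator to the new logical end; the follow-up `erase` then trims the leftover tail, so the filtering is a single pass that preserves the relative order of the surviving nodes.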