Unverified commit 759dae04. Author: Xin Pan. Committer: GitHub.

Merge pull request #13461 from panyx0718/ir3

Clean up the unused inference_optimize C++ implementation
@@ -183,28 +183,5 @@ void Prune(const proto::ProgramDesc& input, proto::ProgramDesc* output) {
  output->clear_blocks();
  prune_impl(input, output, 0, -1, &dependent_vars);
}
void inference_optimize_impl(proto::ProgramDesc* input, int block_id) {
auto* op_field = input->mutable_blocks(block_id)->mutable_ops();
for (auto& op_desc : *op_field) {
for (auto& attr : *op_desc.mutable_attrs()) {
if (attr.name() == "is_test") {
attr.set_b(true);
break;
}
}
}
}
void InferenceOptimize(const proto::ProgramDesc& input,
proto::ProgramDesc* output) {
*output = input;
int num_blocks = output->blocks_size();
PADDLE_ENFORCE_GT(num_blocks, 0, "ProgramDesc must have at least one block");
for (int i = 0; i < num_blocks; ++i) {
inference_optimize_impl(output, i);
}
}
}  // namespace framework
}  // namespace paddle
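For context, the deleted `InferenceOptimize` pass copies the input `proto::ProgramDesc` and forces the `is_test` attribute of every op in every block to `true`. The same transformation is only a few lines of Python over the serialized program, which is part of why the C++ path is no longer needed. The sketch below is illustrative only: it assumes the generated protobuf bindings are importable as `paddle.fluid.proto.framework_pb2`, and it uses the message fields (`blocks`, `ops`, `attrs`, `name`, `b`) implied by the accessors in the deleted C++ code.

```python
# Illustrative sketch only; not part of this change. The import path is an
# assumption about where the generated framework.proto bindings live.
from paddle.fluid.proto import framework_pb2


def force_is_test(serialized_program):
    """Return a serialized copy of the program with every is_test attr set to True."""
    prog = framework_pb2.ProgramDesc()
    prog.ParseFromString(serialized_program)
    for block in prog.blocks:          # per-block loop, as in inference_optimize_impl
        for op in block.ops:
            for attr in op.attrs:
                if attr.name == "is_test":
                    attr.b = True      # same effect as attr.set_b(true) above
                    break
    return prog.SerializeToString()
```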
@@ -22,8 +22,5 @@ namespace framework {
void Prune(const proto::ProgramDesc& input, proto::ProgramDesc* output);
void InferenceOptimize(const proto::ProgramDesc& input,
proto::ProgramDesc* output);
}  // namespace framework
}  // namespace paddle
@@ -396,11 +396,6 @@ All parameter, weight, gradient are variables in Paddle.
    Prune(*prog_with_targets.Proto(), &pruned_desc);
    return new ProgramDesc(pruned_desc);
  });
m.def("inference_optimize", [](ProgramDesc &origin) {
proto::ProgramDesc pruned_desc;
InferenceOptimize(*(origin.Proto()), &pruned_desc);
return new ProgramDesc(pruned_desc);
});
m.def("empty_var_name", m.def("empty_var_name",
[]() { return std::string(framework::kEmptyVarName); }); []() { return std::string(framework::kEmptyVarName); });
m.def("grad_var_suffix", m.def("grad_var_suffix",
...
@@ -1738,8 +1738,6 @@ class Program(object):
        Returns:
            Program: The new program.
        """
# this is an alternative implement before
# core.inference_optimize being fixed.
        res = Program()
        res.desc = core.ProgramDesc(self.desc)
...
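The Python-side `Program.inference_optimize` (whose body is truncated above) is now the only implementation of this step, which is why the C++ pass and its `core.inference_optimize` binding can be removed. A rough sketch of the part of that method that flips the flag is below; the helper name is hypothetical, and the desc accessor names (`num_blocks`, `block`, `op_size`, `op`, `has_attr`, `set_attr`) are assumptions about the pybind ProgramDesc/OpDesc API of that period.

```python
# Rough sketch, not the verbatim framework code; the desc accessor names are
# assumptions about the pybind ProgramDesc/OpDesc API of that period.
def _force_is_test(program_desc):
    for i in range(program_desc.num_blocks()):
        block = program_desc.block(i)
        for j in range(block.op_size()):
            op = block.op(j)
            if op.has_attr('is_test'):
                op.set_attr('is_test', True)   # mirror of the removed C++ loop
```

Inside `inference_optimize`, this kind of loop would run against `res.desc` before the new `Program` is returned.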