Unverified commit 4e05ea29 authored by zhupengyang, committed by GitHub

remove feed and fetch for npu subgraph pass (#2230)

test=develop
Parent f0a6c1eb
@@ -207,6 +207,8 @@ void SubgraphProgramPass::InferOnce(const std::unique_ptr<SSAGraph>& graph) {
     if (!item->IsStmt()) continue;
     auto& stmt = item->AsStmt();
     auto& op = stmt.op();
+    std::string op_type = op->op_info()->Type();
+    if (op_type == "feed" || op_type == "fetch") continue;
     op->CheckShape();
     op->InferShape();
     // TODO(xxx): remove Launch() at last
......
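The two lines added in the hunk above are the whole fix on the pass side: before calling CheckShape()/InferShape(), the pass now reads each statement's op type and skips feed and fetch ops, which only move tensors between the scope and the runtime and have nothing useful to shape-infer inside an NPU subgraph. Below is a minimal, self-contained sketch of that filtering pattern; FakeOp and the hard-coded op list are hypothetical stand-ins for the real MIR statement objects, not Paddle-Lite types.

#include <iostream>
#include <string>
#include <vector>

// Hypothetical stand-in for the real op object; only the Type() accessor matters here.
struct FakeOp {
  std::string type;
  const std::string& Type() const { return type; }
};

int main() {
  std::vector<FakeOp> ops = {{"feed"}, {"conv2d"}, {"relu"}, {"fetch"}};
  for (const auto& op : ops) {
    const std::string& op_type = op.Type();
    // Same guard as the patch: feed/fetch are skipped before shape inference.
    if (op_type == "feed" || op_type == "fetch") continue;
    std::cout << "InferShape on " << op_type << std::endl;  // stands in for CheckShape()/InferShape()
  }
  return 0;
}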
@@ -126,7 +126,6 @@ class Optimizer {
                   valid_places_.end(),
                   Place{TARGET(kNPU), PRECISION(kFloat)}) !=
         valid_places_.end()) {
-      CheckInputDimsNotEmpty(exec_scope_);
       auto pass = mir::PassManager::Global()
                       .LookUp<mir::subgraph::GenerateNPUProgramPass>(
                           "generate_npu_program_pass");
@@ -150,19 +149,6 @@
     return program;
   }
-  // check the input dims in the scope, must not be empty
-  void CheckInputDimsNotEmpty(const lite::Scope* scope) {
-    CHECK(scope);
-    auto* feed_var = scope->FindVar("feed");
-    CHECK(feed_var) << "no feed variable in exec_scope: " << scope;
-    auto* feed_tensor_list = feed_var->GetMutable<std::vector<lite::Tensor>>();
-    CHECK_GE(feed_tensor_list->size(), 1);
-    for (size_t i = 0; i < feed_tensor_list->size(); ++i) {
-      CHECK(!feed_tensor_list->at(i).dims().empty())
-          << "Input " << i << " dims can not be empty.";
-    }
-  }
   void InitTargetTypeTransformPass() {
     auto* pass =
         mir::PassManager::Global().LookUp<mir::TypeTargetTransformPass>(
......
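With CheckInputDimsNotEmpty removed from Optimizer, nothing on the NPU path verifies the feed tensors' dims during graph optimization any more; the caller is still expected to resize and fill the inputs before Run(). The sketch below shows how a client typically does that with the Paddle-Lite C++ API of this era; the model directory, valid places, and input shape are placeholder assumptions, and the exact lite_api signatures may differ between releases.

#include <vector>

#include "paddle_api.h"  // paddle::lite_api, shipped with the Paddle-Lite prebuilt libs

using namespace paddle::lite_api;  // NOLINT

int main() {
  // Placeholder model directory and valid places; adjust for a real deployment.
  CxxConfig config;
  config.set_model_dir("./mobilenet_v1");
  config.set_valid_places({Place{TARGET(kNPU), PRECISION(kFloat)},
                           Place{TARGET(kARM), PRECISION(kFloat)}});
  auto predictor = CreatePaddlePredictor<CxxConfig>(config);

  // The caller resizes and fills the feed tensor explicitly; the optimizer no
  // longer checks these dims for the NPU subgraph pass.
  auto input = predictor->GetInput(0);
  input->Resize({1, 3, 224, 224});
  auto* data = input->mutable_data<float>();
  for (int i = 0; i < 1 * 3 * 224 * 224; ++i) data[i] = 1.f;

  predictor->Run();
  auto output = predictor->GetOutput(0);
  (void)output;
  return 0;
}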