From 64f105047ec56202d3150ace39c8ff524d6cfe96 Mon Sep 17 00:00:00 2001
From: superjomn
Date: Fri, 19 Apr 2019 20:54:04 +0800
Subject: [PATCH] add program

---
 paddle/fluid/lite/api/cxx_api.h             | 11 +--
 paddle/fluid/lite/api/cxx_api_test.cc       |  5 +-
 paddle/fluid/lite/core/CMakeLists.txt       |  1 +
 paddle/fluid/lite/core/mir/ssa_graph.h      | 12 +--
 paddle/fluid/lite/core/op_executor.h        | 45 +++--------
 paddle/fluid/lite/core/optimizer.h          |  6 +-
 paddle/fluid/lite/core/program.cc           | 15 ++++
 paddle/fluid/lite/core/program.h            | 90 +++++++++++++++++++++
 paddle/fluid/lite/core/program_fake_utils.h |  7 +-
 paddle/fluid/lite/core/scope.h              |  9 +++
 10 files changed, 139 insertions(+), 62 deletions(-)
 create mode 100644 paddle/fluid/lite/core/program.cc
 create mode 100644 paddle/fluid/lite/core/program.h

diff --git a/paddle/fluid/lite/api/cxx_api.h b/paddle/fluid/lite/api/cxx_api.h
index d0d4e3d6c..f7016d568 100644
--- a/paddle/fluid/lite/api/cxx_api.h
+++ b/paddle/fluid/lite/api/cxx_api.h
@@ -24,16 +24,17 @@ struct Config {};
 
 class Predictor {
  public:
+  Predictor() { scope_ = std::make_shared<Scope>(); }
+
   void Build(const std::string& model_path,
              const std::vector<Place>& valid_places) {
     CHECK(!executor_.get()) << "duplicate build found";
+    CHECK(!scope_.get()) << "duplicate build found";
     framework::proto::ProgramDesc prog;
-    LoadModel(model_path, &scope_, &prog);
+    LoadModel(model_path, scope_.get(), &prog);
     framework::ProgramDesc prog_desc(prog);
 
-    executor_.reset(new Executor(&scope_, valid_places));
-    executor_->PrepareWorkspace(prog_desc);
-    executor_->Build(prog_desc);
+    executor_.reset(new Executor(prog_desc, scope_, valid_places));
   }
 
   // Get a tensor for input from scope directly.
@@ -53,7 +54,7 @@ class Predictor {
   void Run() { executor_->Run(); }
 
  private:
-  Scope scope_;
+  std::shared_ptr<Scope> scope_;
   std::unique_ptr<Executor> executor_;
 };

diff --git a/paddle/fluid/lite/api/cxx_api_test.cc b/paddle/fluid/lite/api/cxx_api_test.cc
index e1058de04..3f8a15170 100644
--- a/paddle/fluid/lite/api/cxx_api_test.cc
+++ b/paddle/fluid/lite/api/cxx_api_test.cc
@@ -26,8 +26,7 @@ TEST(CXXApi, raw) {
   LoadModel("/home/chunwei/project2/models/model2", &scope, &prog);
   framework::ProgramDesc prog_desc(prog);
 
-  lite::Executor executor(&scope,
-                          {OpLite::Place{TARGET(kHost), PRECISION(kFloat)}});
+  lite::Executor executor(&scope, {Place{TARGET(kHost), PRECISION(kFloat)}});
 
   auto x = scope.Var("a")->GetMutable<lite::Tensor>();
   x->Resize({100, 100});
@@ -41,7 +40,7 @@ TEST(CXXApi, raw) {
 TEST(CXXApi, test) {
   lite::Predictor predictor;
   predictor.Build("/home/chunwei/project2/models/model2",
-                  {OpLite::Place{TARGET(kHost), PRECISION(kFloat)}});
+                  {Place{TARGET(kHost), PRECISION(kFloat)}});
   auto* x = predictor.GetInputTensor("a");
   x->Resize({100, 200});
   x->mutable_data<float>();

diff --git a/paddle/fluid/lite/core/CMakeLists.txt b/paddle/fluid/lite/core/CMakeLists.txt
index 51d15a250..c7aa67b47 100644
--- a/paddle/fluid/lite/core/CMakeLists.txt
+++ b/paddle/fluid/lite/core/CMakeLists.txt
@@ -18,6 +18,7 @@ cc_library(program_fake_utils SRCS program_fake_utils.cc DEPS mir_ssa_graph
     ops_lite host_kernels
     )
+cc_library(program_lite SRCS program.cc DEPS op_lite kernel_lite)
 
 cc_test(test_scope_lite SRCS scope_test.cc DEPS scope_lite)
 cc_test(test_kernel_lite SRCS kernel_test.cc DEPS target_wrapper_x86)

diff --git a/paddle/fluid/lite/core/mir/ssa_graph.h b/paddle/fluid/lite/core/mir/ssa_graph.h
index f9f49e3e9..2f7922cdb 100644
--- a/paddle/fluid/lite/core/mir/ssa_graph.h
+++ b/paddle/fluid/lite/core/mir/ssa_graph.h
@@ -22,22 +22,12 @@
 #include "paddle/fluid/lite/core/kernel.h"
#include "paddle/fluid/lite/core/mir/node.h" #include "paddle/fluid/lite/core/op_lite.h" +#include "paddle/fluid/lite/core/program.h" namespace paddle { namespace lite { namespace mir { -// A program is used to represent a code program, in Paddle, a code program -// contains: -// - main block, which is a list of OpLite -// - scope: which contains all the weights -struct Program { - std::list tmp_vars; - std::list weights; - std::list> ops; - lite::Scope *scope{}; -}; - // An Graph for MIR. It is built from a list of Op and a scope. class GraphBase {}; diff --git a/paddle/fluid/lite/core/op_executor.h b/paddle/fluid/lite/core/op_executor.h index afe491652..126467a9a 100644 --- a/paddle/fluid/lite/core/op_executor.h +++ b/paddle/fluid/lite/core/op_executor.h @@ -16,6 +16,7 @@ #include "paddle/fluid/framework/program_desc.h" #include "paddle/fluid/lite/core/op_lite.h" #include "paddle/fluid/lite/core/op_registry.h" +#include "paddle/fluid/lite/core/program.h" #include "paddle/fluid/lite/core/scope.h" namespace paddle { @@ -24,41 +25,16 @@ namespace lite { // The Executor is used to run the operators. class Executor { public: - Executor(lite::Scope* scope, const std::vector& valid_places) - : scope_(scope), valid_places_(valid_places) {} - - // Create temporary variables. - void PrepareWorkspace(framework::ProgramDesc& program) { - CHECK(!exec_scope_) << "Duplicate PrepareWorkspace found"; - exec_scope_ = &scope_->NewScope(); - - for (auto var_desc : program.Block(0).AllVars()) { - if (!var_desc->Persistable()) { - auto* var = exec_scope_->Var(var_desc->Name()); - LOG(INFO) << "create tmp var " << var_desc->Name() << " " << var; - } - } - } - - // Build from a program and scope. - void Build(framework::ProgramDesc& program) { - CHECK(ops_.empty()) << "Executor duplicate Build found"; - - // Create operators. - for (auto* op_desc : program.Block(0).AllOps()) { - auto op_type = op_desc->Type(); - if (op_type == "feed" || op_type == "fetch") continue; - LOG(INFO) << "create Op [" << op_type << "]"; - ops_.emplace_back(LiteOpRegistry::Global().Create(op_type)); - // pick initial kernel - ops_.back()->PickKernel(valid_places_); - ops_.back()->Attach(*op_desc, exec_scope_); - } + Executor(const framework::ProgramDesc& desc, + const std::shared_ptr& scope, + const std::vector& valid_places) + : valid_places_(valid_places) { + program_.reset(new Program(desc, scope, valid_places)); } // Run the program. void Run() { - for (auto& op : ops_) { + for (auto& op : program_->ops) { LOG(INFO) << op->DebugString(); // TODO(Superjomn) check only once op->CheckShape(); @@ -67,14 +43,11 @@ class Executor { } } - lite::Scope* scope() { return scope_; } - lite::Scope* exec_scope() { return exec_scope_; } + const Program& program() const { return *program_; } private: - std::vector> ops_; - lite::Scope* scope_{}; std::vector valid_places_; - lite::Scope* exec_scope_{}; + std::unique_ptr program_; }; } // namespace lite diff --git a/paddle/fluid/lite/core/optimizer.h b/paddle/fluid/lite/core/optimizer.h index dabe75f18..38c53442d 100644 --- a/paddle/fluid/lite/core/optimizer.h +++ b/paddle/fluid/lite/core/optimizer.h @@ -27,7 +27,7 @@ namespace lite { */ class Optimizer { public: - void Run(mir::Program&& program, const std::vector& valid_places, + void Run(Program&& program, const std::vector& valid_places, const std::vector& passes = {}) { CHECK(!graph_) << "duplicate optimize found"; graph_.reset(new mir::SSAGraph); @@ -36,8 +36,8 @@ class Optimizer { } // Generate a new program based on the mir graph. 
-  std::unique_ptr<mir::Program> GenProgram() {
-    std::unique_ptr<mir::Program> res;
+  std::unique_ptr<Program> GenProgram() {
+    std::unique_ptr<Program> res;
     return res;
   }

diff --git a/paddle/fluid/lite/core/program.cc b/paddle/fluid/lite/core/program.cc
new file mode 100644
index 000000000..7a528740e
--- /dev/null
+++ b/paddle/fluid/lite/core/program.cc
@@ -0,0 +1,15 @@
+// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "paddle/fluid/lite/core/program.h"

diff --git a/paddle/fluid/lite/core/program.h b/paddle/fluid/lite/core/program.h
new file mode 100644
index 000000000..14eb0eda3
--- /dev/null
+++ b/paddle/fluid/lite/core/program.h
@@ -0,0 +1,90 @@
+// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#pragma once
+#include <list>
+#include <memory>
+#include <string>
+#include "paddle/fluid/framework/program_desc.h"
+#include "paddle/fluid/lite/core/kernel.h"
+#include "paddle/fluid/lite/core/op_lite.h"
+#include "paddle/fluid/lite/core/op_registry.h"
+
+namespace paddle {
+namespace lite {
+
+// A program is used to represent a code program, in Paddle, a code program
+// contains:
+// - main block, which is a list of OpLite
+// - scope: which contains all the weights
+struct Program {
+  std::list<std::string> tmp_vars;
+  std::list<std::string> weights;
+  std::list<std::shared_ptr<OpLite>> ops;
+  // the scope to run the kernels, NOTE not the root scope.
+  std::shared_ptr<lite::Scope> scope;
+  // Runtime scope.
+  lite::Scope* exec_scope{};
+
+  explicit Program(const std::shared_ptr<Scope>& root) { scope = root; }
+  Program(const framework::ProgramDesc& desc,
+          const std::shared_ptr<Scope>& root,
+          const std::vector<Place>& valid_places) {
+    scope = root;
+    PrepareWorkspace(desc);
+    Build(desc, valid_places);
+  }
+
+  std::unique_ptr<Program> Clone() const {
+    std::unique_ptr<Program> res(new Program(scope));
+    res->tmp_vars = tmp_vars;
+    res->weights = weights;
+    res->ops = ops;
+    return res;
+  }
+
+ private:
+  // Build from a program and scope.
+  void Build(const framework::ProgramDesc& program,
+             const std::vector<Place>& valid_places) {
+    CHECK(ops.empty()) << "Executor duplicate Build found";
+
+    // Create operators.
+    for (auto* op_desc : program.Block(0).AllOps()) {
+      auto op_type = op_desc->Type();
+      if (op_type == "feed" || op_type == "fetch") continue;
+      LOG(INFO) << "create Op [" << op_type << "]";
+      ops.emplace_back(LiteOpRegistry::Global().Create(op_type));
+      // pick initial kernel
+      ops.back()->PickKernel(valid_places);
+      ops.back()->Attach(*op_desc, exec_scope);
+    }
+  }
+
+  // Create temporary variables.
+  void PrepareWorkspace(const framework::ProgramDesc& program) {
+    CHECK(!exec_scope) << "Duplicate PrepareWorkspace found";
+    exec_scope = &scope->NewScope();
+
+    for (auto var_desc : program.Block(0).AllVars()) {
+      if (!var_desc->Persistable()) {
+        auto* var = exec_scope->Var(var_desc->Name());
+        LOG(INFO) << "create tmp var " << var_desc->Name() << " " << var;
+      }
+    }
+  }
+};
+
+}  // namespace lite
+}  // namespace paddle

diff --git a/paddle/fluid/lite/core/program_fake_utils.h b/paddle/fluid/lite/core/program_fake_utils.h
index 867cfc780..30f40cd9f 100644
--- a/paddle/fluid/lite/core/program_fake_utils.h
+++ b/paddle/fluid/lite/core/program_fake_utils.h
@@ -20,9 +20,8 @@
 namespace paddle {
 namespace lite {
 
-mir::Program FakeProgram() {
-  mir::Program program;
-  program.scope = new lite::Scope;
+Program FakeProgram() {
+  Program program(std::make_shared<lite::Scope>());
 
   auto add_fc = [&](int id, std::string x) {
     // create variables
@@ -48,7 +47,7 @@ mir::Program FakeProgram() {
 
     auto fc_op = LiteOpRegistry::Global().Create("fc");
     fc_op->PickKernel({Place{TARGET(kHost), PRECISION(kFloat)}});
-    fc_op->Attach(desc, program.scope);
+    fc_op->Attach(desc, program.scope.get());
     program.ops.emplace_back(std::move(fc_op));
 
     w1v->Resize({100, 100});

diff --git a/paddle/fluid/lite/core/scope.h b/paddle/fluid/lite/core/scope.h
index 1709dfe88..c67087e8a 100644
--- a/paddle/fluid/lite/core/scope.h
+++ b/paddle/fluid/lite/core/scope.h
@@ -39,6 +39,15 @@ class Scope final {
 
   const Scope* parent() const { return parent_; }
 
+  // Following the legacy scope interface.
+  std::vector<std::string> LocalVarNames() const {
+    std::vector<std::string> keys;
+    for (const auto& item : vars_) {
+      keys.push_back(item.first);
+    }
+    return keys;
+  }
+
  private:
   // Scope in `kids_` are owned by this class.
   mutable std::list<Scope*> kids_;
--
GitLab
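
Usage sketch: after this patch the Executor no longer exposes PrepareWorkspace()/Build(); constructing it builds a Program, which creates the exec scope and instantiates the ops in one step, and the Predictor in cxx_api.h wraps that. The snippet below is only a minimal sketch of the intended call flow, assuming the headers in this patch; the model path and the input variable name "a" are placeholders mirroring cxx_api_test.cc, and the qualification of Place follows how that test uses it.

```cpp
#include "paddle/fluid/lite/api/cxx_api.h"

int main() {
  // Build once: LoadModel fills the Predictor's root scope, and the Executor
  // constructor builds the Program (exec scope + op instances) from the desc.
  paddle::lite::Predictor predictor;
  predictor.Build("/path/to/model",  // placeholder model directory
                  {paddle::lite::Place{TARGET(kHost), PRECISION(kFloat)}});

  // Inputs are looked up in the scope by variable name; "a" mirrors the
  // variable used in cxx_api_test.cc and is a placeholder here.
  auto* x = predictor.GetInputTensor("a");
  x->Resize({100, 200});
  x->mutable_data<float>();

  // Run() walks Program::ops and executes each op (CheckShape first, as shown
  // in op_executor.h).
  predictor.Run();
  return 0;
}
```

A design point worth noting: Program now owns the variable lists, the op list, and the runtime scope, so the same structure feeds both Optimizer::Run(Program&&, ...) and Executor, and Clone() hands out a copy that shares the op objects (std::shared_ptr) and the root scope rather than re-reading the ProgramDesc.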