From d02b0a08f7e2846a95f347d875de5a13a36349c3 Mon Sep 17 00:00:00 2001 From: Megvii Engine Team Date: Tue, 28 Sep 2021 18:08:42 +0800 Subject: [PATCH] feat(lite): fix typo GitOrigin-RevId: 8c46aa3a303b8a9ad5e29b8779bb4961fa3e3a85 --- imperative/src/impl/backward_graph_opt.cpp | 2 +- imperative/src/impl/op_def.cpp | 8 ++++---- imperative/src/impl/ops/utility.cpp | 12 ++++++------ imperative/src/impl/proxy_graph.cpp | 4 ++-- imperative/src/impl/proxy_graph.h | 2 +- imperative/src/impl/proxy_graph_detail.cpp | 2 +- imperative/src/impl/subgraph.cpp | 4 ++-- imperative/src/impl/subgraph_detail.cpp | 6 +++--- .../include/megbrain/imperative/backward_graph_opt.h | 2 +- .../src/include/megbrain/imperative/graph_builder.h | 2 +- imperative/src/include/megbrain/imperative/op_def.h | 4 ++-- .../src/include/megbrain/imperative/ops/utility.h | 2 +- .../include/megbrain/imperative/proxy_graph_detail.h | 2 +- .../src/include/megbrain/imperative/subgraph.h | 10 +++++----- .../include/megbrain/imperative/subgraph_detail.h | 4 ++-- imperative/src/test/backward_graph.cpp | 2 +- lite/pylite/pylite.md | 4 ++-- lite/pylite/test/test_global.py | 2 +- lite/pylite/test/test_network.py | 2 +- lite/pylite/test/test_network_cuda.py | 2 +- 20 files changed, 39 insertions(+), 39 deletions(-) diff --git a/imperative/src/impl/backward_graph_opt.cpp b/imperative/src/impl/backward_graph_opt.cpp index 007f18c2a..2ec1fa275 100644 --- a/imperative/src/impl/backward_graph_opt.cpp +++ b/imperative/src/impl/backward_graph_opt.cpp @@ -16,7 +16,7 @@ using namespace mgb; using namespace imperative; -OptimizedBackwardGraphResult::OptimizedBackwardGraphResult(const EncodedSubraph& src) +OptimizedBackwardGraphResult::OptimizedBackwardGraphResult(const EncodedSubgraph& src) : input_has_grad(src.output_mask) { if (src.graph.exprs.size() <= 1) { // backward graph only contains a single op diff --git a/imperative/src/impl/op_def.cpp b/imperative/src/impl/op_def.cpp index 7e2b00c6f..225417b23 100644 --- a/imperative/src/impl/op_def.cpp +++ b/imperative/src/impl/op_def.cpp @@ -80,12 +80,12 @@ std::tuple, bool> OpDef::infer_output_attrs_falli return def.trait()->infer_output_attrs_fallible(def, inputs); } -EncodedSubraph OpDef::make_backward_graph( +EncodedSubgraph OpDef::make_backward_graph( const OpDef& def, const SmallVector& inputs, const SmallVector& input_requires_grad, const SmallVector& output_has_grad) { - using BackwardGraphCache = OpMethResultCache, SmallVector>; + using BackwardGraphCache = OpMethResultCache, SmallVector>; thread_local BackwardGraphCache cache; decltype(cache)::key_t cache_key{const_cast(def).shared_from_this(), inputs, {input_requires_grad, output_has_grad}}; auto iter = cache.find(cache_key); @@ -100,10 +100,10 @@ std::vector> OpDef::props( return def.trait()->props(def); } -EncodedSubraph OpDef::make_forward_graph( +EncodedSubgraph OpDef::make_forward_graph( const OpDef& def, const SmallVector& inputs){ - using ForwardGraphCache = OpMethResultCache, SmallVector>; + using ForwardGraphCache = OpMethResultCache, SmallVector>; thread_local ForwardGraphCache cache; decltype(cache)::key_t cache_key{const_cast(def).shared_from_this(), inputs}; auto iter = cache.find(cache_key); diff --git a/imperative/src/impl/ops/utility.cpp b/imperative/src/impl/ops/utility.cpp index 5c208ed5d..6d754d0b0 100644 --- a/imperative/src/impl/ops/utility.cpp +++ b/imperative/src/impl/ops/utility.cpp @@ -182,11 +182,11 @@ OP_TRAIT_REG(Identity, Identity) namespace { namespace subgraph { -EncodedSubraph make_forward_graph(const OpDef& def, 
SmallVector inputs) { - return EncodedSubraph::make(*def.cast_final_safe().graph); +EncodedSubgraph make_forward_graph(const OpDef& def, SmallVector inputs) { + return EncodedSubgraph::make(*def.cast_final_safe().graph); } -EncodedSubraph make_backward_graph( +EncodedSubgraph make_backward_graph( const OpDef& def, const SmallVector& inputs, const SmallVector& input_requires_grad, @@ -199,7 +199,7 @@ EncodedSubraph make_backward_graph( } } auto bgraph = subgraph_detail::make_backward_graph(def, inputs, input_requires_grad, output_has_grad); - return EncodedSubraph::make_single( + return EncodedSubgraph::make_single( SubgraphOp::make(op.name + "Grad", std::make_shared(bgraph.graph)), bgraph.input_mask, bgraph.output_mask); @@ -430,7 +430,7 @@ std::tuple, SmallVector> infer_output_mem_de return {}; } -EncodedSubraph make_backward_graph( +EncodedSubgraph make_backward_graph( const OpDef& def, const SmallVector& inputs, const SmallVector& input_requires_grad, @@ -452,7 +452,7 @@ EncodedSubraph make_backward_graph( grad_outputs_has_grad, key); } auto compiled_op = CompiledOp::make(bgraph_op, op.gopt_level); - auto encoded_graph = EncodedSubraph::make_single(compiled_op, backward_graph.input_mask, backward_graph.output_mask); + auto encoded_graph = EncodedSubgraph::make_single(compiled_op, backward_graph.input_mask, backward_graph.output_mask); return encoded_graph; } diff --git a/imperative/src/impl/proxy_graph.cpp b/imperative/src/impl/proxy_graph.cpp index f5d82ba5d..0b9eb3d32 100644 --- a/imperative/src/impl/proxy_graph.cpp +++ b/imperative/src/impl/proxy_graph.cpp @@ -669,7 +669,7 @@ struct ProxyGraph::GradGraph { cg::VarNode* grad; }; -EncodedSubraph +EncodedSubgraph ProxyGraph::make_backward_graph( const OpDef& opdef, const SmallVector& input_descs, @@ -704,7 +704,7 @@ ProxyGraph::make_backward_graph( } auto* gfunc = cg::lookup_grad_func(fwd->dyn_typeinfo()); - EncodedSubraph result; + EncodedSubgraph result; auto&& igraph = result.graph; size_t nr_backward_graph_inputs = 0; diff --git a/imperative/src/impl/proxy_graph.h b/imperative/src/impl/proxy_graph.h index eecd7191a..90d076bb1 100644 --- a/imperative/src/impl/proxy_graph.h +++ b/imperative/src/impl/proxy_graph.h @@ -40,7 +40,7 @@ public: const SmallVector& outputs, const SmallVector& workspace); - EncodedSubraph make_backward_graph( + EncodedSubgraph make_backward_graph( const OpDef& opdef, const SmallVector& input_descs, const SmallVector& input_requires_grad, diff --git a/imperative/src/impl/proxy_graph_detail.cpp b/imperative/src/impl/proxy_graph_detail.cpp index 7546d2901..be6842efa 100644 --- a/imperative/src/impl/proxy_graph_detail.cpp +++ b/imperative/src/impl/proxy_graph_detail.cpp @@ -113,7 +113,7 @@ void execute(const OpDef& def, // return graph->infer_output_attrs_fallible(def, inputs); // } -EncodedSubraph +EncodedSubgraph make_backward_graph(const OpDef& def, const SmallVector& inputs, const SmallVector& input_requires_grad, diff --git a/imperative/src/impl/subgraph.cpp b/imperative/src/impl/subgraph.cpp index 212627b7f..d69f8ccc3 100644 --- a/imperative/src/impl/subgraph.cpp +++ b/imperative/src/impl/subgraph.cpp @@ -101,7 +101,7 @@ void Subgraph::replace_vars( } } -std::string EncodedSubraph::repr() const { +std::string EncodedSubgraph::repr() const { std::string buffer; buffer.push_back('|'); for (size_t i = 0; i < input_mask.size(); ++i) { @@ -118,7 +118,7 @@ std::string EncodedSubraph::repr() const { return buffer; } -size_t EncodedSubraph::hash() const { +size_t EncodedSubgraph::hash() const { return 
std::hash{}(repr()); } diff --git a/imperative/src/impl/subgraph_detail.cpp b/imperative/src/impl/subgraph_detail.cpp index c345a3077..108698ee9 100644 --- a/imperative/src/impl/subgraph_detail.cpp +++ b/imperative/src/impl/subgraph_detail.cpp @@ -76,11 +76,11 @@ SmallVector apply_on_physical_tensor( return outputs; } -static EncodedSubraph make_backward_graph_from_forward( +static EncodedSubgraph make_backward_graph_from_forward( const SmallVector& inputs, const SmallVector& input_requires_grad, const SmallVector& output_has_grad, - EncodedSubraph forward_graph) { + EncodedSubgraph forward_graph) { using namespace std::placeholders; using var_t = Subgraph::var_t; using vars_t = Subgraph::vars_t; @@ -149,7 +149,7 @@ static EncodedSubraph make_backward_graph_from_forward( return backward_graph; } -EncodedSubraph make_backward_graph( +EncodedSubgraph make_backward_graph( const OpDef& def, const SmallVector& inputs, const SmallVector& input_requires_grad, diff --git a/imperative/src/include/megbrain/imperative/backward_graph_opt.h b/imperative/src/include/megbrain/imperative/backward_graph_opt.h index de1e2c1c8..a60847e50 100644 --- a/imperative/src/include/megbrain/imperative/backward_graph_opt.h +++ b/imperative/src/include/megbrain/imperative/backward_graph_opt.h @@ -19,7 +19,7 @@ struct OptimizedBackwardGraphResult { SmallVector save_for_backward; SmallVector input_has_grad; - OptimizedBackwardGraphResult(const EncodedSubraph& bgraph); + OptimizedBackwardGraphResult(const EncodedSubgraph& bgraph); }; } // namespace mgb::imperative diff --git a/imperative/src/include/megbrain/imperative/graph_builder.h b/imperative/src/include/megbrain/imperative/graph_builder.h index 05185c60a..0e59c42f2 100644 --- a/imperative/src/include/megbrain/imperative/graph_builder.h +++ b/imperative/src/include/megbrain/imperative/graph_builder.h @@ -29,7 +29,7 @@ class Subgraph::Builder { using desc_t = TDesc; using descs_t = SmallVector; using infer_fn_t = std::function; - using encoded_graph_t = EncodedSubraph; + using encoded_graph_t = EncodedSubgraph; using var_map_t = std::unordered_map; vars_t m_inputs; SmallVector> m_constants; diff --git a/imperative/src/include/megbrain/imperative/op_def.h b/imperative/src/include/megbrain/imperative/op_def.h index 85f4844e2..31400b66d 100644 --- a/imperative/src/include/megbrain/imperative/op_def.h +++ b/imperative/src/include/megbrain/imperative/op_def.h @@ -87,7 +87,7 @@ public: const SmallVector& inputs_tensors, const SmallVector& inputs_mems); - static EncodedSubraph make_backward_graph( + static EncodedSubgraph make_backward_graph( const OpDef& def, const SmallVector& inputs, const SmallVector& input_requires_grad, @@ -96,7 +96,7 @@ public: static std::vector> props( const OpDef& def); - static EncodedSubraph make_forward_graph( + static EncodedSubgraph make_forward_graph( const OpDef& def, const SmallVector& inputs); diff --git a/imperative/src/include/megbrain/imperative/ops/utility.h b/imperative/src/include/megbrain/imperative/ops/utility.h index e5bbb14c4..a7e1af3ac 100644 --- a/imperative/src/include/megbrain/imperative/ops/utility.h +++ b/imperative/src/include/megbrain/imperative/ops/utility.h @@ -40,7 +40,7 @@ struct ShapeInfer final : OpDefImplBase { std::shared_ptr op; SmallVector devices; SmallVector dtypes; - EncodedSubraph graph; + EncodedSubgraph graph; ShapeInfer() = default; ShapeInfer(std::shared_ptr op, SmallVector devices, SmallVector dtypes) diff --git a/imperative/src/include/megbrain/imperative/proxy_graph_detail.h 
b/imperative/src/include/megbrain/imperative/proxy_graph_detail.h index 4ff95dcef..e8b08f26f 100644 --- a/imperative/src/include/megbrain/imperative/proxy_graph_detail.h +++ b/imperative/src/include/megbrain/imperative/proxy_graph_detail.h @@ -38,7 +38,7 @@ void exec(const OpDef& def, const SmallVector& inputs, const SmallVector& outputs); -EncodedSubraph +EncodedSubgraph make_backward_graph(const OpDef& def, const SmallVector& inputs, const SmallVector& input_requires_grad, diff --git a/imperative/src/include/megbrain/imperative/subgraph.h b/imperative/src/include/megbrain/imperative/subgraph.h index dce8dc6fb..897280283 100644 --- a/imperative/src/include/megbrain/imperative/subgraph.h +++ b/imperative/src/include/megbrain/imperative/subgraph.h @@ -96,7 +96,7 @@ struct Subgraph { bool operator==(const Subgraph& rhs) const; }; -struct EncodedSubraph { +struct EncodedSubgraph { Subgraph graph; SmallVector input_mask; SmallVector output_mask; @@ -146,8 +146,8 @@ struct EncodedSubraph { return decoded_outputs; } - static EncodedSubraph make(Subgraph graph) { - EncodedSubraph result; + static EncodedSubgraph make(Subgraph graph) { + EncodedSubgraph result; result.input_mask = graph.gen_input_mask(); result.output_mask = graph.gen_output_mask(); graph.inputs = result.encode_inputs(graph.inputs); @@ -156,11 +156,11 @@ struct EncodedSubraph { return result; } - static EncodedSubraph make_single( + static EncodedSubgraph make_single( std::shared_ptr op, SmallVector input_mask, SmallVector output_mask) { - EncodedSubraph result; + EncodedSubgraph result; result.input_mask = input_mask; result.output_mask = output_mask; Subgraph::var_t last_var = 0; diff --git a/imperative/src/include/megbrain/imperative/subgraph_detail.h b/imperative/src/include/megbrain/imperative/subgraph_detail.h index d0cb6641c..0583650c6 100644 --- a/imperative/src/include/megbrain/imperative/subgraph_detail.h +++ b/imperative/src/include/megbrain/imperative/subgraph_detail.h @@ -24,7 +24,7 @@ apply_on_physical_tensor(const OpDef& def, std::tuple, bool> infer_output_attrs_fallible(const OpDef& def, const SmallVector& inputs); -EncodedSubraph +EncodedSubgraph make_backward_graph(const OpDef& def, const SmallVector& inputs, const SmallVector& input_requires_grad, @@ -35,7 +35,7 @@ apply_on_var_node( const OpDef& def, const VarNodeArray& inputs); -EncodedSubraph make_backward_graph( +EncodedSubgraph make_backward_graph( const OpDef& def, const SmallVector& inputs, const SmallVector& input_requires_grad, diff --git a/imperative/src/test/backward_graph.cpp b/imperative/src/test/backward_graph.cpp index 2d0e972d6..350fadbbd 100644 --- a/imperative/src/test/backward_graph.cpp +++ b/imperative/src/test/backward_graph.cpp @@ -22,7 +22,7 @@ using namespace cg; using namespace imperative; template -T prepare_backward_graph_inputs(const EncodedSubraph& bg, const T& inputs, +T prepare_backward_graph_inputs(const EncodedSubgraph& bg, const T& inputs, const T& outputs, const T& grads) { T ret; size_t i = 0; diff --git a/lite/pylite/pylite.md b/lite/pylite/pylite.md index bccaf7fee..12ab761ee 100755 --- a/lite/pylite/pylite.md +++ b/lite/pylite/pylite.md @@ -143,7 +143,7 @@ LiteNetwork 主要为用户提供模型载入,运行等功能。使用的模 * CPU 基本模型载入运行的 example ``` def test_network_basic(): - source_dir = os.getenv("LITE_TEST_RESOUCE") + source_dir = os.getenv("LITE_TEST_RESOURCE") input_data_path = os.path.join(source_dir, "input_data.npy") # read input to input_data input_data = np.load(input_data_path) @@ -176,7 +176,7 @@ def test_network_basic(): * CUDA 上使用 device 
内存作为模型输入,需要在构造 network 候配置 config 和 IO 信息 ``` def test_network_device_IO(): - source_dir = os.getenv("LITE_TEST_RESOUCE") + source_dir = os.getenv("LITE_TEST_RESOURCE") input_data_path = os.path.join(source_dir, "input_data.npy") model_path = os.path.join(source_dir, "shufflenet.mge") # read input to input_data diff --git a/lite/pylite/test/test_global.py b/lite/pylite/test/test_global.py index 3fc35dd94..d0bcfd4cc 100644 --- a/lite/pylite/test/test_global.py +++ b/lite/pylite/test/test_global.py @@ -18,7 +18,7 @@ set_log_level(2) class TestShuffleNet(unittest.TestCase): - source_dir = os.getenv("LITE_TEST_RESOUCE") + source_dir = os.getenv("LITE_TEST_RESOURCE") input_data_path = os.path.join(source_dir, "input_data.npy") correct_data_path = os.path.join(source_dir, "output_data.npy") correct_data = np.load(correct_data_path).flatten() diff --git a/lite/pylite/test/test_network.py b/lite/pylite/test/test_network.py index 44b3ae7cd..f445a35fa 100644 --- a/lite/pylite/test/test_network.py +++ b/lite/pylite/test/test_network.py @@ -52,7 +52,7 @@ def test_network_io(): class TestShuffleNet(unittest.TestCase): - source_dir = os.getenv("LITE_TEST_RESOUCE") + source_dir = os.getenv("LITE_TEST_RESOURCE") input_data_path = os.path.join(source_dir, "input_data.npy") correct_data_path = os.path.join(source_dir, "output_data.npy") model_path = os.path.join(source_dir, "shufflenet.mge") diff --git a/lite/pylite/test/test_network_cuda.py b/lite/pylite/test/test_network_cuda.py index e92eab2f9..a5b2ac839 100644 --- a/lite/pylite/test/test_network_cuda.py +++ b/lite/pylite/test/test_network_cuda.py @@ -33,7 +33,7 @@ def require_cuda(ngpu=1): class TestShuffleNetCuda(unittest.TestCase): - source_dir = os.getenv("LITE_TEST_RESOUCE") + source_dir = os.getenv("LITE_TEST_RESOURCE") input_data_path = os.path.join(source_dir, "input_data.npy") correct_data_path = os.path.join(source_dir, "output_data.npy") model_path = os.path.join(source_dir, "shufflenet.mge") -- GitLab
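
The Python-side change in this patch is only the spelling of the test resource environment variable. Below is a minimal sketch of the lookup pattern the patched pylite tests rely on; the explicit check for an unset variable is an added assumption for clarity, since the tests in this patch call `os.getenv("LITE_TEST_RESOURCE")` and use the result directly, and the helper name `load_test_input` is hypothetical.

```
import os

import numpy as np


def load_test_input():
    # Resolve the pylite test resource directory from the (correctly
    # spelled) LITE_TEST_RESOURCE environment variable, as the patched
    # tests do.
    source_dir = os.getenv("LITE_TEST_RESOURCE")
    if source_dir is None:
        # Assumption: fail loudly when the variable is not exported;
        # the tests themselves expect the test environment to set it.
        raise RuntimeError("LITE_TEST_RESOURCE is not set")
    input_data_path = os.path.join(source_dir, "input_data.npy")
    # read input to input_data, mirroring test_network_basic above
    return np.load(input_data_path)
```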