From 2e6de4ced9141864cbfba1cf8f403dbb28363e54 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E5=BC=A0=E6=98=A5=E4=B9=94?= <83450930+Liyulingyue@users.noreply.github.com>
Date: Thu, 29 Jun 2023 15:13:39 +0800
Subject: [PATCH] [CodeStyle][CINN] fix end of file and trailing whitespace
 (#54955)

* cinn 5 6

* roll back something

* roll back something

* fix codestyle of cinn

* revert symlink changes

---------

Co-authored-by: SigureMo

---
 .../search_space/block_sampler.cc             | 65 ++++++++++++-------
 .../search_space/rule_sampler.cc              | 31 +++++----
 .../frontend/op_mappers/science/broadcast.cc  | 39 ++++++-----
 paddle/cinn/hlir/pass/dot_merger_test.cc      | 54 ++++++++-------
 paddle/cinn/lang/README.md                    |  6 +-
 paddle/cinn/pybind/utils.cc                   |  6 +-
 paddle/cinn/utils/profiler_test.cc            | 10 +--
 tools/cinn/codestyle/.gitignore               |  2 +-
 8 files changed, 128 insertions(+), 85 deletions(-)

diff --git a/paddle/cinn/auto_schedule/search_space/block_sampler.cc b/paddle/cinn/auto_schedule/search_space/block_sampler.cc
index 85bad94baea..26b00d3a89f 100644
--- a/paddle/cinn/auto_schedule/search_space/block_sampler.cc
+++ b/paddle/cinn/auto_schedule/search_space/block_sampler.cc
@@ -21,36 +21,48 @@
 namespace cinn {
 namespace auto_schedule {
 
-std::unique_ptr<BlockSampler> BlockSampler::Make(const std::vector<ir::Expr>& all_blocks,
-                                                 bool default_remove_policy,
-                                                 const std::string& strategy,
-                                                 utils::LinearRandomEngine::StateType rand_seed,
-                                                 const std::vector<int>& weights) {
+std::unique_ptr<BlockSampler> BlockSampler::Make(
+    const std::vector<ir::Expr>& all_blocks,
+    bool default_remove_policy,
+    const std::string& strategy,
+    utils::LinearRandomEngine::StateType rand_seed,
+    const std::vector<int>& weights) {
   CHECK_GT(all_blocks.size(), 0) << "Empty block list";
   if (strategy == "traversal") {
-    VLOG(6) << "Init TraversalBlockSampler with block num = " << all_blocks.size();
-    return std::make_unique<TraversalBlockSampler>(all_blocks, default_remove_policy);
+    VLOG(6) << "Init TraversalBlockSampler with block num = "
+            << all_blocks.size();
+    return std::make_unique<TraversalBlockSampler>(all_blocks,
+                                                   default_remove_policy);
   } else if (strategy == "probabilistic") {
-    VLOG(6) << "Init ProbabilisticBlockSampler with block num = " << all_blocks.size();
-    return std::make_unique<ProbabilisticBlockSampler>(all_blocks, default_remove_policy, rand_seed, weights);
+    VLOG(6) << "Init ProbabilisticBlockSampler with block num = "
+            << all_blocks.size();
+    return std::make_unique<ProbabilisticBlockSampler>(
+        all_blocks, default_remove_policy, rand_seed, weights);
   }
 
   LOG(FATAL) << "Unimplemented strategy:" << strategy;
   return nullptr;
 }
 
-BlockSampler::BlockSampler(const std::vector<ir::Expr>& all_blocks, bool default_remove_policy) {
+BlockSampler::BlockSampler(const std::vector<ir::Expr>& all_blocks,
+                           bool default_remove_policy) {
   default_remove_policy_ = default_remove_policy;
-  std::transform(all_blocks.begin(), all_blocks.end(), std::back_inserter(all_blocks_), [](const ir::Expr& block_expr) {
-    const ir::ScheduleBlockRealize* block_realize = block_expr.As<ir::ScheduleBlockRealize>();
-    const ir::ScheduleBlock* block = block_realize->schedule_block.As<ir::ScheduleBlock>();
-    return block->name;
-  });
+  std::transform(all_blocks.begin(),
+                 all_blocks.end(),
+                 std::back_inserter(all_blocks_),
+                 [](const ir::Expr& block_expr) {
+                   const ir::ScheduleBlockRealize* block_realize =
+                       block_expr.As<ir::ScheduleBlockRealize>();
+                   const ir::ScheduleBlock* block =
+                       block_realize->schedule_block.As<ir::ScheduleBlock>();
+                   return block->name;
+                 });
 }
 
 std::string TraversalBlockSampler::NextBlock(bool remove) {
   if (cur_idx_ < all_blocks_.size()) {
-    VLOG(6) << "[TraversalBlockSampler] next block: " << all_blocks_.at(cur_idx_);
+    VLOG(6) << "[TraversalBlockSampler] next block: "
+            << all_blocks_.at(cur_idx_);
     std::string block_name = all_blocks_.at(cur_idx_);
     if (remove) {
       ++cur_idx_;
@@ -62,11 +74,14 @@ std::string TraversalBlockSampler::NextBlock(bool remove) {
   return "";
 }
 
-ProbabilisticBlockSampler::ProbabilisticBlockSampler(const std::vector<ir::Expr>& all_blocks,
-                                                     bool default_remove_policy,
-                                                     utils::LinearRandomEngine::StateType rand_seed,
-                                                     const std::vector<int>& weights)
-    : BlockSampler(all_blocks, default_remove_policy), weights_(weights), rand_seed_(rand_seed) {
+ProbabilisticBlockSampler::ProbabilisticBlockSampler(
+    const std::vector<ir::Expr>& all_blocks,
+    bool default_remove_policy,
+    utils::LinearRandomEngine::StateType rand_seed,
+    const std::vector<int>& weights)
+    : BlockSampler(all_blocks, default_remove_policy),
+      weights_(weights),
+      rand_seed_(rand_seed) {
   if (weights.empty()) {
     weights_.resize(all_blocks.size(), 1);
   } else {
@@ -79,14 +94,16 @@ std::string ProbabilisticBlockSampler::NextBlock(bool remove) {
   if (remains_ == 0) {
     return "";
   }
-  int block_idx = utils::SampleDiscreteFromDistribution(weights_, &rand_seed_);
+  int block_idx =
+      utils::SampleDiscreteFromDistribution(weights_, &rand_seed_);
   if (remove) {
     weights_[block_idx] = 0;
    --remains_;
   }
-  VLOG(6) << "[ProbabilisticBlockSampler] next block: " << all_blocks_.at(block_idx);
+  VLOG(6) << "[ProbabilisticBlockSampler] next block: "
+          << all_blocks_.at(block_idx);
   return all_blocks_.at(block_idx);
 }
 
 }  // namespace auto_schedule
-}  // namespace cinn
\ No newline at end of file
+}  // namespace cinn
diff --git a/paddle/cinn/auto_schedule/search_space/rule_sampler.cc b/paddle/cinn/auto_schedule/search_space/rule_sampler.cc
index f9d2f47c475..500ae91deb8 100644
--- a/paddle/cinn/auto_schedule/search_space/rule_sampler.cc
+++ b/paddle/cinn/auto_schedule/search_space/rule_sampler.cc
@@ -20,16 +20,19 @@
 namespace cinn {
 namespace auto_schedule {
 
-std::unique_ptr<RuleSampler> RuleSampler::Make(const std::vector<AutoGenRule*>& potential_rules,
-                                               bool default_remove_policy,
-                                               const std::string& strategy,
-                                               utils::LinearRandomEngine::StateType rand_seed,
-                                               const std::vector<int>& weights) {
+std::unique_ptr<RuleSampler> RuleSampler::Make(
+    const std::vector<AutoGenRule*>& potential_rules,
+    bool default_remove_policy,
+    const std::string& strategy,
+    utils::LinearRandomEngine::StateType rand_seed,
+    const std::vector<int>& weights) {
   CHECK_GT(potential_rules.size(), 0) << "Empty rule list";
   if (strategy == "traversal") {
-    return std::make_unique<TraversalRuleSampler>(potential_rules, default_remove_policy);
+    return std::make_unique<TraversalRuleSampler>(potential_rules,
+                                                  default_remove_policy);
   } else if (strategy == "probabilistic") {
-    return std::make_unique<ProbabilisticRuleSampler>(potential_rules, default_remove_policy, rand_seed, weights);
+    return std::make_unique<ProbabilisticRuleSampler>(
+        potential_rules, default_remove_policy, rand_seed, weights);
   }
 
   LOG(FATAL) << "Unimplemented strategy:" << strategy;
@@ -48,10 +51,11 @@ AutoGenRule* TraversalRuleSampler::NextRule(bool remove) {
   return nullptr;
 }
 
-ProbabilisticRuleSampler::ProbabilisticRuleSampler(const std::vector<AutoGenRule*>& potential_rules,
-                                                   bool default_remove_policy,
-                                                   utils::LinearRandomEngine::StateType rand_seed,
-                                                   const std::vector<int>& weights)
+ProbabilisticRuleSampler::ProbabilisticRuleSampler(
+    const std::vector<AutoGenRule*>& potential_rules,
+    bool default_remove_policy,
+    utils::LinearRandomEngine::StateType rand_seed,
+    const std::vector<int>& weights)
     : RuleSampler(potential_rules, default_remove_policy),
       weights_(weights),
       rand_seed_(utils::LinearRandomEngine::NormalizeState(rand_seed)) {
@@ -67,7 +71,8 @@ AutoGenRule* ProbabilisticRuleSampler::NextRule(bool remove) {
   if (remains_ == 0) {
     return nullptr;
   }
-  int rule_idx = utils::SampleDiscreteFromDistribution(weights_, &rand_seed_);
+  int rule_idx =
+      utils::SampleDiscreteFromDistribution(weights_, &rand_seed_);
   if (remove) {
     weights_[rule_idx] = 0;
     --remains_;
@@ -77,4 +82,4 @@ AutoGenRule* ProbabilisticRuleSampler::NextRule(bool remove) {
 }
 
 }  // namespace auto_schedule
-}  // namespace cinn
\ No newline at end of file
+}  // namespace cinn
diff --git a/paddle/cinn/frontend/op_mappers/science/broadcast.cc b/paddle/cinn/frontend/op_mappers/science/broadcast.cc
index b19f9830bec..f5b3f9cd20f 100644
--- a/paddle/cinn/frontend/op_mappers/science/broadcast.cc
+++ b/paddle/cinn/frontend/op_mappers/science/broadcast.cc
@@ -20,20 +20,23 @@ namespace cinn {
 namespace frontend {
 namespace science_mappers {
 
-void FillConstantOpMapper(const paddle::cpp::OpDesc& op_desc, const OpMapperContext& ctx) {
+void FillConstantOpMapper(const paddle::cpp::OpDesc& op_desc,
+                          const OpMapperContext& ctx) {
   CHECK_EQ(op_desc.Output("Y").size(), 1UL);
   auto y_name = op_desc.Output("Y").front();
 
-  auto shape = utils::ToShapeType(utils::GetAttrOrDefault<std::vector<int64_t>>(op_desc, "shape"));
+  auto shape = utils::ToShapeType(
+      utils::GetAttrOrDefault<std::vector<int64_t>>(op_desc, "shape"));
   auto value = utils::GetAttrOrDefault<float>(op_desc, "value", 0.0f);
-  auto dtype_id = utils::GetAttrOrDefault<int>(op_desc, "dtype", static_cast<int>(paddle::cpp::VarDescAPI::Type::FP32));
+  auto dtype_id = utils::GetAttrOrDefault<int>(
+      op_desc, "dtype", static_cast<int>(paddle::cpp::VarDescAPI::Type::FP32));
   auto dtype_pd = static_cast<paddle::cpp::VarDescAPI::Type>(dtype_id);
   auto dtype_cinn = utils::CppVarType2CommonType(dtype_pd);
-  auto dtype      = common::Type2Str(dtype_cinn);
+  auto dtype = common::Type2Str(dtype_cinn);
 
-  VLOG(4) << "fill constant (" << value << ") with shape (" << cinn::utils::Join(shape, ",") << ") and dtype [" << dtype
-          << "]";
+  VLOG(4) << "fill constant (" << value << ") with shape ("
+          << cinn::utils::Join(shape, ",") << ") and dtype [" << dtype << "]";
 
   const auto& cinn_name = cinn::utils::TransValidVarName(y_name);
   CheckVarNameValid(cinn_name);
@@ -44,19 +47,23 @@ void FillConstantOpMapper(const paddle::cpp::OpDesc& op_desc, const OpMapperCont
   ctx.AddVarModelToProgram(y_name, out->id);
 }
 
-void BroadcastOpMapper(const paddle::cpp::OpDesc& op_desc, const OpMapperContext& ctx) {
+void BroadcastOpMapper(const paddle::cpp::OpDesc& op_desc,
+                       const OpMapperContext& ctx) {
   CHECK_EQ(op_desc.Input("X").size(), 1UL);
   auto x_name = op_desc.Input("X").front();
   CHECK_EQ(op_desc.Output("Y").size(), 1UL);
   auto y_name = op_desc.Output("Y").front();
 
-  CHECK(op_desc.HasAttr("shape")) << "The broadcast_p operator should has 'shape' attribute, but " << x_name
-                                  << "'s broadcast hasn't.";
+  CHECK(op_desc.HasAttr("shape"))
+      << "The broadcast_p operator should has 'shape' attribute, but " << x_name
+      << "'s broadcast hasn't.";
 
-  auto y_shape = utils::ToShapeType(utils::GetAttrOrDefault<std::vector<int64_t>>(op_desc, "shape"));
-  auto x       = ctx.GetVar(x_name);
+  auto y_shape = utils::ToShapeType(
+      utils::GetAttrOrDefault<std::vector<int64_t>>(op_desc, "shape"));
+  auto x = ctx.GetVar(x_name);
 
-  VLOG(4) << "Broadcast " << x_name << " from shape (" << cinn::utils::Join(x->shape, ",") << ") to shape ("
+  VLOG(4) << "Broadcast " << x_name << " from shape ("
+          << cinn::utils::Join(x->shape, ",") << ") to shape ("
           << cinn::utils::Join(y_shape, ",") << ").";
 
   auto out = ctx.Builder()->BroadcastTo(x, y_shape);
@@ -70,8 +77,10 @@ void BroadcastOpMapper(const paddle::cpp::OpDesc& op_desc, const OpMapperContext
 }  // namespace cinn
 
 CINN_REGISTER_HELPER(science_broadcast) {
-  CINN_REGISTER_OP_MAPPER(fill_constant_p, cinn::frontend::science_mappers::FillConstantOpMapper)
-  CINN_REGISTER_OP_MAPPER(broadcast_p, cinn::frontend::science_mappers::BroadcastOpMapper)
+  CINN_REGISTER_OP_MAPPER(fill_constant_p,
+                          cinn::frontend::science_mappers::FillConstantOpMapper)
+  CINN_REGISTER_OP_MAPPER(broadcast_p,
+                          cinn::frontend::science_mappers::BroadcastOpMapper)
 
   return true;
-}
\ No newline at end of file
+}
diff --git a/paddle/cinn/hlir/pass/dot_merger_test.cc b/paddle/cinn/hlir/pass/dot_merger_test.cc
index 1f5f0104dbd..17258623f0d 100644
--- a/paddle/cinn/hlir/pass/dot_merger_test.cc
+++ b/paddle/cinn/hlir/pass/dot_merger_test.cc
@@ -19,7 +19,9 @@
 namespace cinn {
 namespace frontend {
 
-int GetSize(std::vector<int>& shape) { return std::accumulate(shape.begin(), shape.end(), 1, std::multiplies<int>()); }
+int GetSize(std::vector<int>& shape) {
+  return std::accumulate(shape.begin(), shape.end(), 1, std::multiplies<int>());
+}
 
 void RunModelTest(Program& program,
                   const std::vector<Variable>&& inputs,
@@ -28,13 +30,17 @@ void RunModelTest(Program& program,
   std::vector<std::vector<float>> inputs_data;
   for (auto input : inputs) {
     inputs_data.emplace_back(GetSize(input->shape));
-    InitRandomVector(&inputs_data.back(), inputs_data.back().size(), 0.0f, 1.0f, 1e-3);
+    InitRandomVector(
+        &inputs_data.back(), inputs_data.back().size(), 0.0f, 1.0f, 1e-3);
   }
   auto target = common::DefaultTarget();
-  std::unordered_map<std::string, std::pair<std::vector<float>, std::vector<float>>> outputs;
+  std::unordered_map<std::string,
+                     std::pair<std::vector<float>, std::vector<float>>>
+      outputs;
   {
-    auto graph = std::make_shared<hlir::framework::Graph>(program, fetch_ids, target);
+    auto graph =
+        std::make_shared<hlir::framework::Graph>(program, fetch_ids, target);
 
     hlir::framework::ApplyPass(graph.get(), "OpFusionPass");
     hlir::framework::ApplyPass(graph.get(), "FusionMergePass");
@@ -45,7 +51,7 @@ void RunModelTest(Program& program,
     for (int idx = 0; idx < inputs.size(); ++idx) {
       scope->Var<hlir::framework::Tensor>(inputs[idx]->id);
       auto tensor = scope->GetTensor(inputs[idx]->id);
-      auto* data  = tensor->mutable_data<float>(target);
+      auto* data = tensor->mutable_data<float>(target);
       CopyFromVector(inputs_data[idx], tensor, target);
     }
     run_program->Execute();
@@ -53,11 +59,13 @@ void RunModelTest(Program& program,
       auto tensor = scope->GetTensor(id);
       std::vector<float> data(tensor->shape().numel());
       CopyToVector(tensor, &data);
-      outputs[id] = std::pair<std::vector<float>, std::vector<float>>(data, std::vector<float>());
+      outputs[id] = std::pair<std::vector<float>, std::vector<float>>(
+          data, std::vector<float>());
     }
   }
   {
-    auto graph = std::make_shared<hlir::framework::Graph>(program, fetch_ids, target);
+    auto graph =
+        std::make_shared<hlir::framework::Graph>(program, fetch_ids, target);
     hlir::framework::ApplyPass(graph.get(), "DotMerger");
     hlir::framework::ApplyPass(graph.get(), "OpFusionPass");
     hlir::framework::ApplyPass(graph.get(), "FusionMergePass");
@@ -69,7 +77,7 @@ void RunModelTest(Program& program,
     for (int idx = 0; idx < inputs.size(); ++idx) {
       scope->Var<hlir::framework::Tensor>(inputs[idx]->id);
       auto tensor = scope->GetTensor(inputs[idx]->id);
-      auto* data  = tensor->mutable_data<float>(target);
+      auto* data = tensor->mutable_data<float>(target);
       CopyFromVector(inputs_data[idx], tensor, target);
     }
     run_program->Execute();
@@ -89,24 +97,24 @@
 TEST(DotMerger, Test_dot_merger0) {
   int m = 2, k = 1024, n = 100, n1 = 100, n2 = 100, axis = 1;
   NetBuilder net_builder("Test_dot_merger0");
-  auto A         = net_builder.CreateInput(Float(32), {m, k}, "A");
-  auto B         = net_builder.CreateInput(Float(32), {k, n1}, "B");
-  auto C         = net_builder.CreateInput(Float(32), {k, n2}, "C");
-  auto D         = net_builder.CreateInput(Float(32), {n1, k}, "D");
-  auto E         = net_builder.CreateInput(Float(32), {n2, k}, "E");
-  auto F         = net_builder.CreateInput(Float(32), {k, n}, "F");
-  auto G         = net_builder.Matmul(A, B);
-  auto H         = net_builder.Matmul(A, C);
-  auto G1        = net_builder.Matmul(D, F);
-  auto H1        = net_builder.Matmul(E, F);
-  auto G2        = net_builder.Concat({G, H}, axis);
-  auto H2        = net_builder.Concat({G1, H1}, (1 - axis));
-  auto F1        = net_builder.Matmul(G2, H2);
+  auto A = net_builder.CreateInput(Float(32), {m, k}, "A");
+  auto B = net_builder.CreateInput(Float(32), {k, n1}, "B");
+  auto C = net_builder.CreateInput(Float(32), {k, n2}, "C");
+  auto D = net_builder.CreateInput(Float(32), {n1, k}, "D");
+  auto E = net_builder.CreateInput(Float(32), {n2, k}, "E");
+  auto F = net_builder.CreateInput(Float(32), {k, n}, "F");
+  auto G = net_builder.Matmul(A, B);
+  auto H = net_builder.Matmul(A, C);
+  auto G1 = net_builder.Matmul(D, F);
+  auto H1 = net_builder.Matmul(E, F);
+  auto G2 = net_builder.Concat({G, H}, axis);
+  auto H2 = net_builder.Concat({G1, H1}, (1 - axis));
+  auto F1 = net_builder.Matmul(G2, H2);
   auto fetch_ids = {F1->id};
-  auto program   = net_builder.Build();
+  auto program = net_builder.Build();
   std::cout << "RunModelTest" << std::endl;
   RunModelTest(program, {A, B, C, D, E, F}, fetch_ids);
 }
 
 }  // namespace frontend
-}  // namespace cinn
\ No newline at end of file
+}  // namespace cinn
diff --git a/paddle/cinn/lang/README.md b/paddle/cinn/lang/README.md
index ebbb2ca579f..5f00868e61d 100644
--- a/paddle/cinn/lang/README.md
+++ b/paddle/cinn/lang/README.md
@@ -1,5 +1,5 @@
 # Design of CINN/DSL
-This module is a simple DSL defined in CINN project. 
+This module is a simple DSL defined in CINN project.
 The DSL module aims to represent the overall computation in a hardware indenpendent way.
 
 ## Concepts
@@ -78,14 +78,14 @@ A matrix multiplication
 Var i, j, k;
 Placeholder<float> A({M, K}), B({K, N});
 
-Tensor C = Compute({M, N}/*output shape*/, 
+Tensor C = Compute({M, N}/*output shape*/,
     [](Var i, Var j) {
         return ReduceSum(A(i,k) * B(k, j), k);
     }, "C");
 
 Tensor D = Compute({M, N}, [](Var i, Var j) {
     return Map(C(i,j) + 1);
 });
- 
+
 Schedule s = CreateSchedule(C);
 auto func = Build(s, [A, B, C], target=target, name="matmul");
diff --git a/paddle/cinn/pybind/utils.cc b/paddle/cinn/pybind/utils.cc
index dcae237d1eb..930ce191b45 100644
--- a/paddle/cinn/pybind/utils.cc
+++ b/paddle/cinn/pybind/utils.cc
@@ -54,7 +54,9 @@ void BindUtils(py::module *m) {
       .def(py::init())
       .def_property(
           "annotation",
-          [](HostEvent &self) -> const std::string & { return self.annotation_; },
+          [](HostEvent &self) -> const std::string & {
+            return self.annotation_;
+          },
           [](HostEvent &self, const std::string &v) { self.annotation_ = v; })
       .def_property(
           "duration",
@@ -67,4 +69,4 @@
 }
 
 }  // namespace pybind
-}  // namespace cinn
\ No newline at end of file
+}  // namespace cinn
diff --git a/paddle/cinn/utils/profiler_test.cc b/paddle/cinn/utils/profiler_test.cc
index 8a5951a1525..8c69b512eea 100644
--- a/paddle/cinn/utils/profiler_test.cc
+++ b/paddle/cinn/utils/profiler_test.cc
@@ -26,8 +26,10 @@ TEST(RecordEvent, HOST) {
   ProfilerHelper::EnableCPU();
 
   LOG(INFO) << "Usage 1: RecordEvent for HOST";
-  std::vector<EventType> types = {
-      EventType::kOrdinary, EventType::kCompile, EventType::kCompile, EventType::kInstruction};
+  std::vector<EventType> types = {EventType::kOrdinary,
+                                  EventType::kCompile,
+                                  EventType::kCompile,
+                                  EventType::kInstruction};
   for (int i = 0; i < 4; ++i) {
     std::string name = "evs_op_" + std::to_string(i);
     RecordEvent record_event(name, types[i]);
@@ -38,7 +40,7 @@ TEST(RecordEvent, HOST) {
   auto &events = HostEventRecorder::GetInstance().Events();
   EXPECT_EQ(events.size(), 4U);
   for (int i = 0; i < 4; ++i) {
-    auto &event      = events[i];
+    auto &event = events[i];
     std::string name = "evs_op_" + std::to_string(i);
     EXPECT_EQ(event.annotation_, name);
     EXPECT_GT(event.duration_, 0.0);
@@ -75,4 +77,4 @@ TEST(RecordEvent, HOST) {
     }
   }
   EXPECT_EQ(events.size(), 8U);
-}
\ No newline at end of file
+}
diff --git a/tools/cinn/codestyle/.gitignore b/tools/cinn/codestyle/.gitignore
index f3efb4c8433..774bffd2522 100644
--- a/tools/cinn/codestyle/.gitignore
+++ b/tools/cinn/codestyle/.gitignore
@@ -1,3 +1,3 @@
 *.pyc
 *.html
-*.json
\ No newline at end of file
+*.json
-- 
GitLab