Unverified commit 2e6de4ce, authored by 张春乔, committed by GitHub

[CodeStyle][CINN] fix end of file and trailing whitespace (#54955)

* cinn 5 6

* roll back something

* roll back something

* fix codestyle of cinn

* revert symlink changes

---------
Co-authored-by: SigureMo <sigure.qaq@gmail.com>
Parent 865b6548
@@ -21,36 +21,48 @@
namespace cinn {
namespace auto_schedule {

std::unique_ptr<BlockSampler> BlockSampler::Make(
    const std::vector<ir::Expr>& all_blocks,
    bool default_remove_policy,
    const std::string& strategy,
    utils::LinearRandomEngine::StateType rand_seed,
    const std::vector<int>& weights) {
  CHECK_GT(all_blocks.size(), 0) << "Empty block list";
  if (strategy == "traversal") {
    VLOG(6) << "Init TraversalBlockSampler with block num = "
            << all_blocks.size();
    return std::make_unique<TraversalBlockSampler>(all_blocks,
                                                   default_remove_policy);
  } else if (strategy == "probabilistic") {
    VLOG(6) << "Init ProbabilisticBlockSampler with block num = "
            << all_blocks.size();
    return std::make_unique<ProbabilisticBlockSampler>(
        all_blocks, default_remove_policy, rand_seed, weights);
  }

  LOG(FATAL) << "Unimplemented strategy:" << strategy;
  return nullptr;
}

BlockSampler::BlockSampler(const std::vector<ir::Expr>& all_blocks,
                           bool default_remove_policy) {
  default_remove_policy_ = default_remove_policy;
  std::transform(all_blocks.begin(),
                 all_blocks.end(),
                 std::back_inserter(all_blocks_),
                 [](const ir::Expr& block_expr) {
                   const ir::ScheduleBlockRealize* block_realize =
                       block_expr.As<ir::ScheduleBlockRealize>();
                   const ir::ScheduleBlock* block =
                       block_realize->schedule_block.As<ir::ScheduleBlock>();
                   return block->name;
                 });
}

std::string TraversalBlockSampler::NextBlock(bool remove) {
  if (cur_idx_ < all_blocks_.size()) {
    VLOG(6) << "[TraversalBlockSampler] next block: "
            << all_blocks_.at(cur_idx_);
    std::string block_name = all_blocks_.at(cur_idx_);
    if (remove) {
      ++cur_idx_;
@@ -62,11 +74,14 @@ std::string TraversalBlockSampler::NextBlock(bool remove) {
  return "";
}

ProbabilisticBlockSampler::ProbabilisticBlockSampler(
    const std::vector<ir::Expr>& all_blocks,
    bool default_remove_policy,
    utils::LinearRandomEngine::StateType rand_seed,
    const std::vector<int>& weights)
    : BlockSampler(all_blocks, default_remove_policy),
      weights_(weights),
      rand_seed_(rand_seed) {
  if (weights.empty()) {
    weights_.resize(all_blocks.size(), 1);
  } else {
@@ -79,12 +94,14 @@ std::string ProbabilisticBlockSampler::NextBlock(bool remove) {
  if (remains_ == 0) {
    return "";
  }
  int block_idx =
      utils::SampleDiscreteFromDistribution<int>(weights_, &rand_seed_);
  if (remove) {
    weights_[block_idx] = 0;
    --remains_;
  }

  VLOG(6) << "[ProbabilisticBlockSampler] next block: "
          << all_blocks_.at(block_idx);
  return all_blocks_.at(block_idx);
}
...
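For context, BlockSampler::Make is a factory keyed on the strategy string: "traversal" returns a sampler that visits blocks in order, while "probabilistic" draws them in proportion to weights. A minimal sketch of a call site, assuming only the signatures visible in this diff (the blocks vector and the helper that collects it are hypothetical):

// Hypothetical call site; blocks must be non-empty (Make CHECK-fails otherwise).
std::vector<ir::Expr> blocks = CollectScheduleBlocks();  // assumed helper
auto sampler = BlockSampler::Make(blocks,
                                  /*default_remove_policy=*/true,
                                  /*strategy=*/"traversal",
                                  /*rand_seed=*/0,
                                  /*weights=*/{});
// A traversal sampler returns "" once every block has been visited.
for (std::string name = sampler->NextBlock(/*remove=*/true); !name.empty();
     name = sampler->NextBlock(/*remove=*/true)) {
  VLOG(6) << "visiting block " << name;
}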
@@ -20,16 +20,19 @@
namespace cinn {
namespace auto_schedule {

std::unique_ptr<RuleSampler> RuleSampler::Make(
    const std::vector<AutoGenRule*>& potential_rules,
    bool default_remove_policy,
    const std::string& strategy,
    utils::LinearRandomEngine::StateType rand_seed,
    const std::vector<int>& weights) {
  CHECK_GT(potential_rules.size(), 0) << "Empty rule list";
  if (strategy == "traversal") {
    return std::make_unique<TraversalRuleSampler>(potential_rules,
                                                  default_remove_policy);
  } else if (strategy == "probabilistic") {
    return std::make_unique<ProbabilisticRuleSampler>(
        potential_rules, default_remove_policy, rand_seed, weights);
  }

  LOG(FATAL) << "Unimplemented strategy:" << strategy;
@@ -48,7 +51,8 @@ AutoGenRule* TraversalRuleSampler::NextRule(bool remove) {
  return nullptr;
}

ProbabilisticRuleSampler::ProbabilisticRuleSampler(
    const std::vector<AutoGenRule*>& potential_rules,
    bool default_remove_policy,
    utils::LinearRandomEngine::StateType rand_seed,
    const std::vector<int>& weights)
@@ -67,7 +71,8 @@ AutoGenRule* ProbabilisticRuleSampler::NextRule(bool remove) {
  if (remains_ == 0) {
    return nullptr;
  }
  int rule_idx =
      utils::SampleDiscreteFromDistribution<int>(weights_, &rand_seed_);
  if (remove) {
    weights_[rule_idx] = 0;
    --remains_;
...
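The probabilistic samplers in both files delegate the draw to utils::SampleDiscreteFromDistribution and implement removal by zeroing the chosen weight and decrementing remains_. A self-contained analogue of that weighted draw, written against <random> rather than CINN's utility (a sketch, not the library's implementation):

#include <random>
#include <vector>

// Return an index with probability proportional to weights[i].
// A zeroed weight can never be drawn again, which is exactly how
// NextBlock/NextRule "remove" an entry above.
int SampleDiscrete(const std::vector<int>& weights, std::mt19937* rng) {
  std::discrete_distribution<int> dist(weights.begin(), weights.end());
  return dist(*rng);
}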
@@ -20,20 +20,23 @@ namespace cinn {
namespace frontend {
namespace science_mappers {

void FillConstantOpMapper(const paddle::cpp::OpDesc& op_desc,
                          const OpMapperContext& ctx) {
  CHECK_EQ(op_desc.Output("Y").size(), 1UL);
  auto y_name = op_desc.Output("Y").front();

  auto shape = utils::ToShapeType(
      utils::GetAttrOrDefault<std::vector<int64_t>>(op_desc, "shape"));
  auto value = utils::GetAttrOrDefault<float>(op_desc, "value", 0.0f);

  auto dtype_id = utils::GetAttrOrDefault<int>(
      op_desc, "dtype", static_cast<int>(paddle::cpp::VarDescAPI::Type::FP32));
  auto dtype_pd = static_cast<paddle::cpp::VarDescAPI::Type>(dtype_id);
  auto dtype_cinn = utils::CppVarType2CommonType(dtype_pd);
  auto dtype = common::Type2Str(dtype_cinn);

  VLOG(4) << "fill constant (" << value << ") with shape ("
          << cinn::utils::Join(shape, ",") << ") and dtype [" << dtype << "]";

  const auto& cinn_name = cinn::utils::TransValidVarName(y_name);
  CheckVarNameValid(cinn_name);
@@ -44,19 +47,23 @@ void FillConstantOpMapper(const paddle::cpp::OpDesc& op_desc, const OpMapperCont
  ctx.AddVarModelToProgram(y_name, out->id);
}

void BroadcastOpMapper(const paddle::cpp::OpDesc& op_desc,
                       const OpMapperContext& ctx) {
  CHECK_EQ(op_desc.Input("X").size(), 1UL);
  auto x_name = op_desc.Input("X").front();
  CHECK_EQ(op_desc.Output("Y").size(), 1UL);
  auto y_name = op_desc.Output("Y").front();

  CHECK(op_desc.HasAttr("shape"))
      << "The broadcast_p operator should has 'shape' attribute, but " << x_name
      << "'s broadcast hasn't.";

  auto y_shape = utils::ToShapeType(
      utils::GetAttrOrDefault<std::vector<int64_t>>(op_desc, "shape"));
  auto x = ctx.GetVar(x_name);

  VLOG(4) << "Broadcast " << x_name << " from shape ("
          << cinn::utils::Join(x->shape, ",") << ") to shape ("
          << cinn::utils::Join(y_shape, ",") << ").";

  auto out = ctx.Builder()->BroadcastTo(x, y_shape);
@@ -70,8 +77,10 @@ void BroadcastOpMapper(const paddle::cpp::OpDesc& op_desc, const OpMapperContext
}  // namespace cinn

CINN_REGISTER_HELPER(science_broadcast) {
  CINN_REGISTER_OP_MAPPER(fill_constant_p,
                          cinn::frontend::science_mappers::FillConstantOpMapper)
  CINN_REGISTER_OP_MAPPER(broadcast_p,
                          cinn::frontend::science_mappers::BroadcastOpMapper)

  return true;
}
...
@@ -19,7 +19,9 @@
namespace cinn {
namespace frontend {

int GetSize(std::vector<int>& shape) {
  return std::accumulate(shape.begin(), shape.end(), 1, std::multiplies<int>());
}

void RunModelTest(Program& program,
                  const std::vector<Variable>&& inputs,
@@ -28,13 +30,17 @@ void RunModelTest(Program& program,
  std::vector<std::vector<float>> inputs_data;
  for (auto input : inputs) {
    inputs_data.emplace_back(GetSize(input->shape));
    InitRandomVector<float>(
        &inputs_data.back(), inputs_data.back().size(), 0.0f, 1.0f, 1e-3);
  }

  auto target = common::DefaultTarget();
  std::unordered_map<std::string,
                     std::pair<std::vector<float>, std::vector<float>>>
      outputs;

  {
    auto graph =
        std::make_shared<hlir::framework::Graph>(program, fetch_ids, target);
    hlir::framework::ApplyPass(graph.get(), "OpFusionPass");
    hlir::framework::ApplyPass(graph.get(), "FusionMergePass");
@@ -53,11 +59,13 @@ void RunModelTest(Program& program,
      auto tensor = scope->GetTensor(id);
      std::vector<float> data(tensor->shape().numel());
      CopyToVector(tensor, &data);
      outputs[id] = std::pair<std::vector<float>, std::vector<float>>(
          data, std::vector<float>());
    }
  }
  {
    auto graph =
        std::make_shared<hlir::framework::Graph>(program, fetch_ids, target);
    hlir::framework::ApplyPass(graph.get(), "DotMerger");
    hlir::framework::ApplyPass(graph.get(), "OpFusionPass");
    hlir::framework::ApplyPass(graph.get(), "FusionMergePass");
...
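RunModelTest compiles the same program twice, without and with the DotMerger pass, and records each fetch id's two result vectors in outputs so they can be checked against each other. A hedged sketch of the elementwise comparison such a test typically ends with (my own helper, not the one in this file):

#include <cmath>
#include <cstddef>
#include <vector>

// Compare two result vectors elementwise within an absolute tolerance.
bool AllClose(const std::vector<float>& a,
              const std::vector<float>& b,
              float atol = 1e-5f) {
  if (a.size() != b.size()) return false;
  for (std::size_t i = 0; i < a.size(); ++i) {
    if (std::fabs(a[i] - b[i]) > atol) return false;
  }
  return true;
}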
@@ -54,7 +54,9 @@ void BindUtils(py::module *m) {
      .def(py::init<const std::string &, double, EventType>())
      .def_property(
          "annotation",
          [](HostEvent &self) -> const std::string & {
            return self.annotation_;
          },
          [](HostEvent &self, const std::string &v) { self.annotation_ = v; })
      .def_property(
          "duration",
...
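The binding above exposes the annotation_ field as a Python property through a getter/setter lambda pair. A minimal self-contained pybind11 example of the same pattern, using a hypothetical Point struct rather than HostEvent:

#include <pybind11/pybind11.h>
namespace py = pybind11;

struct Point {
  double x = 0.0;
};

PYBIND11_MODULE(example, m) {
  py::class_<Point>(m, "Point")
      .def(py::init<>())
      // Same getter/setter-lambda idiom as the HostEvent binding above.
      .def_property(
          "x",
          [](Point &self) { return self.x; },
          [](Point &self, double v) { self.x = v; });
}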
@@ -26,8 +26,10 @@ TEST(RecordEvent, HOST) {
  ProfilerHelper::EnableCPU();
  LOG(INFO) << "Usage 1: RecordEvent for HOST";
  std::vector<EventType> types = {EventType::kOrdinary,
                                  EventType::kCompile,
                                  EventType::kCompile,
                                  EventType::kInstruction};

  for (int i = 0; i < 4; ++i) {
    std::string name = "evs_op_" + std::to_string(i);
    RecordEvent record_event(name, types[i]);
...
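RecordEvent is used here as an RAII guard: constructing it opens a profiling span for name, and its destructor closes the span when the loop iteration ends. A generic sketch of that scoped-timer idiom (my own illustration, not CINN's RecordEvent):

#include <chrono>
#include <iostream>
#include <string>
#include <utility>

// RAII scoped timer: measures from construction to destruction, mirroring
// how RecordEvent brackets each loop iteration in the test above.
class ScopedTimer {
 public:
  explicit ScopedTimer(std::string name)
      : name_(std::move(name)), start_(std::chrono::steady_clock::now()) {}
  ~ScopedTimer() {
    auto us = std::chrono::duration_cast<std::chrono::microseconds>(
                  std::chrono::steady_clock::now() - start_)
                  .count();
    std::cout << name_ << " took " << us << " us\n";
  }

 private:
  std::string name_;
  std::chrono::steady_clock::time_point start_;
};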