未验证 提交 a7419ff5 编写于 作者: Z zhangbo9674 提交者: GitHub

[IR] Refine PhiKernelOp attributes name and delete some unused code2 (#54944)

* refine code

* add some interface for phi kernel op

* fix compile bug

* delete unused code

* support code

* fix bug

* refine code

* delete unused code

* fix compile bug

* fix compile bug

* delete unused code

* add elementwise add op

* fix compile bug

* refine code

* fix compile bug

* add ut for attribute member function

* delete unused code

* refine ut
上级 b8e4d74e
......@@ -2,7 +2,8 @@ set(PD_DIALECT_SOURCE_DIR "${PADDLE_SOURCE_DIR}/paddle/fluid/ir/dialect")
set(PD_DIALECT_BINARY_DIR "${PADDLE_BINARY_DIR}/paddle/fluid/ir/dialect")
# Generate pd_dialect files defining op using op_gen_file
set(op_gen_file ${PADDLE_SOURCE_DIR}/paddle/fluid/ir/dialect/op_gen.py)
set(op_gen_file
${PADDLE_SOURCE_DIR}/paddle/fluid/ir/dialect/op_generator/op_gen.py)
set(op_compat_yaml_file ${PADDLE_SOURCE_DIR}/paddle/phi/api/yaml/op_compat.yaml)
set(op_forward_yaml_file1
${PADDLE_SOURCE_DIR}/paddle/fluid/operators/generator/parsed_ops/ops.parsed.yaml
......@@ -17,10 +18,9 @@ set(op_backward_yaml_file2
${PADDLE_SOURCE_DIR}/paddle/fluid/operators/generator/parsed_ops/legacy_backward_ops.parsed.yaml
)
set(op_yaml_file3 ${PADDLE_SOURCE_DIR}/paddle/fluid/ir/dialect/pd_op.yaml)
set(op_yaml_file4
${PADDLE_SOURCE_DIR}/paddle/fluid/ir/dialect/pd_legacy_op.yaml)
set(op_yaml_files
${op_forward_yaml_file1},${op_forward_yaml_file2},${op_backward_yaml_file1},${op_backward_yaml_file2},${op_yaml_file3},${op_yaml_file4}
${op_forward_yaml_file1},${op_forward_yaml_file2},${op_backward_yaml_file1},${op_backward_yaml_file2},${op_yaml_file3}
)
set(op_namespace paddle,dialect)
set(dialect_name pd)
......
......@@ -15,6 +15,7 @@
#pragma once
#include "paddle/ir/core/attribute.h"
#include "paddle/ir/core/attribute_base.h"
#include "paddle/ir/core/utils.h"
#include "paddle/phi/common/data_type.h"
#include "paddle/phi/core/kernel_factory.h"
......
......@@ -44,26 +44,14 @@ void PhiKernelOp::Verify() {
"Type of attribute: kernel_key is not right."));
}
const std::string PhiKernelOp::op_name() {
return operation()
->attributes()
.at("op_name")
.dyn_cast<ir::StrAttribute>()
.data();
std::string PhiKernelOp::op_name() {
return attributes().at("op_name").dyn_cast<ir::StrAttribute>().data();
}
const std::string PhiKernelOp::kernel_name() {
return operation()
->attributes()
.at("kernel_name")
.dyn_cast<ir::StrAttribute>()
.data();
std::string PhiKernelOp::kernel_name() {
return attributes().at("kernel_name").dyn_cast<ir::StrAttribute>().data();
}
phi::KernelKey PhiKernelOp::kernel_key() {
return operation()
->attributes()
.at("kernel_key")
.dyn_cast<KernelAttribute>()
.data();
return attributes().at("kernel_key").dyn_cast<KernelAttribute>().data();
}
} // namespace dialect
......
......@@ -27,8 +27,8 @@ class PhiKernelOp : public ir::Op<PhiKernelOp> {
static const char *name() { return "phi.kernel"; }
static constexpr uint32_t attributes_num = 3;
static const char *attributes_name[attributes_num];
const std::string op_name();
const std::string kernel_name();
std::string op_name();
std::string kernel_name();
phi::KernelKey kernel_key();
void Verify();
};
......
......@@ -16,6 +16,8 @@ import argparse
import os
import yaml
from op_interface_gen import gen_exclusive_interface_str, gen_op_infer_meta_str
from op_member_func_gen import gen_op_get_inputs_outputs_str
from op_verify_gen import gen_verify_func_str
# =====================================
......@@ -29,7 +31,7 @@ H_FILE_TEMPLATE = """#ifdef GET_OP_LIST
#undef GET_OP_LIST
{op_declare}
#else
// This file is generated by "paddle/fluid/ir/dialect/op_gen.py"
// This file is generated by "paddle/fluid/ir/dialect/op_generator/op_gen.py"
#include <vector>
......@@ -78,17 +80,12 @@ op_n_attribute_declare_str = (
"static const char *attributes_name[{attribute_num}];"
)
OP_GET_INPUT_TEMPLATE = """ ir::Value {input_name}() {{ return operand({input_index}); }}
"""
OP_GET_OUTPUT_TEMPLATE = """ ir::OpResult {output_name}() {{ return result({output_index}); }}
"""
# =====================================
# String Template for cc file code gen
# =====================================
CC_FILE_TEMPLATE = """// This file is generated by "paddle/fluid/ir/dialect/op_gen.py"
CC_FILE_TEMPLATE = """// This file is generated by "paddle/fluid/ir/dialect/op_generator/op_gen.py"
#include "{h_file}"
#include "paddle/fluid/ir/dialect/pd_op.h"
#include "paddle/fluid/ir/dialect/pd_type.h"
#include "paddle/fluid/ir/dialect/pd_attribute.h"
#include "paddle/ir/core/builtin_attribute.h"
......@@ -142,12 +139,6 @@ void {op_name}::Build({build_args}) {{
{build_outputs}
}}
"""
OP_INFER_SHAPE_TEMPLATE = """
void {op_name}::InferMeta( phi::InferMetaContext *infer_meta ) {{
auto fn = PD_INFER_META(phi::{infer_meta_func});
fn(infer_meta);
}}
"""
DEFINE_OP_TYPE_ID = """
IR_DEFINE_EXPLICIT_TYPE_ID({op_name})
......@@ -1217,12 +1208,10 @@ def OpGenerator(
op_interfaces = ["OpYamlInfoInterface"]
op_traits = []
exclusive_interface_str = ""
if op_info.infer_meta_func:
op_interfaces += ["InferMetaInterface"]
exclusive_interface_str += (
" static void InferMeta( phi::InferMetaContext *infer_meta );"
)
exclusive_interface_str = gen_exclusive_interface_str(op_info)
# If op has inplace info, we will generate inplace op and non-inplace op.
for op_name in op_info.op_phi_name:
......@@ -1242,22 +1231,11 @@ def OpGenerator(
# =================================== #
# gen get input/output methods str #
# =================================== #
op_get_inputs_outputs_str = ""
for idx in range(len(op_input_name_list)):
op_get_inputs_outputs_str += OP_GET_INPUT_TEMPLATE.format(
input_name=op_input_name_list[idx],
input_index=idx,
)
for idx in range(len(op_mutable_attribute_name_list)):
op_get_inputs_outputs_str += OP_GET_INPUT_TEMPLATE.format(
input_name=op_mutable_attribute_name_list[idx],
input_index=idx + len(op_input_name_list),
)
for idx in range(len(op_output_name_list)):
op_get_inputs_outputs_str += OP_GET_OUTPUT_TEMPLATE.format(
output_name=op_output_name_list[idx],
output_index=idx,
)
op_get_inputs_outputs_str = gen_op_get_inputs_outputs_str(
op_input_name_list,
op_mutable_attribute_name_list,
op_output_name_list,
)
# =================================== #
# gen Build methods str #
......@@ -1472,12 +1450,7 @@ def OpGenerator(
op_output_optional_list,
)
op_infer_meta_str = ""
if op_info.infer_meta_func:
op_infer_meta_str = OP_INFER_SHAPE_TEMPLATE.format(
op_name=op_class_name,
infer_meta_func=op_info.infer_meta_func,
)
op_infer_meta_str = gen_op_infer_meta_str(op_info, op_class_name)
ops_name_list.append(op_class_name)
ops_declare_list.append(op_declare_str)
......
# Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# generator interfaces
# Template for the generated `InferMeta` member function of an op class.
# `op_name` is the C++ op class name; `infer_meta_func` is the phi
# infer-meta function the generated wrapper forwards to.
OP_INFER_SHAPE_TEMPLATE = """
void {op_name}::InferMeta( phi::InferMetaContext *infer_meta ) {{
  auto fn = PD_INFER_META(phi::{infer_meta_func});
  fn(infer_meta);
}}
"""


def gen_op_infer_meta_str(op_info, op_class_name):
    """Return the C++ `InferMeta` definition for `op_class_name`.

    Returns an empty string when the op declares no `infer_meta` function
    in its yaml config (i.e. `op_info.infer_meta_func` is falsy).
    """
    op_infer_meta_str = ""
    if op_info.infer_meta_func:
        op_infer_meta_str = OP_INFER_SHAPE_TEMPLATE.format(
            op_name=op_class_name,
            infer_meta_func=op_info.infer_meta_func,
        )
    return op_infer_meta_str
def gen_exclusive_interface_str(op_info):
    """Return declarations for interfaces only some ops implement.

    Currently the only such interface is the static `InferMeta` method,
    declared only when the op has an `infer_meta` function in its yaml
    config; otherwise an empty string is returned.
    """
    exclusive_interface_str = ""
    if op_info.infer_meta_func:
        exclusive_interface_str += (
            "  static void InferMeta( phi::InferMetaContext *infer_meta );"
        )
    return exclusive_interface_str
# Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# generator op member function
# Accessor for an input operand: `ir::Value <name>()` on the op class.
OP_GET_INPUT_TEMPLATE = """  ir::Value {input_name}() {{ return operand({input_index}); }}
"""

# Accessor for an output: `ir::OpResult <name>()` on the op class.
OP_GET_OUTPUT_TEMPLATE = """  ir::OpResult {output_name}() {{ return result({output_index}); }}
"""

# Checked attribute accessors: the untyped overload returns the raw
# ir::Attribute; the templated overload additionally verifies the stored
# attribute is of the requested type before casting.
OP_GET_ATTRIBUTE_TEMPLATE = """  ir::Attribute attribute(const std::string &name) {{
    PADDLE_ENFORCE(attributes().count(name) > 0,
                   phi::errors::PreconditionNotMet("Attribute is not exist."));
    return attributes().at(name);
  }}

  template <typename T>
  T attribute(const std::string &name) {{
    PADDLE_ENFORCE(attributes().count(name) > 0 && attributes().at(name).isa<T>(),
                   phi::errors::PreconditionNotMet("Attribute is not right."));
    return attributes().at(name).dyn_cast<T>();
  }}
"""


def gen_op_get_inputs_outputs_str(
    op_input_name_list, op_mutable_attribute_name_list, op_output_name_list
):
    """Generate the C++ accessor member functions for an op class.

    Emits one `ir::Value` getter per input, then per mutable attribute
    (mutable attributes occupy operand slots after the regular inputs),
    one `ir::OpResult` getter per output, and finally the shared
    attribute-lookup helpers.
    """
    op_get_inputs_outputs_str = ""
    for idx, input_name in enumerate(op_input_name_list):
        op_get_inputs_outputs_str += OP_GET_INPUT_TEMPLATE.format(
            input_name=input_name,
            input_index=idx,
        )
    # Mutable attributes are passed as operands, indexed after the inputs.
    num_inputs = len(op_input_name_list)
    for idx, attr_name in enumerate(op_mutable_attribute_name_list):
        op_get_inputs_outputs_str += OP_GET_INPUT_TEMPLATE.format(
            input_name=attr_name,
            input_index=idx + num_inputs,
        )
    for idx, output_name in enumerate(op_output_name_list):
        op_get_inputs_outputs_str += OP_GET_OUTPUT_TEMPLATE.format(
            output_name=output_name,
            output_index=idx,
        )
    op_get_inputs_outputs_str += OP_GET_ATTRIBUTE_TEMPLATE
    return op_get_inputs_outputs_str
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
# Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
......
......@@ -15,6 +15,7 @@
#pragma once
#include "paddle/ir/core/attribute.h"
#include "paddle/ir/core/attribute_base.h"
#include "paddle/ir/core/utils.h"
#include "paddle/phi/common/data_type.h"
#include "paddle/phi/common/int_array.h"
......
# Parsed-op entry for elementwise_add (legacy add with explicit axis attr).
# NOTE(review): nesting reconstructed to the standard parsed-ops schema —
# confirm against the other entries in this yaml file.
- name: elementwise_add
  inputs:
  - typename: Tensor
    name: x
    optional: false
    no_need_buffer: false
    data_transform: {}
  - typename: Tensor
    name: y
    optional: false
    no_need_buffer: false
    data_transform: {}
  attrs:
  - {typename: int, name: axis}
  outputs:
  - {typename: Tensor, name: out, optional: false, intermediate: false}
  no_need_buffer: null
  data_transform: null
  infer_meta:
    func: ElementwiseInferMeta
    param: [x, y]
  kernel:
    func: [add_raw]
    param: [x, y]
    backend: null
    layout: null
    data_type: null
    dispatch: {add: null}
    force_backend: null
  inplace: {out: x}
  view: null
  backward: add_grad
......@@ -17,6 +17,7 @@
#include <type_traits>
#include "paddle/ir/core/type.h"
#include "paddle/ir/core/type_base.h"
#include "paddle/ir/core/utils.h"
#include "paddle/phi/core/tensor_meta.h"
......
......@@ -13,8 +13,20 @@
// limitations under the License.
#include "paddle/ir/core/attribute.h"
#include "paddle/ir/core/attribute_base.h"
#include "paddle/ir/core/dialect.h"
namespace ir {
// Out-of-line definitions: these need the full Dialect/AbstractAttribute
// declarations (dialect.h), which attribute.h deliberately does not include
// to keep it lightweight and avoid a header cycle.

// Context is reached through the owning dialect of this attribute's storage.
IrContext *Attribute::ir_context() const { return dialect().ir_context(); }

// TypeId of the concrete attribute kind, as recorded in the storage's
// AbstractAttribute descriptor.
TypeId Attribute::type_id() { return storage_->abstract_attribute().type_id(); }

// Descriptor with the attribute's static metadata (type id, dialect).
const AbstractAttribute &Attribute::abstract_attribute() {
return storage_->abstract_attribute();
}

// Dialect that registered this attribute kind.
const Dialect &Attribute::dialect() const {
return storage_->abstract_attribute().dialect();
}
} // namespace ir
......@@ -14,10 +14,15 @@
#pragma once
#include "paddle/ir/core/attribute_base.h"
#include "paddle/ir/core/cast_utils.h"
#include "paddle/ir/core/type_id.h"
namespace ir {
class AttributeStorage;
class AbstractAttribute;
class IrContext;
class Dialect;
///
/// \brief Unified interface of the Attribute class. Derivation of all Attribute
/// classes only derives interfaces, not members.
......@@ -46,17 +51,13 @@ class IR_API Attribute {
///
/// \brief Some Attribute attribute acquisition interfaces.
///
TypeId type_id() { return storage_->abstract_attribute().type_id(); }
TypeId type_id();
const AbstractAttribute &abstract_attribute() {
return storage_->abstract_attribute();
}
const AbstractAttribute &abstract_attribute();
const Storage *storage() const { return storage_; }
const Dialect &dialect() const {
return storage_->abstract_attribute().dialect();
}
const Dialect &dialect() const;
IrContext *ir_context() const;
......
......@@ -17,6 +17,7 @@
#include <list>
#include "paddle/ir/core/block.h"
#include "paddle/ir/core/ir_context.h"
#include "paddle/ir/core/operation.h"
namespace ir {
......
......@@ -19,6 +19,7 @@
#include <type_traits>
#include "paddle/ir/core/attribute.h"
#include "paddle/ir/core/attribute_base.h"
#include "paddle/ir/core/utils.h"
namespace ir {
......
......@@ -15,6 +15,7 @@
#pragma once
#include "paddle/ir/core/type.h"
#include "paddle/ir/core/type_base.h"
#include "paddle/ir/core/utils.h"
namespace ir {
......
......@@ -13,6 +13,7 @@
// limitations under the License.
#include "paddle/ir/core/operation_utils.h"
#include "paddle/ir/core/ir_context.h"
#include "paddle/ir/core/region.h"
namespace ir {
......
......@@ -14,6 +14,7 @@
#pragma once
#include <memory>
#include "paddle/ir/core/attribute.h"
#include "paddle/ir/core/op_info.h"
#include "paddle/ir/core/region.h"
......
......@@ -14,7 +14,14 @@
#include "paddle/ir/core/type.h"
#include "paddle/ir/core/dialect.h"
#include "paddle/ir/core/type_base.h"
namespace ir {
IrContext* Type::ir_context() const { return dialect().ir_context(); }
IrContext *Type::ir_context() const { return dialect().ir_context(); }
TypeId Type::type_id() { return storage_->abstract_type().type_id(); }
const AbstractType &Type::abstract_type() { return storage_->abstract_type(); }
Dialect &Type::dialect() const { return storage_->abstract_type().dialect(); }
} // namespace ir
......@@ -17,9 +17,13 @@
#include <ostream>
#include "paddle/ir/core/cast_utils.h"
#include "paddle/ir/core/type_base.h"
#include "paddle/ir/core/type_id.h"
namespace ir {
class TypeStorage;
class AbstractType;
class IrContext;
class Dialect;
///
/// \brief Unified interface of the Type class. Derivation of all Type classes
/// only derives interfaces, not members. For example, DenseTensorType,
......@@ -53,13 +57,13 @@ class IR_API Type {
///
/// \brief Some type attribute acquisition interfaces.
///
TypeId type_id() { return storage_->abstract_type().type_id(); }
TypeId type_id();
const AbstractType &abstract_type() { return storage_->abstract_type(); }
const AbstractType &abstract_type();
const Storage *storage() const { return storage_; }
Dialect &dialect() const { return storage_->abstract_type().dialect(); }
Dialect &dialect() const;
IrContext *ir_context() const;
......
......@@ -44,6 +44,7 @@
#include "paddle/fluid/ir/pass/pd_op_to_kernel_pass.h"
#include "paddle/fluid/ir/phi_kernel_adaptor/phi_kernel_adaptor.h"
#include "paddle/ir/core/attribute.h"
#include "paddle/phi/core/kernel_registry.h"
PD_DECLARE_KERNEL(full, CPU, ALL_LAYOUT);
......@@ -74,6 +75,11 @@ TEST(program_test, program) {
true);
EXPECT_EQ(block->size(), 4u);
ir::Attribute seed_attr = uniform1.attribute("seed");
ir::Int32Attribute seed_attr1 =
uniform1.attribute<ir::Int32Attribute>("seed");
EXPECT_EQ(seed_attr.dyn_cast<ir::Int32Attribute>().data(), seed_attr1.data());
// Def: B = paddle::dialect::UniformOp(...)
paddle::dialect::UniformOp uniform2 =
builder.Build<paddle::dialect::UniformOp>(std::vector<int64_t>{2, 2},
......
......@@ -20,6 +20,7 @@
#include "paddle/fluid/framework/variable.h"
#include "paddle/fluid/framework/variable_helper.h"
#include "paddle/fluid/ir/dialect/kernel_dialect.h"
#include "paddle/fluid/ir/dialect/kernel_op.h"
#include "paddle/fluid/ir/dialect/pd_attribute.h"
#include "paddle/fluid/ir/dialect/pd_dialect.h"
#include "paddle/fluid/ir/dialect/pd_type.h"
......@@ -34,6 +35,7 @@
#include "paddle/ir/core/ir_context.h"
#include "paddle/ir/core/program.h"
#include "paddle/ir/core/utils.h"
#include "paddle/phi/common/data_type.h"
#include "paddle/phi/common/place.h"
#include "paddle/phi/core/kernel_context.h"
#include "paddle/phi/core/kernel_factory.h"
......@@ -84,6 +86,24 @@ TEST(program_test, program) {
EXPECT_EQ(res1, true);
EXPECT_EQ(res2, true);
EXPECT_EQ(res3, true);
EXPECT_EQ(kernel_program->block()->size(), 3u);
EXPECT_EQ(kernel_program->block()
->front()
->dyn_cast<paddle::dialect::PhiKernelOp>()
.op_name(),
"pd.full");
EXPECT_EQ(kernel_program->block()
->front()
->dyn_cast<paddle::dialect::PhiKernelOp>()
.kernel_name(),
"full");
EXPECT_EQ(kernel_program->block()
->front()
->dyn_cast<paddle::dialect::PhiKernelOp>()
.kernel_key()
.dtype(),
phi::DataType::FLOAT32);
}
TEST(dialect_attr, attr) {
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册