Unverified commit 7901abe8, authored by niuliling123, committed by GitHub

Add padding parameter for layout lightly op check (#55937)

Parent c472d105
......@@ -1233,6 +1233,7 @@ class DygraphForwardFunctionGenerator(DygraphFunctionGeneratorBase):
             'end',
             'stop',
             'perm',
+            'paddings',
         ]
         heavily_sensitive_attr = ['data_format', 'data_layout']
         layout_autotune_attr = []
......
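
For context, the attribute-name lists above drive how the eager code generator classifies each op's layout sensitivity; with `'paddings'` added, an op that carries a paddings attribute (such as `pad`) is treated as lightly layout sensitive. The snippet below is a simplified, hypothetical sketch of that classification idea, not the actual generator code; the function name and return labels are illustrative only.

```python
# Hypothetical sketch: classify an op's layout sensitivity from its attribute
# names, mirroring the lists above. Not the real eager_gen.py implementation.
LIGHTLY_SENSITIVE_ATTRS = {
    "axis", "axes", "dim", "dims", "start", "end", "stop", "perm", "paddings",
}
HEAVILY_SENSITIVE_ATTRS = {"data_format", "data_layout"}


def classify_layout_sensitivity(attr_names):
    """Return 'heavy', 'light', or 'agnostic' for a given list of op attributes."""
    names = set(attr_names)
    if names & HEAVILY_SENSITIVE_ATTRS:
        return "heavy"     # e.g. conv2d: the op itself must be layout-transformed
    if names & LIGHTLY_SENSITIVE_ATTRS:
        return "light"     # e.g. pad: only axis-like attributes need adjusting
    return "agnostic"      # e.g. relu: layout can pass straight through


print(classify_layout_sensitivity(["paddings", "mode", "value"]))  # -> light
print(classify_layout_sensitivity(["data_format", "strides"]))     # -> heavy
```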
......@@ -67,7 +67,7 @@ inline std::shared_ptr<EagerLayoutTransformer> EagerLayoutAutotune(
                                kSlotSmallVectorSize>& tensors_vector,
     T* attr UNUSED) {
   // For lightly op like reduce
-  if (!(DesiredLayout() == phi::DataLayout::UNDEFINED)) {
+  if ((DesiredLayout() == phi::DataLayout::UNDEFINED)) {
     VLOG(4) << "LayoutAutotune was unstarted. Current op :" << op_name;
     return std::make_shared<EagerLayoutTransformer>(
         op_name, tensors_vector, tensors_vector[0][0].layout());
......@@ -85,7 +85,21 @@ inline std::shared_ptr<EagerLayoutTransformer> EagerLayoutAutotune(
   // For lightly op like argmax
   return EagerLayoutAutotune<T1>(op_name, tensors_vector, axis);
 }
 
+template <>
+inline std::shared_ptr<EagerLayoutTransformer> EagerLayoutAutotune(
+    const std::string& op_name,
+    const paddle::small_vector<std::vector<paddle::Tensor>,
+                               kSlotSmallVectorSize>& tensors_vector,
+    paddle::experimental::IntArray* paddings,
+    std::string* attr) {
+  // For lightly op like pad
+  if ((DesiredLayout() == phi::DataLayout::UNDEFINED)) {
+    VLOG(4) << "LayoutAutotune was unstarted. Current op :" << op_name;
+    return std::make_shared<EagerLayoutTransformer>(
+        op_name, tensors_vector, tensors_vector[0][0].layout());
+  }
+  return std::make_shared<EagerLightlyLayoutSensitiveOpTransformer>(op_name);
+}
+
 template <>
 inline std::shared_ptr<EagerLayoutTransformer> EagerLayoutAutotune(
     const std::string& op_name,
......@@ -104,8 +118,9 @@ inline std::shared_ptr<EagerLayoutTransformer> EagerLayoutAutotune(
   auto data_type = tensors_vector[0][0].dtype();
   bool is_tune_fp32 =
       (data_type == phi::DataType::FLOAT32) && (*attr == "NHWC");
-  bool is_tune_fp16 =
-      (data_type == phi::DataType::FLOAT16) && (*attr == "NCHW");
+  bool is_tune_fp16 = (data_type == phi::DataType::FLOAT16 ||
+                       data_type == phi::DataType::BFLOAT16) &&
+                      (*attr == "NCHW");
   VLOG(4) << "LayoutAutoTune assert with dtype and layout, Current op : "
           << op_name;
   if (is_tune_fp32) {
......
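
The new specialization above routes `pad`, whose `paddings` attribute indexes spatial dimensions, through the lightly-sensitive transformer once a desired layout has been set. The snippet below is an illustration only (not part of the change) of why this is needed: the same `(left, right, top, bottom)` paddings touch different tensor axes under NCHW versus NHWC, so the op cannot simply ignore the tuned layout. It assumes a recent Paddle build where `paddle.nn.functional.pad` accepts `data_format` for 4-D input.

```python
# Illustration: identical logical padding maps to different axes under NCHW
# and NHWC, which is why 'pad' must be treated as layout sensitive.
import paddle

x_nchw = paddle.rand([1, 8, 14, 12])
x_nhwc = paddle.transpose(x_nchw, perm=[0, 2, 3, 1])

pad = [1, 0, 1, 2]  # (left, right, top, bottom) on the spatial dims

out_nchw = paddle.nn.functional.pad(x_nchw, pad, mode="constant", data_format="NCHW")
out_nhwc = paddle.nn.functional.pad(x_nhwc, pad, mode="constant", data_format="NHWC")

print(out_nchw.shape)  # [1, 8, 17, 13]
print(out_nhwc.shape)  # [1, 17, 13, 8] -- same result, channels-last
```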
......@@ -199,7 +199,7 @@ class EagerHeavilyLayoutSensitiveOpTransformer : public EagerLayoutTransformer {
   explicit EagerHeavilyLayoutSensitiveOpTransformer(const std::string& op_name,
                                                     std::string* layout)
       : op_name_(op_name), desired_layout_(DesiredLayout()) {
-    VLOG(4) << "Heavily op: " << op_name;
+    VLOG(4) << "Heavily op: " << op_name << " layout " << *layout;
     *layout = phi::DataLayoutToString(DesiredLayout());
   }
......
......@@ -159,7 +159,8 @@ paddle::imperative::NameVarMap<VarType> AutoTuneLayout(
     const paddle::imperative::NameVarMap<VarType>& outs,
     paddle::framework::AttributeMap* attrs,
     const std::shared_ptr<imperative::Tracer>& tracer) {
-  if (!tracer->UseLayoutAutoTune()) {
+  if (!tracer->UseLayoutAutoTune() ||
+      op_type.find("_grad") != std::string::npos) {
     return ins;
   }
   // When layout autotuning is enabled, the tuner will check the desired layout.
......@@ -191,7 +192,8 @@ paddle::imperative::NameVarMap<VarType> AutoTuneLayout(
       (conv_in_type == framework::proto::VarType::FP32);
   bool is_tune_fp16 =
       (PADDLE_GET_CONST(std::string, (*attrs)["data_format"]) == "NCHW") &&
-      (conv_in_type == framework::proto::VarType::FP16);
+      (conv_in_type == framework::proto::VarType::FP16 ||
+       conv_in_type == framework::proto::VarType::BF16);
   if (is_tune_fp32) {
     LayoutAutoTune::Instance().SetDesiredLayout(DataLayout::NCHW);
     LayoutAutoTune::Instance().SetDefaultLayout(DataLayout::NHWC);
......
......@@ -167,6 +167,21 @@ class LayoutAutoTune(unittest.TestCase):
         self.assertEqual(conv_out1.shape, [1, 8, 14, 12])
         self.assertEqual(out.shape, [2, 8, 14, 12])
 
+    def test_padding_transpose(self):
+        conv = paddle.nn.Conv2D(3, 8, (3, 3))
+        data = paddle.rand([1, 3, 16, 14])
+        mode = "constant"
+        pad = [1, 0, 1, 2]
+        padding = paddle.nn.Pad2D(padding=pad, mode=mode, data_format='NCHW')
+        with paddle.amp.auto_cast(level="O2", dtype="bfloat16"):
+            conv_out = conv(data)
+            # conv_out.shape = [1, 14, 12, 8] with NHWC
+            out = padding(conv_out)
+            # the layout tuner transposes the result back from NHWC to NCHW
+        self.assertEqual(conv_out.shape, [1, 8, 14, 12])
+        self.assertEqual(out.shape, [1, 8, 17, 13])
+
 
 class TestAutoTuneAPI(unittest.TestCase):
     def test_set_config_warnings(self):
......
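
For completeness, the sketch below shows how a user-level script would exercise this path: layout autotuning is switched on via `paddle.incubate.autotune.set_config`, and a `Pad2D` following a convolution is run under bfloat16 AMP, mirroring the new test case. It is a minimal usage sketch under the assumption of a GPU build of Paddle with bfloat16 support; it is not part of the change itself.

```python
# Minimal usage sketch (assumes a GPU build of Paddle with bfloat16 support).
import paddle

# Enable layout autotuning so eligible conv outputs are kept in NHWC internally.
paddle.incubate.autotune.set_config({"layout": {"enable": True}})

conv = paddle.nn.Conv2D(3, 8, (3, 3))
pad2d = paddle.nn.Pad2D(padding=[1, 0, 1, 2], mode="constant", data_format="NCHW")
data = paddle.rand([1, 3, 16, 14])

with paddle.amp.auto_cast(level="O2", dtype="bfloat16"):
    conv_out = conv(data)   # may be produced as NHWC by the layout tuner
    out = pad2d(conv_out)   # 'paddings' is now recognized as a lightly sensitive attr

print(conv_out.shape)  # [1, 8, 14, 12]
print(out.shape)       # [1, 8, 17, 13]
```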