Unverified commit cd84e1bf authored by niuliling123, committed by GitHub

Fix layout autotune in windows ci (#45751)

Parent 98a5af1a
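The fix, visible in the C++ hunks below, records whether layout autotune was enabled before the nested conv2d_dygraph_function call, disables it for the duration of that call, and re-enables it afterwards only if it had been on; previously EnableLayoutAutoTune() was called unconditionally, which could switch autotune back on in environments (such as the Windows CI) where it was meant to stay off. The unit tests are updated to branch on the runtime state instead of assuming autotune is active. A minimal Python sketch of the same save-and-restore pattern, assuming the enable_layout_autotune and disable_layout_autotune bindings on paddle.fluid.core (only use_layout_autotune itself appears in this diff):

import paddle
from paddle.fluid import core

# Record the current state, as the C++ change does via
# LayoutAutoTune::Instance().UseLayoutAutoTune().
was_enabled = core.use_layout_autotune()
core.disable_layout_autotune()  # assumed binding name
try:
    pass  # run the op while autotune is suppressed
finally:
    # Restore only when autotune was on to begin with; re-enabling
    # unconditionally is exactly the behavior this commit removes.
    if was_enabled:
        core.enable_layout_autotune()  # assumed binding name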
@@ -86,7 +86,8 @@ paddle::experimental::Tensor conv2d_dygraph_function(
auto transformer = egr::EagerLayoutAutotune<std::string>(
op_name, tensors_vector, &data_format);
auto NEW_input = transformer->TransInTensor("input", input);
bool is_enable_tune =
paddle::imperative::LayoutAutoTune::Instance().UseLayoutAutoTune();
paddle::imperative::LayoutAutoTune::Instance().DisableLayoutAutoTune();
auto out = conv2d_dygraph_function(NEW_input,
filter,
@@ -100,7 +101,9 @@ paddle::experimental::Tensor conv2d_dygraph_function(
workspace_size_MB,
exhaustive_search);
transformer->SetOutTensorLayout(&out);
paddle::imperative::LayoutAutoTune::Instance().EnableLayoutAutoTune();
if (is_enable_tune) {
paddle::imperative::LayoutAutoTune::Instance().EnableLayoutAutoTune();
}
// Returns
return out;
}
......
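The remaining hunks update the layout-autotune unit tests. The tests switch autotune on in setUp through paddle.incubate.autotune.set_config (the call is truncated in the hunk below); a minimal sketch of that configuration, with the config keys assumed from the public autotune API:

import paddle

# Enable layout autotune only on a CUDA build, mirroring the guard
# in use_autoune() below.
if paddle.is_compiled_with_cuda():
    paddle.incubate.autotune.set_config(
        config={"layout": {"enable": True}})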
@@ -46,6 +46,9 @@ class SimpleNet(paddle.nn.Layer):
class LayoutAutoTune(unittest.TestCase):
def setUp(self):
self.use_autoune()
def use_autoune(self):
if paddle.is_compiled_with_cuda():
paddle.incubate.autotune.set_config(
@@ -85,16 +88,18 @@ class LayoutAutoTune(unittest.TestCase):
def test_enable_autotune(self):
if self.use_autoune():
conv_out, predict = self.train(data_format="NCHW")
self.assertEqual(conv_out.shape, [1, 14, 14, 8])
self.assertEqual(predict.shape, [1, 2])
if paddle.fluid.core.use_layout_autotune():
self.assertEqual(conv_out.shape, [1, 14, 14, 8])
self.assertEqual(predict.shape, [1, 2])
else:
self.assertEqual(conv_out.shape, [1, 8, 14, 14])
self.assertEqual(predict.shape, [1, 2])
else:
conv_out, predict = self.train(data_format="NCHW")
self.assertEqual(conv_out.shape, [1, 8, 14, 14])
self.assertEqual(predict.shape, [1, 2])
def test_transpose_op_transposer(self):
if not self.use_autoune():
return
conv = paddle.nn.Conv2D(3, 8, (3, 3))
data = paddle.rand([1, 3, 16, 14])
label_data = paddle.randint(0, 1, shape=[1, 1], dtype="int64")
@@ -112,12 +117,14 @@ class LayoutAutoTune(unittest.TestCase):
scaled.backward()
scaler.minimize(optimizer, scaled)
self.assertEqual(conv_out.shape, [1, 14, 12, 8])
self.assertEqual(out.shape, [1, 12, 8, 14])
if paddle.fluid.core.use_layout_autotune():
self.assertEqual(conv_out.shape, [1, 14, 12, 8])
self.assertEqual(out.shape, [1, 12, 8, 14])
else:
self.assertEqual(conv_out.shape, [1, 8, 14, 12])
self.assertEqual(out.shape, [1, 12, 8, 14])
def test_flatten_op_transposer(self):
if not self.use_autoune():
return
conv = paddle.nn.Conv2D(3, 8, (3, 3))
flatten = paddle.nn.Flatten(start_axis=1, stop_axis=2)
data = paddle.rand([1, 3, 16, 14])
@@ -129,25 +136,42 @@ class LayoutAutoTune(unittest.TestCase):
# because it flattens the C and H dimensions.
out = flatten(conv_out)
self.assertEqual(conv_out.shape, [1, 14, 12, 8])
self.assertEqual(out.shape, [1, 112, 12])
if paddle.fluid.core.use_layout_autotune():
self.assertEqual(conv_out.shape, [1, 14, 12, 8])
self.assertEqual(out.shape, [1, 112, 12])
else:
self.assertEqual(conv_out.shape, [1, 8, 14, 12])
self.assertEqual(out.shape, [1, 112, 12])
def test_argmax_op_transposer_keep_dims(self):
if not self.use_autoune():
return
conv = paddle.nn.Conv2D(3, 8, (3, 3))
data = paddle.rand([1, 3, 16, 14])
with paddle.amp.auto_cast(level="O2"):
conv_out = conv(data)
# conv_out.shape = [1, 14, 12, 8] with NHWC
out = paddle.argmax(conv_out, axis=1, keepdim=True)
if paddle.fluid.core.use_layout_autotune():
self.assertEqual(conv_out.shape, [1, 14, 12, 8])
self.assertEqual(out.shape, [1, 14, 12, 1])
else:
self.assertEqual(conv_out.shape, [1, 8, 14, 12])
self.assertEqual(out.shape, [1, 1, 14, 12])
self.assertEqual(conv_out.shape, [1, 14, 12, 8])
self.assertEqual(out.shape, [1, 14, 12, 1])
def test_argmax_op_transposer_ff(self):
conv = paddle.nn.Conv2D(3, 8, (3, 3))
data = paddle.rand([1, 3, 16, 14])
with paddle.amp.auto_cast(level="O2"):
conv_out = conv(data)
# conv_out.shape = [1, 14, 12, 8] with NHWC
out = paddle.argmax(conv_out)
if paddle.fluid.core.use_layout_autotune():
self.assertEqual(conv_out.shape, [1, 14, 12, 8])
self.assertEqual(out.shape, [1])
else:
self.assertEqual(conv_out.shape, [1, 8, 14, 12])
self.assertEqual(out.shape, [1])
def test_argmax_op_transposer(self):
if not self.use_autoune():
return
def test_argmax_op_transposer_t(self):
conv = paddle.nn.Conv2D(3, 8, (3, 3))
data = paddle.rand([1, 3, 16, 14])
with paddle.amp.auto_cast(level="O2"):
@@ -155,12 +179,14 @@ class LayoutAutoTune(unittest.TestCase):
# conv_out.shape = [1, 14, 12, 8] with NHWC
out = paddle.argmax(conv_out)
self.assertEqual(conv_out.shape, [1, 14, 12, 8])
self.assertEqual(out.shape, [1])
if paddle.fluid.core.use_layout_autotune():
self.assertEqual(conv_out.shape, [1, 14, 12, 8])
self.assertEqual(out.shape, [1])
else:
self.assertEqual(conv_out.shape, [1, 8, 14, 12])
self.assertEqual(out.shape, [1])
def test_concat_op_transposer(self):
if not self.use_autoune():
return
in1 = paddle.rand([1, 8, 14, 12])
conv = paddle.nn.Conv2D(3, 8, (3, 3))
data = paddle.rand([1, 3, 16, 14])
@@ -169,12 +195,14 @@ class LayoutAutoTune(unittest.TestCase):
# conv_out.shape = [1, 14, 12, 8] with NHWC
out = paddle.concat(x=[conv_out, in1], axis=0)
self.assertEqual(conv_out.shape, [1, 14, 12, 8])
self.assertEqual(out.shape, [2, 8, 14, 12])
if paddle.fluid.core.use_layout_autotune():
self.assertEqual(conv_out.shape, [1, 14, 12, 8])
self.assertEqual(out.shape, [2, 8, 14, 12])
else:
self.assertEqual(conv_out.shape, [1, 8, 14, 12])
self.assertEqual(out.shape, [2, 8, 14, 12])
def test_concat_op_no_transposer(self):
if not self.use_autoune():
return
conv = paddle.nn.Conv2D(3, 8, (3, 3))
data1 = paddle.rand([1, 3, 16, 14])
data2 = paddle.rand([1, 3, 16, 14])
@@ -184,8 +212,12 @@ class LayoutAutoTune(unittest.TestCase):
# conv_out.shape = [1, 14, 12, 8] with NHWC
out = paddle.concat(x=[conv_out1, conv_out2], axis=0)
self.assertEqual(conv_out1.shape, [1, 14, 12, 8])
self.assertEqual(out.shape, [2, 14, 12, 8])
if paddle.fluid.core.use_layout_autotune():
self.assertEqual(conv_out1.shape, [1, 14, 12, 8])
self.assertEqual(out.shape, [2, 14, 12, 8])
else:
self.assertEqual(conv_out1.shape, [1, 8, 14, 12])
self.assertEqual(out.shape, [2, 8, 14, 12])
class TestAutoTuneAPI(unittest.TestCase):
......
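Every assertion change in the test hunks above follows the same shape: query paddle.fluid.core.use_layout_autotune() at runtime and expect NHWC tensor shapes only when the tuner is actually active, falling back to the NCHW expectations otherwise. Distilled into a sketch (assert_conv_shape is a hypothetical helper, not part of the commit):

import paddle

def assert_conv_shape(testcase, tensor, nhwc_shape, nchw_shape):
    # NHWC when layout autotune actually rewrote the conv, NCHW when
    # it is unavailable (e.g. on the Windows CI this commit targets).
    if paddle.fluid.core.use_layout_autotune():
        testcase.assertEqual(tensor.shape, nhwc_shape)
    else:
        testcase.assertEqual(tensor.shape, nchw_shape)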