Unverified commit e47e82d0 authored by Allen Guo, committed by GitHub

[IPU] add more UTs 1/N (#44207)

* add authors
Co-authored-by: Allen Guo <alleng@graphcore.ai>
Co-authored-by: Zhixin Yao <zhixiny@graphcore.ai>
Co-authored-by: Zhaorui Chen <zhaoruic@graphcore.ai>

* squash py changes 1/N
Co-authored-by: Zhixin Yao <zhixiny@graphcore.ai>
Co-authored-by: Zhaorui Chen <zhaoruic@graphcore.ai>
Parent 1fd61106
@@ -14,16 +14,12 @@
from __future__ import print_function
import numpy as np
import tempfile
import unittest
import sys
import os

import paddle
import paddle.fluid as fluid
from paddle.jit import to_static
from paddle.utils.cpp_extension import load
from paddle.optimizer.lr import LRScheduler
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
SEED = 2022
@@ -52,7 +48,9 @@ class SimpleLayer(paddle.nn.Layer):
return x
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
                 "core is not compiled with IPU")
class TestBase(IPUOpTest):
@classmethod
def setUpClass(cls):
@@ -124,6 +122,9 @@ class TestBase(IPUOpTest):
result.append(loss)
if use_ipu:
ipu_strategy.release_patch()
return np.array(result)
def test_training(self):
......
@@ -14,59 +14,84 @@
from __future__ import print_function
import numpy as np
import tempfile
import unittest
import sys
from functools import partial

import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.dygraph_to_static.program_translator import ProgramCache
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
from paddle.jit import to_static
from paddle.utils.cpp_extension import load
from paddle.optimizer.lr import LRScheduler
SEED = 2022
class SimpleLayer(paddle.nn.Layer):
    def __init__(self,
                 loss_op=None,
                 use_softmax=True,
                 use_reduction=True,
                 use_identity_loss=True):
        super(SimpleLayer, self).__init__()
        self.loss_op = loss_op
        self.conv = paddle.nn.Conv2D(in_channels=3,
                                     out_channels=1,
                                     kernel_size=2,
                                     stride=1)
        self.use_softmax = use_softmax
        self.use_reduction = use_reduction
        self.use_identity_loss = use_identity_loss
    @to_static()
    def forward(self, x, target=None):
        x = self.conv(x)
        x = paddle.fluid.layers.flatten(x, axis=1)
        if target is not None:
            if self.use_softmax:
                x = paddle.fluid.layers.softmax(x)
            if self.loss_op:
                loss = self.loss_op(x, target)
            else:
                loss = paddle.fluid.layers.cross_entropy(x, target)
            if self.use_reduction:
                loss = paddle.mean(loss)
            if self.use_identity_loss:
                loss = paddle.incubate.identity_loss(loss, 1)
            return x, loss
        return x
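The refactor replaces the boolean `use_ipu` flag with explicit loss-construction switches, so the same layer serves both device paths. A minimal sketch of the two reductions, assuming (as the matched CPU/IPU results imply) that `identity_loss(loss, 1)` applies a mean reduction on device, mirroring the CPU path's `paddle.mean`:

```python
# Illustrative only: stand-in tensors, not the test's real loss values.
import paddle

per_sample = paddle.rand([32])        # stand-in for per-sample cross-entropy
cpu_loss = paddle.mean(per_sample)    # use_reduction=True (CPU/GPU path)
# use_identity_loss=True (IPU path); needs an IPU-enabled build to run:
# ipu_loss = paddle.incubate.identity_loss(per_sample, 1)
```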
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
                 "core is not compiled with IPU")
class TestBase(IPUOpTest):

    def setUp(self):
        paddle.disable_static()
        self.set_op_attrs()
        self.set_data_feed()
def set_op_attrs(self):
self.loss_op = paddle.fluid.layers.cross_entropy
def set_data_feed(self):
self.data = paddle.uniform((32, 3, 10, 10), dtype='float32')
self.label = paddle.randint(0, 10, shape=[32], dtype='int64')
def create_model(self, use_ipu=False):
return SimpleLayer(loss_op=self.loss_op,
use_softmax=True,
use_reduction=not use_ipu,
use_identity_loss=use_ipu)
def _test(self, use_ipu=False):
paddle.seed(SEED)
np.random.seed(SEED)
        model = self.create_model(use_ipu)
optim = paddle.optimizer.Adam(learning_rate=0.01,
parameters=model.parameters())
if use_ipu:
device = paddle.set_device('ipu')
@@ -80,7 +105,7 @@ class TestBase(IPUOpTest):
result = []
for epoch in range(100):
            # IPU only needs to call model() to do forward/backward/grad update
            pred, loss = model(self.data, self.label)
if not use_ipu:
loss.backward()
optim.step()
@@ -104,7 +129,6 @@ class TestSaveLoad(TestBase):
@classmethod
def setUpClass(cls):
cls.save_path = tempfile.TemporaryDirectory()
@classmethod
@@ -114,11 +138,9 @@ class TestSaveLoad(TestBase):
def _test(self, use_ipu=False):
paddle.seed(SEED)
np.random.seed(SEED)
        model = self.create_model(use_ipu)
optim = paddle.optimizer.Adam(learning_rate=0.01,
parameters=model.parameters())
        model_path = '{}/model_state_dict_{}.pdparams'.format(
            self.save_path.name, 'ipu' if use_ipu else 'cpu')
optim_path = '{}/optim_state_dict_{}.pdopt'.format(
@@ -136,7 +158,7 @@ class TestSaveLoad(TestBase):
result = []
for epoch in range(100):
            # IPU only needs to call model() to do forward/backward/grad update
            pred, loss = model(self.data, self.label)
if not use_ipu:
loss.backward()
optim.step()
@@ -155,7 +177,7 @@ class TestSaveLoad(TestBase):
for epoch in range(100):
            # IPU only needs to call model() to do forward/backward/grad update
            pred, loss = model(self.data, self.label)
if not use_ipu:
loss.backward()
optim.step()
@@ -169,10 +191,11 @@ class TestSaveLoad(TestBase):
return np.array(result)
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
                 "core is not compiled with IPU")
class TestPatch(IPUOpTest):

    def setUp(cls):
        paddle.disable_static()
def test(self, use_ipu=False):
@@ -189,5 +212,46 @@ class TestPatch(IPUOpTest):
self.assertTrue(reset_step is old_step)
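The identity check above asserts that `release_patch()` puts back the very same function object that `ipu_strategy` swapped out on `LRScheduler`. A self-contained sketch of that patch/release pattern (names below are illustrative, not Paddle internals):

```python
class Sched:

    def step(self):
        pass


old_step = Sched.step
Sched.step = lambda self: None   # patch, as ipu_strategy does to LRScheduler.step
Sched.step = old_step            # release_patch() must restore the original
assert Sched.step is old_step    # same object, not just equal behavior
```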
class TestWithoutIdentityLoss1(TestBase):
def create_model(self, use_ipu=False):
return SimpleLayer(loss_op=self.loss_op,
use_softmax=True,
use_reduction=True,
use_identity_loss=False)
class TestWithoutIdentityLoss2(TestBase):
def set_op_attrs(self):
self.loss_op = paddle.fluid.layers.softmax_with_cross_entropy
def set_data_feed(self):
self.data = paddle.uniform((32, 3, 10, 10), dtype='float32')
self.label = paddle.randint(0, 10, shape=[32, 1], dtype='int64')
def create_model(self, use_ipu=False):
return SimpleLayer(loss_op=self.loss_op,
use_softmax=False,
use_reduction=True,
use_identity_loss=False)
class TestWithoutIdentityLoss3(TestBase):
def set_op_attrs(self):
self.loss_op = partial(paddle.fluid.layers.kldiv_loss, reduction="none")
def set_data_feed(self):
self.data = paddle.uniform((32, 3, 10, 10), dtype='float32')
self.label = paddle.rand(shape=[32, 81], dtype='float32')
def create_model(self, use_ipu=False):
return SimpleLayer(loss_op=self.loss_op,
use_softmax=True,
use_reduction=True,
use_identity_loss=False)
if __name__ == "__main__":
unittest.main()
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
import paddle
import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
self.set_atol()
self.set_training()
self.set_data_feed()
self.set_feed_attr()
def set_data_feed(self):
data_x = np.random.uniform(size=[1, 3])
data_y = np.random.uniform(size=[2, 2, 3])
self.feed_fp32 = {
'x': data_x.astype(np.float32),
'y': data_y.astype(np.float32)
}
self.feed_fp16 = {
'x': data_x.astype(np.float16),
'y': data_y.astype(np.float16)
}
def set_feed_attr(self):
self.feed_shape = [x.shape for x in self.feed_fp32.values()]
self.feed_list = list(self.feed_fp32.keys())
self.feed_dtype = [x.dtype for x in self.feed_fp32.values()]
@IPUOpTest.static_graph
def build_model(self):
x = paddle.static.data(name=self.feed_list[0],
shape=self.feed_shape[0],
dtype="float32")
y = paddle.static.data(name=self.feed_list[1],
shape=self.feed_shape[1],
dtype="float32")
out = paddle.expand_as(x, y)
self.fetch_list = [out.name]
def run_model(self, exec_mode):
self.run_op_test(exec_mode)
def test(self):
for m in IPUOpTest.ExecutionMode:
if not self.skip_mode(m):
self.build_model()
self.run_model(m)
self.check()
class TestCase1(TestBase):
def set_data_feed(self):
data_x = np.random.uniform(size=[2, 3])
data_y = np.random.uniform(size=[2, 4, 2, 3])
self.feed_fp32 = {
'x': data_x.astype(np.float32),
'y': data_y.astype(np.float32)
}
self.feed_fp16 = {
'x': data_x.astype(np.float16),
'y': data_y.astype(np.float16)
}
@unittest.skip("corresponding dimensions must have the same value.")
class TestCase2(TestBase):
def set_data_feed(self):
data_x = np.random.uniform(size=[2, 3])
data_y = np.random.uniform(size=[2, 4, 3, 3])
self.feed_fp32 = {
'x': data_x.astype(np.float32),
'y': data_y.astype(np.float32)
}
self.feed_fp16 = {
'x': data_x.astype(np.float16),
'y': data_y.astype(np.float16)
}
if __name__ == "__main__":
unittest.main()
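For reference, `paddle.expand_as(x, y)` broadcasts `x` to the shape of `y`; the fp32 base case above can be sketched in plain numpy:

```python
import numpy as np

x = np.random.uniform(size=[1, 3]).astype(np.float32)
y = np.random.uniform(size=[2, 2, 3]).astype(np.float32)
out = np.broadcast_to(x, y.shape)   # expand_as follows broadcasting rules
assert out.shape == (2, 2, 3)
```

The skipped TestCase2 fails the same rule: [2, 3] cannot broadcast to [2, 4, 3, 3] because the trailing dimensions disagree.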
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
import paddle
import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
self.set_atol()
self.set_training()
self.set_data_feed()
self.set_feed_attr()
self.set_attrs()
def set_data_feed(self):
data = np.random.uniform(size=[2, 3])
self.feed_fp32 = {'x': data.astype(np.float32)}
self.feed_fp16 = {'x': data.astype(np.float16)}
def set_feed_attr(self):
self.feed_shape = [x.shape for x in self.feed_fp32.values()]
self.feed_list = list(self.feed_fp32.keys())
self.feed_dtype = [x.dtype for x in self.feed_fp32.values()]
def set_attrs(self):
self.attrs = {"shape": [2, 2, 3]}
@IPUOpTest.static_graph
def build_model(self):
x = paddle.static.data(name=self.feed_list[0],
shape=self.feed_shape[0],
dtype="float32")
out = paddle.expand(x, **self.attrs)
self.fetch_list = [out.name]
def run_model(self, exec_mode):
self.run_op_test(exec_mode)
def test(self):
for m in IPUOpTest.ExecutionMode:
if not self.skip_mode(m):
self.build_model()
self.run_model(m)
self.check()
class TestCase1(TestBase):
def set_attrs(self):
self.attrs = {"shape": [5, 2, 2, 3]}
class TestCase2(TestBase):
def set_data_feed(self):
data = np.random.uniform(size=[2, 1, 3])
self.feed_fp32 = {'x': data.astype(np.float32)}
self.feed_fp16 = {'x': data.astype(np.float16)}
def set_attrs(self):
self.attrs = {"shape": [5, 2, 2, 3]}
@unittest.skip("corresponding dimensions must have the same value.")
class TestCase3(TestBase):
def set_attrs(self):
self.attrs = {"shape": [5, 2, 4, 3]}
@unittest.skip("`shape` given as a Tensor is not supported.")
class TestCase4(TestBase):
def set_data_feed(self):
data = np.random.uniform(size=[3, 3])
self.feed_fp32 = {'x': data.astype(np.float32)}
self.feed_fp16 = {'x': data.astype(np.float16)}
@IPUOpTest.static_graph
def build_model(self):
x = paddle.static.data(name=self.feed_list[0],
shape=self.feed_shape[0],
dtype="float32")
self.attrs = {
'name': 'y',
'shape': [3],
'dtype': 'int32',
'value': 3,
}
y = paddle.fluid.layers.fill_constant(**self.attrs)
out = paddle.expand(x, shape=y)
self.fetch_list = [out.name]
if __name__ == "__main__":
unittest.main()
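`paddle.expand` is the same broadcast expressed with an explicit target shape; a numpy sketch of the base case:

```python
import numpy as np

x = np.random.uniform(size=[2, 3]).astype(np.float32)
out = np.broadcast_to(x, (2, 2, 3))   # attrs: shape=[2, 2, 3]
assert out.shape == (2, 2, 3)
```

TestCase3's target shape [5, 2, 4, 3] fails the trailing-dimension check against a [2, 3] input, hence its skip.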
@@ -69,5 +69,24 @@ class TestCase1(TestBase):
self.attrs = {'fill_value': 3, 'dtype': 'int32'}
class TestError(TestBase):
@IPUOpTest.static_graph
def build_model(self):
x = paddle.fluid.data('x', [-1, 3, 13], 'float32')
x_fill = paddle.full_like(x, **self.attrs)
out = paddle.fluid.layers.elementwise_add(x_fill, x_fill)
self.fetch_list = [out.name]
def test(self):
self.build_model()
def test_error():
self.run_op_test(IPUOpTest.ExecutionMode.IPU_FP32)
self.assertRaisesRegex(Exception, "Please check tensor shape setting",
test_error)
if __name__ == "__main__":
unittest.main()
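`paddle.full_like` fills a tensor shaped like its input; the attrs used above map directly onto numpy:

```python
import numpy as np

x = np.zeros([2, 3, 13], dtype=np.float32)
out = np.full_like(x, 3, dtype=np.int32)   # attrs: fill_value=3, dtype='int32'
assert out.shape == x.shape and out.dtype == np.int32
```

TestError's -1 (dynamic) leading dimension is what triggers the "Please check tensor shape setting" error asserted above.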
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
import paddle
import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
self.set_atol()
self.set_training()
self.set_data_feed()
self.set_feed_attr()
self.set_op_attrs()
def set_data_feed(self):
data = np.random.uniform(size=[2, 2, 4, 6])
self.feed_fp32 = {"x": data.astype(np.float32)}
self.feed_fp16 = {"x": data.astype(np.float16)}
def set_feed_attr(self):
self.feed_shape = [x.shape for x in self.feed_fp32.values()]
self.feed_list = list(self.feed_fp32.keys())
def set_op_attrs(self):
self.attrs = {}
self.attrs['start_axis'] = 0
self.attrs['stop_axis'] = -1
@IPUOpTest.static_graph
def build_model(self):
x = paddle.static.data(name=self.feed_list[0],
shape=self.feed_shape[0],
dtype='float32')
out = paddle.flatten(x=x, **self.attrs)
self.fetch_list = [out.name]
def run_model(self, exec_mode):
self.run_op_test(exec_mode)
def test(self):
for m in IPUOpTest.ExecutionMode:
if not self.skip_mode(m):
self.build_model()
self.run_model(m)
self.check()
class TestCase1(TestBase):
def set_op_attrs(self):
self.attrs = {}
self.attrs['start_axis'] = 0
self.attrs['stop_axis'] = 2
class TestCase2(TestBase):
def set_op_attrs(self):
self.attrs = {}
self.attrs['start_axis'] = 1
self.attrs['stop_axis'] = -1
class TestCase3(TestBase):
def set_op_attrs(self):
self.attrs = {}
self.attrs['start_axis'] = 1
self.attrs['stop_axis'] = 2
if __name__ == "__main__":
unittest.main()
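`paddle.flatten(x, start_axis, stop_axis)` collapses the axes in `[start_axis, stop_axis]` into one; for the [2, 2, 4, 6] feed used by these tests, the four attribute combinations reduce to reshapes:

```python
import numpy as np

x = np.random.rand(2, 2, 4, 6)
out_all = x.reshape(-1)                # start_axis=0, stop_axis=-1 -> (96,)
out_tail = x.reshape(x.shape[0], -1)   # start_axis=1, stop_axis=-1 -> (2, 48)
out_mid = x.reshape(2, 2 * 4, 6)       # start_axis=1, stop_axis=2  -> (2, 8, 6)
assert out_tail.shape == (2, 48) and out_mid.shape == (2, 8, 6)
```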
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
import paddle
import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
self.set_atol()
self.set_training()
self.set_feed()
self.set_feed_attr()
self.set_op_attrs()
def set_atol(self):
self.atol = 1e-6
self.rtol = 1e-6
self.atol_fp16 = 1e-3
self.rtol_fp16 = 1e-3
def set_feed(self):
data = np.random.uniform(size=[3, 2, 2])
self.feed_fp32 = {'x': data.astype(np.float32)}
self.feed_fp16 = {'x': data.astype(np.float16)}
def set_feed_attr(self):
self.feed_shape = [x.shape for x in self.feed_fp32.values()]
self.feed_list = list(self.feed_fp32.keys())
self.feed_dtype = [x.dtype for x in self.feed_fp32.values()]
def set_op_attrs(self):
self.attrs = {}
self.attrs['axis'] = [0, 1]
@IPUOpTest.static_graph
def build_model(self):
x = paddle.static.data(name=self.feed_list[0],
shape=self.feed_shape[0],
dtype=self.feed_dtype[0])
x = paddle.flip(x, **self.attrs)
self.fetch_list = [x.name]
def run_model(self, exec_mode):
self.run_op_test(exec_mode)
def test(self):
for m in IPUOpTest.ExecutionMode:
if not self.skip_mode(m):
self.build_model()
self.run_model(m)
self.check()
class TestCase1(TestBase):
def set_feed(self):
data = np.random.randint(0, 10, size=[3, 2, 2])
self.feed_fp32 = {'x': data.astype(np.int32)}
self.feed_fp16 = {'x': data.astype(np.int32)}
class TestCase2(TestBase):
def set_feed(self):
data = np.random.randint(0, 2, size=[4, 3, 2, 2])
        self.feed_fp32 = {'x': data.astype(np.bool_)}
        self.feed_fp16 = {'x': data.astype(np.bool_)}
if __name__ == "__main__":
unittest.main()
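`paddle.flip(x, axis=[0, 1])` reverses the tensor along both listed axes; a numpy check of the semantics:

```python
import numpy as np

x = np.arange(12).reshape(3, 2, 2)
out = np.flip(x, axis=(0, 1))        # attrs: axis=[0, 1]
assert out[0, 0, 0] == x[-1, -1, 0]  # both leading axes reversed
```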
@@ -127,5 +127,17 @@ class TestEqual(TestGreaterThan):
self.op = paddle.fluid.layers.equal
class TestGreaterEqual(TestGreaterThan):
def set_test_op(self):
self.op = paddle.fluid.layers.greater_equal
class TestLessEqual(TestGreaterThan):
def set_test_op(self):
self.op = paddle.fluid.layers.less_equal
if __name__ == "__main__":
unittest.main()
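The two new cases cover the remaining elementwise relational ops; their semantics in numpy terms:

```python
import numpy as np

a = np.array([1, 2, 3])
b = np.array([2, 2, 2])
print(np.greater_equal(a, b))   # [False  True  True]
print(np.less_equal(a, b))      # [ True  True False]
```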
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
import paddle
import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
import paddle.nn.functional as F
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
self.set_atol()
self.set_training()
self.set_data_feed()
self.set_feed_attr()
self.set_op_attrs()
def set_data_feed(self):
x = np.random.uniform(size=[3, 4, 2, 2])
target = np.random.uniform(size=[3, 4, 2, 2])
self.feed_fp32 = {
"x": x.astype(np.float32),
"target": target.astype(np.float32)
}
self.feed_fp16 = {
"x": x.astype(np.float16),
"target": target.astype(np.float16)
}
def set_feed_attr(self):
self.feed_shape = [x.shape for x in self.feed_fp32.values()]
self.feed_list = list(self.feed_fp32.keys())
def set_op_attrs(self):
self.attrs = {
'delta': 1.0,
}
@IPUOpTest.static_graph
def build_model(self, on_ipu):
x = paddle.static.data(name=self.feed_list[0],
shape=self.feed_shape[0],
dtype="float32")
target = paddle.static.data(name=self.feed_list[1],
shape=self.feed_shape[1],
dtype='float32')
out = paddle.fluid.layers.huber_loss(x, target, **self.attrs)
self.fetch_list = [out.name]
def run_model(self, exec_mode):
self.run_op_test(exec_mode)
def test(self):
for m in IPUOpTest.ExecutionMode:
if not self.skip_mode(m):
self.build_model(self.is_ipu_mode(m))
self.run_model(m)
self.check()
class TestCase1(TestBase):
def set_op_attrs(self):
self.attrs = {
'delta': 0.5,
}
class TestCase2(TestBase):
def set_op_attrs(self):
self.attrs = {
'delta': 0.0,
}
if __name__ == "__main__":
unittest.main()
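For reference, huber_loss follows the usual piecewise definition: quadratic for residuals within `delta`, linear beyond it. A numpy sketch (my own helper, not Paddle's implementation):

```python
import numpy as np

def huber(x, target, delta):
    # 0.5 * r^2 where |r| <= delta, else delta * (|r| - 0.5 * delta)
    r = np.abs(x - target)
    return np.where(r <= delta, 0.5 * r * r, delta * (r - 0.5 * delta))

x = np.random.uniform(size=[3, 4, 2, 2]).astype(np.float32)
t = np.random.uniform(size=[3, 4, 2, 2]).astype(np.float32)
loss = huber(x, t, delta=1.0)
```

Under this definition, `delta=0.0` (TestCase2) makes both branches vanish, so the expected loss is identically zero.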
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
import paddle
import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
self.set_atol()
self.set_training()
self.set_data_feed()
self.set_feed_attr()
self.set_op_attrs()
def set_data_feed(self):
x = np.random.uniform(size=[2, 3, 6, 10])
self.feed_fp32 = {"x": x.astype(np.float32)}
self.feed_fp16 = {"x": x.astype(np.float16)}
def set_feed_attr(self):
self.feed_shape = [x.shape for x in self.feed_fp32.values()]
self.feed_list = list(self.feed_fp32.keys())
self.feed_dtype = [x.dtype for x in self.feed_fp32.values()]
def set_op_attrs(self):
self.attrs = {}
self.attrs["size"] = [12, 12]
@IPUOpTest.static_graph
def build_model(self):
x = paddle.static.data(name=self.feed_list[0],
shape=self.feed_shape[0],
dtype="float32")
out = paddle.nn.functional.interpolate(x, **self.attrs)
self.fetch_list = [out.name]
def run_model(self, exec_mode):
self.run_op_test(exec_mode)
def test(self):
for m in IPUOpTest.ExecutionMode:
if not self.skip_mode(m):
self.build_model()
self.run_model(m)
self.check()
class TestCase0(TestBase):
def set_op_attrs(self):
self.attrs = {}
self.attrs["size"] = [3, 4]
class TestCase1(TestBase):
def set_op_attrs(self):
self.attrs = {}
self.attrs["scale_factor"] = [2, 1]
@unittest.skip("Only one of size or scale_factor should be defined")
class TestCase2(TestBase):
def set_op_attrs(self):
self.attrs = {"size": [12, 12], "scale_factor": [2, 1]}
class TestCase3(TestBase):
def set_op_attrs(self):
self.attrs = {"scale_factor": 2.5}
class TestBilinear(TestBase):
@property
def fp16_enabled(self):
return False
def set_atol(self):
self.atol = 1e-6
self.rtol = 1e-6
self.atol_fp16 = 1e-3
self.rtol_fp16 = 1e-3
def set_op_attrs(self):
self.attrs = {"size": [12, 12], "mode": "bilinear"}
# Takes a long time to run
class TestBicubic(TestBase):
@property
def fp16_enabled(self):
return False
def set_atol(self):
self.atol = 1e-6
self.rtol = 1e-6
self.atol_fp16 = 1e-3
self.rtol_fp16 = 1e-3
def set_op_attrs(self):
self.attrs = {"size": [12, 12], "mode": "bicubic"}
# Trilinear requires 5-D input
class TestTrilinear(TestBase):
@property
def fp16_enabled(self):
return False
def set_atol(self):
self.atol = 1e-6
self.rtol = 1e-6
self.atol_fp16 = 1e-3
self.rtol_fp16 = 1e-3
def set_data_feed(self):
x = np.random.uniform(size=[2, 3, 3, 6, 10])
self.feed_fp32 = {"x": x.astype(np.float32)}
self.feed_fp16 = {"x": x.astype(np.float16)}
def set_op_attrs(self):
self.attrs = {
"size": [12, 12, 12],
"mode": "trilinear",
"data_format": "NCDHW"
}
# Linear requires 3-D input
class TestLinear(TestBase):
@property
def fp16_enabled(self):
return False
def set_atol(self):
self.atol = 1e-6
self.rtol = 1e-6
self.atol_fp16 = 1e-3
self.rtol_fp16 = 1e-3
def set_data_feed(self):
x = np.random.uniform(size=[3, 6, 10])
self.feed_fp32 = {"x": x.astype(np.float32)}
self.feed_fp16 = {"x": x.astype(np.float16)}
def set_op_attrs(self):
self.attrs = {"size": [12], "mode": "linear", "data_format": "NCW"}
@unittest.skip(
    "Lowered to a Pool op with a 2-D ksize; only 1-D ksize is supported for now.")
class TestArea(TestBase):
def set_data_feed(self):
x = np.random.uniform(size=[2, 3, 6, 6])
self.feed_fp32 = {"x": x.astype(np.float32)}
self.feed_fp16 = {"x": x.astype(np.float16)}
def set_op_attrs(self):
self.attrs = {"size": 12, "mode": "area"}
# align_corners option can only be set with the interpolating modes: linear | bilinear | bicubic | trilinear
class TestAlignCorners(TestBase):
@property
def fp16_enabled(self):
return False
def set_op_attrs(self):
self.attrs = {
"size": [12, 12],
"align_corners": True,
"mode": "bilinear"
}
#
class TestAlignMode(TestBase):
def set_op_attrs(self):
self.attrs = {"size": [12, 12], "align_mode": 1}
if __name__ == "__main__":
unittest.main()
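A short usage sketch of the interpolate paths exercised above; `size` and `scale_factor` are mutually exclusive, which is what the skipped TestCase2 documents:

```python
import paddle
import paddle.nn.functional as F

x = paddle.rand([2, 3, 6, 10])
out = F.interpolate(x, size=[12, 12])          # nearest-neighbour by default
out2 = F.interpolate(x, scale_factor=[2, 1])   # (6, 10) -> (12, 10)
# F.interpolate(x, size=[12, 12], scale_factor=[2, 1]) raises: set only one
```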