Unverified commit fef62298, authored by Allen Guo, committed by GitHub

clean unittest.skipIf 0/N (#44285)

Parent commit: 917235be
......@@ -67,9 +67,6 @@ class IPUTest(unittest.TestCase):
random.seed(cls.SEED)
paddle.seed(cls.SEED)
# Enable paddle static graph mode
paddle.enable_static()
@classmethod
def tearDownClass(cls):
"""Restore random seeds"""
......@@ -86,43 +83,37 @@ class IPUTest(unittest.TestCase):
if flag.upper() in ['1', "TRUE"]:
return True
# Decorator for static graph building
def static_graph(builder):
def wrapper(self, *args, **kwargs):
self.scope = paddle.static.Scope()
self.main_prog = paddle.static.Program()
self.startup_prog = paddle.static.Program()
self.main_prog.random_seed = self.SEED
self.startup_prog.random_seed = self.SEED
with paddle.static.scope_guard(self.scope):
with paddle.utils.unique_name.guard(
paddle.utils.unique_name.generate('')):
with paddle.static.program_guard(self.main_prog,
self.startup_prog):
builder(self, *args, **kwargs)
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class IPUD2STest(IPUTest):
return wrapper
# Cast a fp32 model to a full-fp16 model
@classmethod
def cast_model_to_fp16(cls, main_program):
    """Cast ``main_program`` (fp32) to a full-fp16 model in place.

    Args:
        main_program: the ``paddle.static.Program`` to cast.
    """
    amp_list = paddle.static.amp.CustomOpLists()
    # Clear the unsupported-op list so *every* op is eligible for fp16.
    amp_list.unsupported_list = {}
    to_fp16_var_names = paddle.static.amp.cast_model_to_fp16(
        main_program, amp_list, use_fp16_guard=False)
    # Also cast the parameters themselves (on CPU) to match the fp16 graph.
    paddle.static.amp.cast_parameters_to_fp16(
        paddle.CPUPlace(),
        main_program,
        to_fp16_var_names=to_fp16_var_names)
def setUpClass(cls):
super().setUpClass()
# Disable paddle static graph mode
paddle.disable_static()
def tearDown(self):
    """Per-test cleanup: manually reset the IPU backend when on ipumodel."""
    # Only the ipumodel (simulator) path needs a manual backend reset.
    if not self.use_ipumodel():
        return
    paddle.framework.core.IpuBackend.get_instance().reset()
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class IPUOpTest(IPUTest):
"""Base Class for single op unit tests using static graph on IPU.
"""
@classmethod
def setUpClass(cls):
    """Run the base-class setup (RNG seeding), then enable static graph mode."""
    super().setUpClass()
    # Enable paddle static graph mode
    paddle.enable_static()
# Items that a op_tester needs
cls.main_prog: paddle.static.Program = None
cls.startup_prog: paddle.static.Program = None
......@@ -166,6 +157,36 @@ class IPUOpTest(IPUTest):
self.is_training = False
self.epoch = 1
# Decorator for static graph building
def static_graph(builder):
    """Wrap *builder* so it runs inside a fresh static-graph context.

    Creates a new scope plus main/startup programs (both seeded with
    ``self.SEED``) and installs the scope, unique-name, and program
    guards before invoking the wrapped builder.
    """

    def inner(self, *args, **kwargs):
        # Fresh scope and programs per build so tests stay isolated.
        self.scope = paddle.static.Scope()
        self.main_prog = paddle.static.Program()
        self.startup_prog = paddle.static.Program()
        # Seed both programs for reproducible graph construction.
        self.main_prog.random_seed = self.SEED
        self.startup_prog.random_seed = self.SEED
        with paddle.static.scope_guard(self.scope), \
                paddle.utils.unique_name.guard(
                    paddle.utils.unique_name.generate('')), \
                paddle.static.program_guard(self.main_prog,
                                            self.startup_prog):
            builder(self, *args, **kwargs)

    return inner
# Cast a fp32 model to a full-fp16 model
@classmethod
def cast_model_to_fp16(cls, main_program):
    """Cast *main_program* from fp32 to a full-fp16 model in place.

    Empties the AMP unsupported-op list so no op is excluded, casts the
    program, then casts its parameters (on CPU) to fp16.
    """
    op_lists = paddle.static.amp.CustomOpLists()
    # No exclusions: every op should be cast to fp16.
    op_lists.unsupported_list = {}
    fp16_var_names = paddle.static.amp.cast_model_to_fp16(
        main_program, op_lists, use_fp16_guard=False)
    paddle.static.amp.cast_parameters_to_fp16(
        paddle.CPUPlace(),
        main_program,
        to_fp16_var_names=fp16_var_names)
def run_op_test(self, exec_mode, ipu_strategy=None):
# NOTE: some op has no inputs
# if len(self.feed_list) == 0 or len(self.fetch_list) == 0:
......
......@@ -21,8 +21,6 @@ import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
......
......@@ -20,8 +20,6 @@ import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
......
......@@ -20,8 +20,6 @@ import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
......
......@@ -20,8 +20,6 @@ import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
......
......@@ -20,8 +20,6 @@ import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
......
......@@ -20,8 +20,6 @@ import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
......
......@@ -20,8 +20,6 @@ import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
......
......@@ -20,8 +20,6 @@ import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
......
......@@ -21,8 +21,6 @@ from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
import paddle.nn.functional as F
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
......
......@@ -20,8 +20,6 @@ import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
......
......@@ -20,8 +20,6 @@ import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
......
......@@ -20,8 +20,6 @@ import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
......
......@@ -20,8 +20,6 @@ import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
......
......@@ -20,8 +20,6 @@ import paddle.static
from op_test_ipu import IPUOpTest
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
......
......@@ -20,8 +20,6 @@ import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
......
......@@ -12,16 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import tempfile
import unittest
import numpy as np
import paddle
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
SEED = 2022
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUD2STest
class SimpleLayer(paddle.nn.Layer):
......@@ -48,22 +44,19 @@ class SimpleLayer(paddle.nn.Layer):
return x
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
class TestBase(IPUD2STest):
@classmethod
def setUpClass(cls):
paddle.disable_static()
cls.save_path = tempfile.TemporaryDirectory()
def setUp(self):
super().setUp()
self.save_path = tempfile.TemporaryDirectory()
@classmethod
def tearDownClass(cls):
cls.save_path.cleanup()
def tearDown(self):
super().tearDown()
self.save_path.cleanup()
def _test(self, use_ipu=False):
paddle.seed(SEED)
np.random.seed(SEED)
paddle.seed(self.SEED)
np.random.seed(self.SEED)
model = SimpleLayer(use_ipu)
specs = [
paddle.static.InputSpec(name="x",
......@@ -82,7 +75,7 @@ class TestBase(IPUOpTest):
self.save_path, 'ipu' if use_ipu else 'cpu')
if use_ipu:
device = paddle.set_device('ipu')
paddle.set_device('ipu')
ipu_strategy = paddle.static.IpuStrategy()
ipu_strategy.set_graph_config(num_ipus=1,
is_training=True,
......@@ -92,15 +85,15 @@ class TestBase(IPUOpTest):
ipu_strategy.set_optimizer(optim)
data = data.astype(np.float16)
epochs = 100
result = []
for epoch in range(100):
for _ in range(epochs):
# ipu only needs call model() to do forward/backward/grad_update
pred, loss = model(data, label)
if not use_ipu:
loss.backward()
optim.step()
optim.clear_grad()
result.append(loss)
if use_ipu:
......@@ -108,11 +101,10 @@ class TestBase(IPUOpTest):
paddle.save(model.state_dict(), model_path)
paddle.save(optim.state_dict(), optim_path)
model.set_state_dict(paddle.load(model_path))
optim.set_state_dict(paddle.load(optim_path))
for epoch in range(100):
for _ in range(epochs):
# ipu only needs call model() to do forward/backward/grad_update
pred, loss = model(data, label)
if not use_ipu:
......@@ -130,7 +122,6 @@ class TestBase(IPUOpTest):
def test_training(self):
cpu_loss = self._test(False).flatten()
ipu_loss = self._test(True).flatten()
self.assertTrue(np.allclose(ipu_loss, cpu_loss, atol=1e-2))
......
......@@ -12,21 +12,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import tempfile
import unittest
import numpy as np
import paddle
from paddle.fluid.dygraph.dygraph_to_static.program_translator import ProgramCache
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUD2STest
from paddle.jit import to_static
from paddle.optimizer.lr import LRScheduler
from functools import partial
SEED = 2022
class SimpleLayer(paddle.nn.Layer):
......@@ -64,12 +60,9 @@ class SimpleLayer(paddle.nn.Layer):
return x
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
class TestBase(IPUD2STest):
def setUp(self):
paddle.disable_static()
self.set_op_attrs()
self.set_data_feed()
......@@ -87,14 +80,14 @@ class TestBase(IPUOpTest):
use_identity_loss=use_ipu)
def _test(self, use_ipu=False):
paddle.seed(SEED)
np.random.seed(SEED)
paddle.seed(self.SEED)
np.random.seed(self.SEED)
model = self.create_model(use_ipu)
optim = paddle.optimizer.Adam(learning_rate=0.01,
parameters=model.parameters())
if use_ipu:
device = paddle.set_device('ipu')
paddle.set_device('ipu')
ipu_strategy = paddle.static.IpuStrategy()
ipu_strategy.set_graph_config(num_ipus=1,
is_training=True,
......@@ -102,15 +95,15 @@ class TestBase(IPUOpTest):
enable_manual_shard=False)
ipu_strategy.set_optimizer(optim)
epochs = 100
result = []
for epoch in range(100):
for _ in range(epochs):
# ipu only needs call model() to do forward/backward/grad_update
pred, loss = model(self.data, self.label)
if not use_ipu:
loss.backward()
optim.step()
optim.clear_grad()
result.append(loss)
if use_ipu:
......@@ -121,23 +114,22 @@ class TestBase(IPUOpTest):
def test_training(self):
ipu_loss = self._test(True).flatten()
cpu_loss = self._test(False).flatten()
self.assertTrue(np.allclose(ipu_loss, cpu_loss, atol=1e-4))
class TestSaveLoad(TestBase):
@classmethod
def setUpClass(cls):
cls.save_path = tempfile.TemporaryDirectory()
def setUp(self):
super().setUp()
self.save_path = tempfile.TemporaryDirectory()
@classmethod
def tearDownClass(cls):
cls.save_path.cleanup()
def tearDown(self):
super().tearDown()
self.save_path.cleanup()
def _test(self, use_ipu=False):
paddle.seed(SEED)
np.random.seed(SEED)
paddle.seed(self.SEED)
np.random.seed(self.SEED)
model = self.create_model(use_ipu)
optim = paddle.optimizer.Adam(learning_rate=0.01,
parameters=model.parameters())
......@@ -147,7 +139,7 @@ class TestSaveLoad(TestBase):
self.save_path, 'ipu' if use_ipu else 'cpu')
if use_ipu:
device = paddle.set_device('ipu')
paddle.set_device('ipu')
ipu_strategy = paddle.static.IpuStrategy()
ipu_strategy.set_graph_config(num_ipus=1,
is_training=True,
......@@ -155,15 +147,15 @@ class TestSaveLoad(TestBase):
enable_manual_shard=False)
ipu_strategy.set_optimizer(optim)
epochs = 100
result = []
for epoch in range(100):
for _ in range(epochs):
# ipu only needs call model() to do forward/backward/grad_update
pred, loss = model(self.data, self.label)
if not use_ipu:
loss.backward()
optim.step()
optim.clear_grad()
result.append(loss)
if use_ipu:
......@@ -171,18 +163,16 @@ class TestSaveLoad(TestBase):
paddle.save(model.state_dict(), model_path)
paddle.save(optim.state_dict(), optim_path)
model.set_state_dict(paddle.load(model_path))
optim.set_state_dict(paddle.load(optim_path))
for epoch in range(100):
for _ in range(epochs):
# ipu only needs call model() to do forward/backward/grad_update
pred, loss = model(self.data, self.label)
if not use_ipu:
loss.backward()
optim.step()
optim.clear_grad()
result.append(loss)
if use_ipu:
......@@ -191,9 +181,7 @@ class TestSaveLoad(TestBase):
return np.array(result)
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestPatch(IPUOpTest):
class TestPatch(IPUD2STest):
def setUp(cls):
paddle.disable_static()
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
To comment, please register.