Unverified commit fef62298, authored by Allen Guo, committed by GitHub

clean unittest.skipIf 0/N (#44285)

Parent 917235be
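
The cleanup pattern this commit applies across the IPU unit tests, as a minimal sketch (the class names below are stand-ins, not the real test files): instead of repeating the `unittest.skipIf` guard in every test file, the guard moves onto the shared base class and subclasses inherit it.

```python
import unittest

import paddle


# The guard now lives once, on the base class...
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
                 "core is not compiled with IPU")
class IPUTestBase(unittest.TestCase):  # stand-in for IPUOpTest / IPUD2STest
    pass


# ...so a test file can declare a bare subclass and is still skipped
# on builds without IPU support.
class TestSomeOp(IPUTestBase):

    def test_only_runs_with_ipu(self):
        self.assertTrue(paddle.is_compiled_with_ipu())
```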
@@ -67,9 +67,6 @@ class IPUTest(unittest.TestCase):
         random.seed(cls.SEED)
         paddle.seed(cls.SEED)
 
-        # Enable paddle static graph mode
-        paddle.enable_static()
-
     @classmethod
     def tearDownClass(cls):
         """Restore random seeds"""
@@ -86,43 +83,37 @@ class IPUTest(unittest.TestCase):
         if flag.upper() in ['1', "TRUE"]:
             return True
 
-    # Decorator for static graph building
-    def static_graph(builder):
-
-        def wrapper(self, *args, **kwargs):
-            self.scope = paddle.static.Scope()
-            self.main_prog = paddle.static.Program()
-            self.startup_prog = paddle.static.Program()
-            self.main_prog.random_seed = self.SEED
-            self.startup_prog.random_seed = self.SEED
-            with paddle.static.scope_guard(self.scope):
-                with paddle.utils.unique_name.guard(
-                        paddle.utils.unique_name.generate('')):
-                    with paddle.static.program_guard(self.main_prog,
-                                                     self.startup_prog):
-                        builder(self, *args, **kwargs)
-
-        return wrapper
-
-    # Cast a fp32 model to a full-fp16 model
-    @classmethod
-    def cast_model_to_fp16(cls, main_program):
-        amp_list = paddle.static.amp.CustomOpLists()
-        amp_list.unsupported_list = {}
-        to_fp16_var_names = paddle.static.amp.cast_model_to_fp16(
-            main_program, amp_list, use_fp16_guard=False)
-        paddle.static.amp.cast_parameters_to_fp16(
-            paddle.CPUPlace(),
-            main_program,
-            to_fp16_var_names=to_fp16_var_names)
+
+@unittest.skipIf(not paddle.is_compiled_with_ipu(),
+                 "core is not compiled with IPU")
+class IPUD2STest(IPUTest):
+
+    @classmethod
+    def setUpClass(cls):
+        super().setUpClass()
+
+        # Disable paddle static graph mode
+        paddle.disable_static()
+
+    def tearDown(self):
+        # Manual reset when using ipumodel
+        if self.use_ipumodel():
+            paddle.framework.core.IpuBackend.get_instance().reset()
 
 
+@unittest.skipIf(not paddle.is_compiled_with_ipu(),
+                 "core is not compiled with IPU")
 class IPUOpTest(IPUTest):
+    """Base Class for single op unit tests using static graph on IPU.
+    """
 
     @classmethod
     def setUpClass(cls):
         super().setUpClass()
 
+        # Enable paddle static graph mode
+        paddle.enable_static()
+
         # Items that a op_tester needs
         cls.main_prog: paddle.static.Program = None
         cls.startup_prog: paddle.static.Program = None
@@ -166,6 +157,36 @@ class IPUOpTest(IPUTest):
         self.is_training = False
         self.epoch = 1
 
+    # Decorator for static graph building
+    def static_graph(builder):
+
+        def wrapper(self, *args, **kwargs):
+            self.scope = paddle.static.Scope()
+            self.main_prog = paddle.static.Program()
+            self.startup_prog = paddle.static.Program()
+            self.main_prog.random_seed = self.SEED
+            self.startup_prog.random_seed = self.SEED
+            with paddle.static.scope_guard(self.scope):
+                with paddle.utils.unique_name.guard(
+                        paddle.utils.unique_name.generate('')):
+                    with paddle.static.program_guard(self.main_prog,
+                                                     self.startup_prog):
+                        builder(self, *args, **kwargs)
+
+        return wrapper
+
+    # Cast a fp32 model to a full-fp16 model
+    @classmethod
+    def cast_model_to_fp16(cls, main_program):
+        amp_list = paddle.static.amp.CustomOpLists()
+        amp_list.unsupported_list = {}
+        to_fp16_var_names = paddle.static.amp.cast_model_to_fp16(
+            main_program, amp_list, use_fp16_guard=False)
+        paddle.static.amp.cast_parameters_to_fp16(
+            paddle.CPUPlace(),
+            main_program,
+            to_fp16_var_names=to_fp16_var_names)
+
     def run_op_test(self, exec_mode, ipu_strategy=None):
         # NOTE: some op has no inputs
         # if len(self.feed_list) == 0 or len(self.fetch_list) == 0:
...
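For context, here is roughly how a test case drives the two helpers that moved into `IPUOpTest` above; the op, names, and shapes are illustrative, not taken from this diff:

```python
class TestExampleOp(IPUOpTest):  # hypothetical test case

    @IPUOpTest.static_graph
    def build_model(self):
        # The decorator has already installed self.scope / self.main_prog
        # and entered the program guards, so plain static-graph calls work.
        x = paddle.static.data(name='x', shape=[1, 3, 10, 10], dtype='float32')
        out = paddle.nn.functional.relu(x)
        self.fetch_list = [out.name]

    def test_fp16(self):
        self.build_model()
        # Optionally cast the freshly built program to full fp16.
        IPUOpTest.cast_model_to_fp16(self.main_prog)
```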
...
@@ -21,8 +21,6 @@ import paddle.static
 from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
 
 
-@unittest.skipIf(not paddle.is_compiled_with_ipu(),
-                 "core is not compiled with IPU")
 class TestBase(IPUOpTest):
 
     def setUp(self):
...
@@ -20,8 +20,6 @@ import paddle.static
 from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
 
 
-@unittest.skipIf(not paddle.is_compiled_with_ipu(),
-                 "core is not compiled with IPU")
 class TestBase(IPUOpTest):
 
     def setUp(self):
...
@@ -20,8 +20,6 @@ import paddle.static
 from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
 
 
-@unittest.skipIf(not paddle.is_compiled_with_ipu(),
-                 "core is not compiled with IPU")
 class TestBase(IPUOpTest):
 
     def setUp(self):
...
@@ -20,8 +20,6 @@ import paddle.static
 from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
 
 
-@unittest.skipIf(not paddle.is_compiled_with_ipu(),
-                 "core is not compiled with IPU")
 class TestBase(IPUOpTest):
 
     def setUp(self):
...
@@ -20,8 +20,6 @@ import paddle.static
 from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
 
 
-@unittest.skipIf(not paddle.is_compiled_with_ipu(),
-                 "core is not compiled with IPU")
 class TestBase(IPUOpTest):
 
     def setUp(self):
...
@@ -20,8 +20,6 @@ import paddle.static
 from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
 
 
-@unittest.skipIf(not paddle.is_compiled_with_ipu(),
-                 "core is not compiled with IPU")
 class TestBase(IPUOpTest):
 
     def setUp(self):
...
@@ -20,8 +20,6 @@ import paddle.static
 from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
 
 
-@unittest.skipIf(not paddle.is_compiled_with_ipu(),
-                 "core is not compiled with IPU")
 class TestBase(IPUOpTest):
 
     def setUp(self):
...
@@ -20,8 +20,6 @@ import paddle.static
 from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
 
 
-@unittest.skipIf(not paddle.is_compiled_with_ipu(),
-                 "core is not compiled with IPU")
 class TestBase(IPUOpTest):
 
     def setUp(self):
...
@@ -21,8 +21,6 @@ from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
 import paddle.nn.functional as F
 
 
-@unittest.skipIf(not paddle.is_compiled_with_ipu(),
-                 "core is not compiled with IPU")
 class TestBase(IPUOpTest):
 
     def setUp(self):
...
@@ -20,8 +20,6 @@ import paddle.static
 from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
 
 
-@unittest.skipIf(not paddle.is_compiled_with_ipu(),
-                 "core is not compiled with IPU")
 class TestBase(IPUOpTest):
 
     def setUp(self):
...
@@ -20,8 +20,6 @@ import paddle.static
 from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
 
 
-@unittest.skipIf(not paddle.is_compiled_with_ipu(),
-                 "core is not compiled with IPU")
 class TestBase(IPUOpTest):
 
     def setUp(self):
...
@@ -20,8 +20,6 @@ import paddle.static
 from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
 
 
-@unittest.skipIf(not paddle.is_compiled_with_ipu(),
-                 "core is not compiled with IPU")
 class TestBase(IPUOpTest):
 
     def setUp(self):
...
@@ -20,8 +20,6 @@ import paddle.static
 from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
 
 
-@unittest.skipIf(not paddle.is_compiled_with_ipu(),
-                 "core is not compiled with IPU")
 class TestBase(IPUOpTest):
 
     def setUp(self):
...
@@ -20,8 +20,6 @@ import paddle.static
 from op_test_ipu import IPUOpTest
 
 
-@unittest.skipIf(not paddle.is_compiled_with_ipu(),
-                 "core is not compiled with IPU")
 class TestBase(IPUOpTest):
 
     def setUp(self):
...
@@ -20,8 +20,6 @@ import paddle.static
 from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
 
 
-@unittest.skipIf(not paddle.is_compiled_with_ipu(),
-                 "core is not compiled with IPU")
 class TestBase(IPUOpTest):
 
     def setUp(self):
...
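The per-file deletions above are safe because `unittest` records the skip on the class object and normal attribute lookup walks the MRO, so subclasses inherit it. A quick self-contained check:

```python
import unittest


@unittest.skipIf(True, "demo condition")
class Base(unittest.TestCase):
    pass


class Child(Base):  # no decorator of its own
    pass


# unittest consults this attribute when running the test, and the
# subclass sees the inherited value.
print(getattr(Child, '__unittest_skip__', False))  # True
```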
@@ -12,16 +12,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from __future__ import print_function
-
 import tempfile
 import unittest
 
 import numpy as np
 import paddle
-from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
+from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUD2STest
 
-SEED = 2022
-
 
 class SimpleLayer(paddle.nn.Layer):
@@ -48,22 +44,19 @@ class SimpleLayer(paddle.nn.Layer):
         return x
 
 
-@unittest.skipIf(not paddle.is_compiled_with_ipu(),
-                 "core is not compiled with IPU")
-class TestBase(IPUOpTest):
+class TestBase(IPUD2STest):
 
-    @classmethod
-    def setUpClass(cls):
-        paddle.disable_static()
-        cls.save_path = tempfile.TemporaryDirectory()
+    def setUp(self):
+        super().setUp()
+        self.save_path = tempfile.TemporaryDirectory()
 
-    @classmethod
-    def tearDownClass(cls):
-        cls.save_path.cleanup()
+    def tearDown(self):
+        super().tearDown()
+        self.save_path.cleanup()
 
     def _test(self, use_ipu=False):
-        paddle.seed(SEED)
-        np.random.seed(SEED)
+        paddle.seed(self.SEED)
+        np.random.seed(self.SEED)
         model = SimpleLayer(use_ipu)
         specs = [
             paddle.static.InputSpec(name="x",
@@ -82,7 +75,7 @@ class TestBase(IPUOpTest):
             self.save_path, 'ipu' if use_ipu else 'cpu')
 
         if use_ipu:
-            device = paddle.set_device('ipu')
+            paddle.set_device('ipu')
             ipu_strategy = paddle.static.IpuStrategy()
             ipu_strategy.set_graph_config(num_ipus=1,
                                           is_training=True,
@@ -92,15 +85,15 @@ class TestBase(IPUOpTest):
             ipu_strategy.set_optimizer(optim)
             data = data.astype(np.float16)
 
+        epochs = 100
         result = []
-        for epoch in range(100):
+        for _ in range(epochs):
             # ipu only needs call model() to do forward/backward/grad_update
             pred, loss = model(data, label)
             if not use_ipu:
                 loss.backward()
                 optim.step()
                 optim.clear_grad()
-
             result.append(loss)
 
         if use_ipu:
@@ -108,11 +101,10 @@ class TestBase(IPUOpTest):
             paddle.save(model.state_dict(), model_path)
             paddle.save(optim.state_dict(), optim_path)
 
             model.set_state_dict(paddle.load(model_path))
             optim.set_state_dict(paddle.load(optim_path))
 
-        for epoch in range(100):
+        for _ in range(epochs):
             # ipu only needs call model() to do forward/backward/grad_update
             pred, loss = model(data, label)
-
             if not use_ipu:
@@ -130,7 +122,6 @@ class TestBase(IPUOpTest):
 
     def test_training(self):
         cpu_loss = self._test(False).flatten()
         ipu_loss = self._test(True).flatten()
-
         self.assertTrue(np.allclose(ipu_loss, cpu_loss, atol=1e-2))
 
...
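The switch from `setUpClass`/`tearDownClass` to `setUp`/`tearDown` gives every test method its own temporary directory; a minimal sketch of the pattern, independent of this diff:

```python
import tempfile
import unittest


class Example(unittest.TestCase):  # illustration only

    def setUp(self):
        # A fresh directory per test method, so tests cannot see
        # each other's checkpoints.
        self.save_path = tempfile.TemporaryDirectory()

    def tearDown(self):
        # Runs even when the test fails, so nothing leaks on disk.
        self.save_path.cleanup()
```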
@@ -12,21 +12,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from __future__ import print_function
-
 import tempfile
 import unittest
 
 import numpy as np
 import paddle
 from paddle.fluid.dygraph.dygraph_to_static.program_translator import ProgramCache
-from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
+from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUD2STest
 from paddle.jit import to_static
 from paddle.optimizer.lr import LRScheduler
 from functools import partial
 
-SEED = 2022
-
 
 class SimpleLayer(paddle.nn.Layer):
@@ -64,12 +60,9 @@ class SimpleLayer(paddle.nn.Layer):
         return x
 
 
-@unittest.skipIf(not paddle.is_compiled_with_ipu(),
-                 "core is not compiled with IPU")
-class TestBase(IPUOpTest):
+class TestBase(IPUD2STest):
 
     def setUp(self):
-        paddle.disable_static()
         self.set_op_attrs()
         self.set_data_feed()
@@ -87,14 +80,14 @@ class TestBase(IPUOpTest):
                                use_identity_loss=use_ipu)
 
     def _test(self, use_ipu=False):
-        paddle.seed(SEED)
-        np.random.seed(SEED)
+        paddle.seed(self.SEED)
+        np.random.seed(self.SEED)
         model = self.create_model(use_ipu)
         optim = paddle.optimizer.Adam(learning_rate=0.01,
                                       parameters=model.parameters())
 
         if use_ipu:
-            device = paddle.set_device('ipu')
+            paddle.set_device('ipu')
             ipu_strategy = paddle.static.IpuStrategy()
             ipu_strategy.set_graph_config(num_ipus=1,
                                           is_training=True,
@@ -102,15 +95,15 @@ class TestBase(IPUOpTest):
                                           enable_manual_shard=False)
             ipu_strategy.set_optimizer(optim)
 
+        epochs = 100
         result = []
-        for epoch in range(100):
+        for _ in range(epochs):
             # ipu only needs call model() to do forward/backward/grad_update
             pred, loss = model(self.data, self.label)
             if not use_ipu:
                 loss.backward()
                 optim.step()
                 optim.clear_grad()
-
             result.append(loss)
 
         if use_ipu:
@@ -121,23 +114,22 @@ class TestBase(IPUOpTest):
     def test_training(self):
         ipu_loss = self._test(True).flatten()
         cpu_loss = self._test(False).flatten()
-
         self.assertTrue(np.allclose(ipu_loss, cpu_loss, atol=1e-4))
 
 
 class TestSaveLoad(TestBase):
 
-    @classmethod
-    def setUpClass(cls):
-        cls.save_path = tempfile.TemporaryDirectory()
+    def setUp(self):
+        super().setUp()
+        self.save_path = tempfile.TemporaryDirectory()
 
-    @classmethod
-    def tearDownClass(cls):
-        cls.save_path.cleanup()
+    def tearDown(self):
+        super().tearDown()
+        self.save_path.cleanup()
 
     def _test(self, use_ipu=False):
-        paddle.seed(SEED)
-        np.random.seed(SEED)
+        paddle.seed(self.SEED)
+        np.random.seed(self.SEED)
         model = self.create_model(use_ipu)
         optim = paddle.optimizer.Adam(learning_rate=0.01,
                                       parameters=model.parameters())
@@ -147,7 +139,7 @@ class TestSaveLoad(TestBase):
             self.save_path, 'ipu' if use_ipu else 'cpu')
 
         if use_ipu:
-            device = paddle.set_device('ipu')
+            paddle.set_device('ipu')
             ipu_strategy = paddle.static.IpuStrategy()
             ipu_strategy.set_graph_config(num_ipus=1,
                                           is_training=True,
@@ -155,15 +147,15 @@ class TestSaveLoad(TestBase):
                                           enable_manual_shard=False)
             ipu_strategy.set_optimizer(optim)
 
+        epochs = 100
         result = []
-        for epoch in range(100):
+        for _ in range(epochs):
             # ipu only needs call model() to do forward/backward/grad_update
             pred, loss = model(self.data, self.label)
             if not use_ipu:
                 loss.backward()
                 optim.step()
                 optim.clear_grad()
-
             result.append(loss)
 
         if use_ipu:
@@ -171,18 +163,16 @@ class TestSaveLoad(TestBase):
             paddle.save(model.state_dict(), model_path)
             paddle.save(optim.state_dict(), optim_path)
 
             model.set_state_dict(paddle.load(model_path))
             optim.set_state_dict(paddle.load(optim_path))
 
-        for epoch in range(100):
+        for _ in range(epochs):
             # ipu only needs call model() to do forward/backward/grad_update
             pred, loss = model(self.data, self.label)
             if not use_ipu:
                 loss.backward()
                 optim.step()
                 optim.clear_grad()
-
             result.append(loss)
 
         if use_ipu:
@@ -191,9 +181,7 @@ class TestSaveLoad(TestBase):
         return np.array(result)
 
 
-@unittest.skipIf(not paddle.is_compiled_with_ipu(),
-                 "core is not compiled with IPU")
-class TestPatch(IPUOpTest):
+class TestPatch(IPUD2STest):
 
     def setUp(cls):
         paddle.disable_static()
...
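For reference, the dynamic-to-static conversion that `IPUD2STest` cases exercise, in minimal form; this `SimpleLayer` is a stand-in, not the class from the test file:

```python
import paddle
from paddle.jit import to_static


class SimpleLayer(paddle.nn.Layer):  # stand-in for the test's layer

    def __init__(self):
        super().__init__()
        self.fc = paddle.nn.Linear(8, 2)

    @to_static
    def forward(self, x):
        return self.fc(x)


layer = SimpleLayer()
out = layer(paddle.randn([4, 8]))  # first call traces to a static program
```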