Unverified commit 99720a07, authored by J jianghaicheng, committed by GitHub

ipu_commit_tests p8 (#38088)

Parent 3db6facb
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.fluid.compiler as compiler
import paddle.optimizer
import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import (IPUOpTest,
np_dtype_to_fluid_str)
paddle.enable_static()
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
self.set_atol()
self.set_training()
self.set_feed()
self.set_feed_attr()
self.set_attrs()
def set_feed(self):
self.feed = {
"x": np.random.uniform(size=[1, 3, 2, 2]).astype('float32'),
}
def set_feed_attr(self):
self.feed_shape = [x.shape for x in self.feed.values()]
self.feed_list = list(self.feed.keys())
self.feed_dtype = [
np_dtype_to_fluid_str(x.dtype) for x in self.feed.values()
]
def set_attrs(self):
self.attrs = {"axis": -1}
def _test_base(self, run_ipu=True):
scope = fluid.core.Scope()
main_prog = paddle.static.Program()
startup_prog = paddle.static.Program()
SEED = self.SEED
main_prog.random_seed = SEED
startup_prog.random_seed = SEED
with fluid.scope_guard(scope):
with paddle.static.program_guard(main_prog, startup_prog):
x = paddle.static.data(
name=self.feed_list[0],
shape=self.feed_shape[0],
dtype=self.feed_dtype[0])
out = paddle.fluid.layers.softmax(x, **self.attrs)
fetch_list = [out.name]
if run_ipu:
place = paddle.IPUPlace()
else:
place = paddle.CPUPlace()
exe = paddle.static.Executor(place)
exe.run(startup_prog)
if run_ipu:
feed_list = self.feed_list
ipu_strategy = compiler.get_ipu_strategy()
ipu_strategy.is_training = self.is_training
program = compiler.IPUCompiledProgram(
main_prog,
ipu_strategy=ipu_strategy).compile(feed_list, fetch_list)
else:
program = main_prog
result = exe.run(program, feed=self.feed, fetch_list=fetch_list)
return result[0]
def test_base(self):
res0 = self._test_base(False)
res1 = self._test_base(True)
self.assertTrue(
np.allclose(
res0.flatten(), res1.flatten(), atol=self.atol))
self.assertTrue(res0.shape == res1.shape)
class TestCase1(TestBase):
def set_attrs(self):
self.attrs = {"axis": 2}
if __name__ == "__main__":
unittest.main()
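
For reference, the CPU/IPU parity check above reduces to the standard softmax definition. A minimal NumPy sketch, not part of the test itself, with the shape taken from the [1, 3, 2, 2] feed used above:

import numpy as np

def softmax_ref(x, axis=-1):
    # Subtract the max along `axis` for numerical stability, then normalize.
    shifted = x - x.max(axis=axis, keepdims=True)
    e = np.exp(shifted)
    return e / e.sum(axis=axis, keepdims=True)

x = np.random.uniform(size=[1, 3, 2, 2]).astype('float32')
ref_last = softmax_ref(x, axis=-1)   # matches TestBase, attrs {"axis": -1}
ref_axis2 = softmax_ref(x, axis=2)   # matches TestCase1, attrs {"axis": 2}
assert np.allclose(ref_last.sum(axis=-1), 1.0, atol=1e-5)
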
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.fluid.compiler as compiler
import paddle.optimizer
import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import (IPUOpTest,
np_dtype_to_fluid_str)
paddle.enable_static()
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
self.set_atol()
self.set_training()
self.set_feed()
self.set_feed_attr()
self.set_attrs()
def set_feed(self):
self.feed = {
"x": np.random.uniform(size=[1, 3, 1, 5]).astype('float32'),
}
def set_feed_attr(self):
self.feed_shape = [x.shape for x in self.feed.values()]
self.feed_list = list(self.feed.keys())
self.feed_dtype = [
np_dtype_to_fluid_str(x.dtype) for x in self.feed.values()
]
def set_attrs(self):
self.attrs = {"axes": [0]}
def _test_base(self, run_ipu=True):
scope = fluid.core.Scope()
main_prog = paddle.static.Program()
startup_prog = paddle.static.Program()
SEED = self.SEED
main_prog.random_seed = SEED
startup_prog.random_seed = SEED
with fluid.scope_guard(scope):
with paddle.static.program_guard(main_prog, startup_prog):
x = paddle.static.data(
name=self.feed_list[0],
shape=self.feed_shape[0],
dtype=self.feed_dtype[0])
out = paddle.fluid.layers.squeeze(x, **self.attrs)
fetch_list = [out.name]
if run_ipu:
place = paddle.IPUPlace()
else:
place = paddle.CPUPlace()
exe = paddle.static.Executor(place)
exe.run(startup_prog)
if run_ipu:
feed_list = self.feed_list
ipu_strategy = compiler.get_ipu_strategy()
ipu_strategy.is_training = self.is_training
program = compiler.IPUCompiledProgram(
main_prog,
ipu_strategy=ipu_strategy).compile(feed_list, fetch_list)
else:
program = main_prog
result = exe.run(program, feed=self.feed, fetch_list=fetch_list)
return result[0]
def test_base(self):
res0 = self._test_base(False)
res1 = self._test_base(True)
self.assertTrue(
np.allclose(
res0.flatten(), res1.flatten(), atol=self.atol))
self.assertTrue(res0.shape == res1.shape)
class TestCase1(TestBase):
def set_attrs(self):
self.attrs = {"axes": []}
class TestCase2(TestBase):
def set_attrs(self):
self.attrs = {"axes": [-2]}
if __name__ == "__main__":
unittest.main()
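
As a quick illustration of the squeeze semantics exercised by the three cases above, a NumPy sketch on the same [1, 3, 1, 5] feed shape (illustration only, not the operator implementation):

import numpy as np

x = np.random.uniform(size=[1, 3, 1, 5]).astype('float32')
print(np.squeeze(x, axis=0).shape)    # (3, 1, 5)  -> attrs {"axes": [0]}
print(np.squeeze(x).shape)            # (3, 5)     -> attrs {"axes": []} removes every size-1 dim
print(np.squeeze(x, axis=-2).shape)   # (1, 3, 5)  -> attrs {"axes": [-2]}
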
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.fluid.compiler as compiler
import paddle.optimizer
import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import (IPUOpTest,
np_dtype_to_fluid_str)
paddle.enable_static()
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
self.set_atol()
self.set_training()
self.set_feed()
self.set_feed_attr()
self.set_attrs()
def set_feed(self):
self.feed = {
"x": np.random.uniform(size=[1, 2]).astype('float32'),
"y": np.random.uniform(size=[1, 2]).astype('float32'),
"z": np.random.uniform(size=[1, 2]).astype('float32'),
}
def set_feed_attr(self):
self.feed_shape = [x.shape for x in self.feed.values()]
self.feed_list = list(self.feed.keys())
self.feed_dtype = [
np_dtype_to_fluid_str(x.dtype) for x in self.feed.values()
]
def set_attrs(self):
self.attrs = {"axis": 0}
def _test_base(self, run_ipu=True):
scope = fluid.core.Scope()
main_prog = paddle.static.Program()
startup_prog = paddle.static.Program()
SEED = self.SEED
main_prog.random_seed = SEED
startup_prog.random_seed = SEED
with fluid.scope_guard(scope):
with paddle.static.program_guard(main_prog, startup_prog):
x = paddle.static.data(
name=self.feed_list[0],
shape=self.feed_shape[0],
dtype=self.feed_dtype[0])
y = paddle.static.data(
name=self.feed_list[1],
shape=self.feed_shape[1],
dtype=self.feed_dtype[1])
z = paddle.static.data(
name=self.feed_list[2],
shape=self.feed_shape[2],
dtype=self.feed_dtype[2])
out = paddle.fluid.layers.stack([x, y, z], **self.attrs)
fetch_list = [out.name]
if run_ipu:
place = paddle.IPUPlace()
else:
place = paddle.CPUPlace()
exe = paddle.static.Executor(place)
exe.run(startup_prog)
if run_ipu:
feed_list = self.feed_list
ipu_strategy = compiler.get_ipu_strategy()
ipu_strategy.is_training = self.is_training
program = compiler.IPUCompiledProgram(
main_prog,
ipu_strategy=ipu_strategy).compile(feed_list, fetch_list)
else:
program = main_prog
result = exe.run(program, feed=self.feed, fetch_list=fetch_list)
return result[0]
def test_base(self):
res0 = self._test_base(False)
res1 = self._test_base(True)
self.assertTrue(
np.allclose(
res0.flatten(), res1.flatten(), atol=self.atol))
self.assertTrue(res0.shape == res1.shape)
class TestCase1(TestBase):
def set_attrs(self):
self.attrs = {"axis": -2}
if __name__ == "__main__":
unittest.main()
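
A minimal NumPy sketch of the stacking behaviour being compared, using the same three [1, 2] feeds (illustrative only):

import numpy as np

x = np.random.uniform(size=[1, 2]).astype('float32')
y = np.random.uniform(size=[1, 2]).astype('float32')
z = np.random.uniform(size=[1, 2]).astype('float32')
print(np.stack([x, y, z], axis=0).shape)    # (3, 1, 2) -> attrs {"axis": 0}
print(np.stack([x, y, z], axis=-2).shape)   # (1, 3, 2) -> attrs {"axis": -2}
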
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.fluid.compiler as compiler
import paddle.optimizer
import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import (IPUOpTest,
np_dtype_to_fluid_str)
paddle.enable_static()
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
self.set_atol()
self.set_training()
self.set_feed()
self.set_feed_attr()
self.set_attrs()
def set_feed(self):
self.feed = {
"x": np.random.uniform(size=[1, 3, 2, 2]).astype('float32'),
"y": np.random.uniform(size=[1, 3, 2, 2]).astype('float32'),
}
def set_feed_attr(self):
self.feed_shape = [x.shape for x in self.feed.values()]
self.feed_list = list(self.feed.keys())
self.feed_dtype = [
np_dtype_to_fluid_str(x.dtype) for x in self.feed.values()
]
def set_attrs(self):
self.attrs = {}
def _test_base(self, run_ipu=True):
scope = fluid.core.Scope()
main_prog = paddle.static.Program()
startup_prog = paddle.static.Program()
SEED = self.SEED
main_prog.random_seed = SEED
startup_prog.random_seed = SEED
with fluid.scope_guard(scope):
with paddle.static.program_guard(main_prog, startup_prog):
x = paddle.static.data(
name=self.feed_list[0],
shape=self.feed_shape[0],
dtype=self.feed_dtype[0])
y = paddle.static.data(
name=self.feed_list[1],
shape=self.feed_shape[1],
dtype=self.feed_dtype[1])
out = paddle.fluid.layers.sum([x, y], **self.attrs)
fetch_list = [out.name]
if run_ipu:
place = paddle.IPUPlace()
else:
place = paddle.CPUPlace()
exe = paddle.static.Executor(place)
exe.run(startup_prog)
if run_ipu:
feed_list = self.feed_list
ipu_strategy = compiler.get_ipu_strategy()
ipu_strategy.is_training = self.is_training
program = compiler.IPUCompiledProgram(
main_prog,
ipu_strategy=ipu_strategy).compile(feed_list, fetch_list)
else:
program = main_prog
result = exe.run(program, feed=self.feed, fetch_list=fetch_list)
return result[0]
def test_base(self):
res0 = self._test_base(False)
res1 = self._test_base(True)
self.assertTrue(
np.allclose(
res0.flatten(), res1.flatten(), atol=self.atol))
self.assertTrue(res0.shape == res1.shape)
@unittest.skip('')
class TestCase1(TestBase):
def set_feed(self):
self.feed = {
"x": np.random.uniform(size=[1, 3, 2, 2]).astype('float32'),
"y": np.random.uniform(size=[1, 3, 2, 2]).astype('float32'),
"z": np.random.uniform(size=[1, 3, 2, 2]).astype('float32'),
}
def _test_base(self, run_ipu=True):
scope = fluid.core.Scope()
main_prog = paddle.static.Program()
startup_prog = paddle.static.Program()
SEED = self.SEED
main_prog.random_seed = SEED
startup_prog.random_seed = SEED
with fluid.scope_guard(scope):
with paddle.static.program_guard(main_prog, startup_prog):
x = paddle.static.data(
name=self.feed_list[0],
shape=self.feed_shape[0],
dtype=self.feed_dtype[0])
y = paddle.static.data(
name=self.feed_list[1],
shape=self.feed_shape[1],
dtype=self.feed_dtype[1])
z = paddle.static.data(
name=self.feed_list[2],
shape=self.feed_shape[2],
dtype=self.feed_dtype[2])
out = paddle.fluid.layers.sum([x, y, z], **self.attrs)
fetch_list = [out.name]
if run_ipu:
place = paddle.IPUPlace()
else:
place = paddle.CPUPlace()
exe = paddle.static.Executor(place)
exe.run(startup_prog)
if run_ipu:
feed_list = self.feed_list
ipu_strategy = compiler.get_ipu_strategy()
ipu_strategy.is_training = self.is_training
program = compiler.IPUCompiledProgram(
main_prog,
ipu_strategy=ipu_strategy).compile(feed_list, fetch_list)
else:
program = main_prog
result = exe.run(program, feed=self.feed, fetch_list=fetch_list)
return result[0]
if __name__ == "__main__":
unittest.main()
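
For reference, summing a tensor list here is an element-wise sum; a small NumPy sketch with the same [1, 3, 2, 2] shapes (illustrative only):

import numpy as np

x = np.random.uniform(size=[1, 3, 2, 2]).astype('float32')
y = np.random.uniform(size=[1, 3, 2, 2]).astype('float32')
z = np.random.uniform(size=[1, 3, 2, 2]).astype('float32')
ref_two = x + y        # what the two-input TestBase checks
ref_three = x + y + z  # the (currently skipped) three-input TestCase1
assert ref_two.shape == x.shape and ref_three.shape == x.shape
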
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.fluid.compiler as compiler
import paddle.optimizer
import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
paddle.enable_static()
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestTopKOp(IPUOpTest):
def setUp(self):
self.set_ops()
self.set_atol()
self.set_training()
self.k = 3
self.use_K_as_const_variable = False
self.set_feed()
self.set_attrs()
def set_ops(self):
self.ops = [
paddle.fluid.layers.topk,
paddle.topk # use top_k_v2 implementation
]
def set_feed(self):
self.feed_shape = []
self.feed_shape.append([3, 5])
self.feed = {}
self.feed_list = []
self.feed["in_0"] = np.random.uniform(
size=self.feed_shape[0]).astype(np.float32)
self.feed_list.append("in_0")
if self.use_K_as_const_variable:
# self.feed["in_1"] = np.array([self.k]).astype("int32")
# self.feed_list.append("in_1")
pass
print("[TestTopKop] feed data:\n%s" % self.feed["in_0"])
def set_attrs(self):
self.attrs = {
# "axis": -1,
# "sorted": True
}
if not self.use_K_as_const_variable:
self.attrs["k"] = self.k
def _test_base(self, run_ipu=True, op=None, data_feed=None):
assert (op is not None)
assert (data_feed is not None)
scope = fluid.core.Scope()
main_prog = paddle.static.Program()
startup_prog = paddle.static.Program()
SEED = self.SEED
main_prog.random_seed = SEED
startup_prog.random_seed = SEED
with fluid.scope_guard(scope):
with paddle.static.program_guard(main_prog, startup_prog):
x = paddle.static.data(
name=self.feed_list[0],
shape=self.feed_shape[0],
dtype='float32')
if not self.use_K_as_const_variable:
topk_values, topk_indices = op(x, **self.attrs)
else:
# !important, popart cannot accept non const tensor
# K_t = paddle.static.data(name="in_1", shape=[1], dtype='int32')
K_t = fluid.layers.fill_constant(
shape=[1], dtype='int32', value=self.k, name="in_2")
topk_values, topk_indices = op(x, K_t, **self.attrs)
fetch_list = [topk_values.name, topk_indices.name]
if run_ipu:
place = paddle.IPUPlace()
else:
place = paddle.CPUPlace()
exe = paddle.static.Executor(place)
exe.run(startup_prog)
if run_ipu:
feed_list = self.feed_list
ipu_strategy = compiler.get_ipu_strategy()
ipu_strategy.is_training = self.is_training
program = compiler.IPUCompiledProgram(
main_prog,
ipu_strategy=ipu_strategy).compile(feed_list, fetch_list)
else:
program = main_prog
print("Running inference ...")
result = exe.run(program, feed=data_feed, fetch_list=fetch_list)
print("Complete running infrence.")
return result
def test_base(self):
for op in self.ops:
res0_topk_values, res0_topk_indices = self._test_base(
True, op=op, data_feed=self.feed)
res1_topk_values, res1_topk_indices = self._test_base(
False, op=paddle.fluid.layers.topk, data_feed=self.feed)
print("[TestTopKop] IPU res0 values:\n%s\n" % res0_topk_values)
print("[TestTopKop] CPU res1 values:\n%s\n" % res1_topk_values)
view_type = np.uint32
print("[TestTopKop] IPU res0 indices:\n%s\n" %
res0_topk_indices.astype(view_type))
print("[TestTopKop] CPU res1 indices:\n%s\n" % res1_topk_indices)
self.assertTrue(
np.allclose(
res0_topk_values.flatten(),
res1_topk_values.flatten(),
atol=self.atol))
self.assertTrue(
np.allclose(
res0_topk_indices.astype(view_type).flatten(),
res1_topk_indices.flatten(),
atol=self.atol))
if __name__ == "__main__":
unittest.main()
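
A rough NumPy sketch of the values and indices both topk variants are expected to agree on for a [3, 5] input with k=3 (largest first along the last axis; an illustration, not the operator implementation):

import numpy as np

def topk_ref(x, k):
    # Indices of the k largest entries along the last axis, largest first.
    idx = np.argsort(-x, axis=-1)[..., :k]
    vals = np.take_along_axis(x, idx, axis=-1)
    return vals, idx

x = np.random.uniform(size=[3, 5]).astype(np.float32)
values, indices = topk_ref(x, k=3)
print(values.shape, indices.shape)   # (3, 3) (3, 3)
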
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.fluid.compiler as compiler
import paddle.optimizer
import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import (IPUOpTest,
np_dtype_to_fluid_str)
paddle.enable_static()
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
self.set_atol()
self.set_training()
self.set_feed()
self.set_feed_attr()
self.set_attrs()
def set_feed(self):
self.feed = {
"x": np.random.uniform(size=[1, 3, 10, 10]).astype('float32'),
}
def set_feed_attr(self):
self.feed_shape = [x.shape for x in self.feed.values()]
self.feed_list = list(self.feed.keys())
self.feed_dtype = [
np_dtype_to_fluid_str(x.dtype) for x in self.feed.values()
]
def set_attrs(self):
self.attrs = {"perm": [0, 2, 3, 1]}
def _test_base(self, run_ipu=True):
scope = fluid.core.Scope()
main_prog = paddle.static.Program()
startup_prog = paddle.static.Program()
SEED = self.SEED
main_prog.random_seed = SEED
startup_prog.random_seed = SEED
with fluid.scope_guard(scope):
with paddle.static.program_guard(main_prog, startup_prog):
x = paddle.static.data(
name=self.feed_list[0],
shape=self.feed_shape[0],
dtype=self.feed_dtype[0])
out = paddle.fluid.layers.transpose(x, **self.attrs)
fetch_list = [out.name]
if run_ipu:
place = paddle.IPUPlace()
else:
place = paddle.CPUPlace()
exe = paddle.static.Executor(place)
exe.run(startup_prog)
if run_ipu:
feed_list = self.feed_list
ipu_strategy = compiler.get_ipu_strategy()
ipu_strategy.is_training = self.is_training
program = compiler.IPUCompiledProgram(
main_prog,
ipu_strategy=ipu_strategy).compile(feed_list, fetch_list)
else:
program = main_prog
result = exe.run(program, feed=self.feed, fetch_list=fetch_list)
return result[0]
def test_base(self):
res0 = self._test_base(False)
res1 = self._test_base(True)
self.assertTrue(
np.allclose(
res0.flatten(), res1.flatten(), atol=self.atol))
self.assertTrue(res0.shape == res1.shape)
class TestCase1(TestBase):
def set_attrs(self):
self.attrs = {"perm": [0, 1, 2, 3]}
class TestCase2(TestBase):
def set_feed(self):
self.feed = {
"x": np.random.uniform(size=[1, 2, 3, 4, 5]).astype('float32'),
}
def set_attrs(self):
self.attrs = {"perm": [4, 0, 2, 3, 1]}
if __name__ == "__main__":
unittest.main()
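
A minimal NumPy sketch of the permutations checked above; perm maps output dimension i to input dimension perm[i] (illustrative only):

import numpy as np

x = np.random.uniform(size=[1, 3, 10, 10]).astype('float32')
print(np.transpose(x, axes=[0, 2, 3, 1]).shape)   # (1, 10, 10, 3) -> TestBase
print(np.transpose(x, axes=[0, 1, 2, 3]).shape)   # (1, 3, 10, 10) -> TestCase1 (identity)

x5 = np.random.uniform(size=[1, 2, 3, 4, 5]).astype('float32')
print(np.transpose(x5, axes=[4, 0, 2, 3, 1]).shape)   # (5, 1, 3, 4, 2) -> TestCase2
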
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.fluid.compiler as compiler
import paddle.optimizer
import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import (IPUOpTest,
np_dtype_to_fluid_str)
paddle.enable_static()
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
self.set_atol()
self.set_training()
self.set_feed()
self.set_feed_attr()
self.set_attrs()
def set_feed(self):
self.feed = {"x": np.random.uniform(size=[1, 2, 3]).astype('float32')}
def set_feed_attr(self):
self.feed_shape = [x.shape for x in self.feed.values()]
self.feed_list = list(self.feed.keys())
self.feed_dtype = [
np_dtype_to_fluid_str(x.dtype) for x in self.feed.values()
]
def set_attrs(self):
self.attrs = {"axes": 0}
def _test_base(self, run_ipu=True):
scope = fluid.core.Scope()
main_prog = paddle.static.Program()
startup_prog = paddle.static.Program()
SEED = self.SEED
main_prog.random_seed = SEED
startup_prog.random_seed = SEED
with fluid.scope_guard(scope):
with paddle.static.program_guard(main_prog, startup_prog):
x = paddle.static.data(
name=self.feed_list[0],
shape=self.feed_shape[0],
dtype=self.feed_dtype[0])
out = paddle.fluid.layers.unsqueeze(x, **self.attrs)
fetch_list = [out.name]
if run_ipu:
place = paddle.IPUPlace()
else:
place = paddle.CPUPlace()
exe = paddle.static.Executor(place)
exe.run(startup_prog)
if run_ipu:
feed_list = self.feed_list
ipu_strategy = compiler.get_ipu_strategy()
ipu_strategy.is_training = self.is_training
program = compiler.IPUCompiledProgram(
main_prog,
ipu_strategy=ipu_strategy).compile(feed_list, fetch_list)
else:
program = main_prog
result = exe.run(program, feed=self.feed, fetch_list=fetch_list)
return result[0]
def test_base(self):
res0 = self._test_base(False)
res1 = self._test_base(True)
self.assertTrue(
np.allclose(
res0.flatten(), res1.flatten(), atol=self.atol))
self.assertTrue(res0.shape == res1.shape)
class TestCase1(TestBase):
def set_attrs(self):
self.attrs = {"axes": -1}
class TestCase2(TestBase):
def set_attrs(self):
self.attrs = {"axes": [1, 2]}
if __name__ == "__main__":
unittest.main()
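
For reference, unsqueeze inserts size-1 dimensions at the given axes; a NumPy sketch of the three attribute variants on the [1, 2, 3] feed (illustrative only, multiple axes applied one at a time):

import numpy as np

x = np.random.uniform(size=[1, 2, 3]).astype('float32')
print(np.expand_dims(x, axis=0).shape)    # (1, 1, 2, 3) -> attrs {"axes": 0}
print(np.expand_dims(x, axis=-1).shape)   # (1, 2, 3, 1) -> attrs {"axes": -1}
# Inserting at axis 1 and then axis 2, mirroring attrs {"axes": [1, 2]}:
out = np.expand_dims(np.expand_dims(x, axis=1), axis=2)
print(out.shape)                          # (1, 1, 1, 2, 3)
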
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.fluid.compiler as compiler
from paddle.fluid.executor import global_scope
import paddle.optimizer
import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import (IPUOpTest,
np_dtype_to_fluid_str)
paddle.enable_static()
@unittest.skipIf(not paddle.is_compiled_with_ipu(),
"core is not compiled with IPU")
class TestBase(IPUOpTest):
def setUp(self):
self.set_atol()
self.set_training()
self.set_feed()
self.set_feed_attr()
self.set_attrs()
def set_feed(self):
self.feed = {
"x": np.random.uniform(size=[1, 3, 10, 10]).astype('float32'),
}
def set_feed_attr(self):
self.feed_shape = [x.shape for x in self.feed.values()]
self.feed_list = list(self.feed.keys())
self.feed_dtype = [
np_dtype_to_fluid_str(x.dtype) for x in self.feed.values()
]
def set_attrs(self):
self.attrs = {
"shape": [30, 10],
"inplace": True,
}
def _test_base(self, run_ipu=True):
scope = fluid.core.Scope()
main_prog = paddle.static.Program()
startup_prog = paddle.static.Program()
SEED = self.SEED
main_prog.random_seed = SEED
startup_prog.random_seed = SEED
with fluid.scope_guard(scope):
with paddle.static.program_guard(main_prog, startup_prog):
x = paddle.static.data(
name=self.feed_list[0],
shape=self.feed_shape[0],
dtype=self.feed_dtype[0])
add1 = paddle.fluid.layers.elementwise_add(x, x)
reshape = paddle.fluid.layers.reshape(add1, **self.attrs)
add2 = paddle.fluid.layers.elementwise_add(reshape, reshape)
scale1 = paddle.fluid.layers.scale(add2)
scale2 = paddle.fluid.layers.scale(scale1, scale=1.3, bias=0.5)
scale3 = paddle.fluid.layers.scale(scale2, scale=2, bias=0.7)
fetch_list = [scale3.name]
if run_ipu:
place = paddle.IPUPlace()
else:
place = paddle.CPUPlace()
exe = paddle.static.Executor(place)
exe.run(startup_prog)
scale1_out = main_prog.global_block().ops[4].output("Out")[0]
main_prog.global_block().ops[4]._rename_output(scale1_out,
add2.name)
main_prog.global_block().ops[5]._rename_input(scale1_out, add2.name)
if run_ipu:
feed_list = self.feed_list
ipu_strategy = compiler.get_ipu_strategy()
ipu_strategy.is_training = self.is_training
program = compiler.IPUCompiledProgram(
main_prog,
ipu_strategy=ipu_strategy).compile(feed_list, fetch_list)
else:
program = main_prog
result = exe.run(program, feed=self.feed, fetch_list=fetch_list)
return result[0]
def test_base(self):
res0 = self._test_base(True)
res1 = self._test_base(False)
self.assertTrue(
np.allclose(
res0.flatten(), res1.flatten(), atol=self.atol))
self.assertTrue(res0.shape == res1.shape)
if __name__ == "__main__":
unittest.main()
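
The graph surgery above rewires scale1's output back onto add2's name to exercise inplace handling; numerically the chain is still add -> reshape -> add -> three scales. A hedged NumPy sketch of the expected values, assuming fluid.layers.scale defaults (scale=1.0, bias=0.0, bias applied after scaling) for the first scale op:

import numpy as np

x = np.random.uniform(size=[1, 3, 10, 10]).astype('float32')
add1 = x + x
reshape = add1.reshape([30, 10])     # attrs {"shape": [30, 10]}
add2 = reshape + reshape
scale1 = add2 * 1.0 + 0.0            # default scale and bias
scale2 = scale1 * 1.3 + 0.5
scale3 = scale2 * 2.0 + 0.7
print(scale3.shape)                  # (30, 10)
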