#  Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
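
"""Dygraph-to-static (@to_static) training tests for IPU.

Each case trains SimpleLayer on both IPU and CPU and asserts that the
per-step losses agree."""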

import tempfile
import unittest
from functools import partial

import numpy as np
import paddle
from paddle.fluid.dygraph.dygraph_to_static.program_translator import (
    ProgramCache,
)
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUD2STest
from paddle.jit import to_static
from paddle.optimizer.lr import LRScheduler


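# A small Conv2D model whose forward pass optionally computes a loss. The
# flags capture the IPU/CPU differences: the IPU path marks the loss with
# identity_loss (reduced on device), while the CPU path reduces with
# paddle.mean on the host.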
class SimpleLayer(paddle.nn.Layer):
    def __init__(
        self,
        loss_op=None,
        use_softmax=True,
        use_reduction=True,
        use_identity_loss=True,
    ):
        super().__init__()
        self.loss_op = loss_op
        self.conv = paddle.nn.Conv2D(
            in_channels=3, out_channels=1, kernel_size=2, stride=1
        )
        self.use_softmax = use_softmax
        self.use_reduction = use_reduction
        self.use_identity_loss = use_identity_loss

    @to_static()
    def forward(self, x, target=None):
        x = self.conv(x)
        x = paddle.fluid.layers.flatten(x, axis=1)
        if target is not None:
            if self.use_softmax:
                x = paddle.fluid.layers.softmax(x)
            if self.loss_op:
                loss = self.loss_op(x, target)
            else:
                loss = paddle.fluid.layers.cross_entropy(x, target)
            if self.use_reduction:
                loss = paddle.mean(loss)
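            # identity_loss marks this tensor as the loss of the compiled IPU
            # graph; its second argument selects the on-device reduction.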
            if self.use_identity_loss:
                loss = paddle.incubate.identity_loss(loss, 1)
            return x, loss
        return x


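# Base case: train SimpleLayer for 100 steps on IPU and on CPU and require
# the two loss curves to agree within tolerance.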
class TestBase(IPUD2STest):
    def setUp(self):
        self.set_op_attrs()
        self.set_data_feed()

    def set_op_attrs(self):
        self.loss_op = paddle.fluid.layers.cross_entropy

    def set_data_feed(self):
        self.data = paddle.uniform((8, 3, 10, 10), dtype='float32')
        self.label = paddle.randint(0, 10, shape=[8], dtype='int64')

    def create_model(self, use_ipu=False):
        return SimpleLayer(
            loss_op=self.loss_op,
            use_softmax=True,
            use_reduction=not use_ipu,
            use_identity_loss=use_ipu,
        )

    def _test(self, use_ipu=False):
        paddle.seed(self.SEED)
        np.random.seed(self.SEED)
        model = self.create_model(use_ipu)
        optim = paddle.optimizer.Adam(
            learning_rate=0.01, parameters=model.parameters()
        )

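        # Compile the @to_static program for a single IPU; set_optimizer
        # hands the update step to the IPU backend, so the host never calls
        # optim.step() on this path.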
        if use_ipu:
            paddle.set_device('ipu')
            ipu_strategy = paddle.static.IpuStrategy()
            ipu_strategy.set_graph_config(
                num_ipus=1,
                is_training=True,
                micro_batch_size=1,
                enable_manual_shard=False,
            )
            ipu_strategy.set_optimizer(optim)

        epochs = 100
        result = []
        for _ in range(epochs):
            # On IPU, a single model() call runs forward, backward and the
            # weight update.
            pred, loss = model(self.data, self.label)
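            # The CPU reference drives the optimizer manually.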
            if not use_ipu:
                loss.backward()
                optim.step()
                optim.clear_grad()
            result.append(loss)

        if use_ipu:
            ipu_strategy.release_patch()

        return np.array(result)

    def test_training(self):
        ipu_loss = self._test(True).flatten()
        cpu_loss = self._test(False).flatten()
        np.testing.assert_allclose(ipu_loss, cpu_loss, rtol=1e-05, atol=1e-4)


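# Round-trips model and optimizer state through paddle.save/paddle.load after
# 100 steps, then trains 100 more; inherits test_training from TestBase.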
class TestSaveLoad(TestBase):
    def setUp(self):
        super().setUp()
        self.save_path = tempfile.TemporaryDirectory()

    def tearDown(self):
        super().tearDown()
        self.save_path.cleanup()

    def _test(self, use_ipu=False):
        paddle.seed(self.SEED)
        np.random.seed(self.SEED)
        model = self.create_model(use_ipu)
        optim = paddle.optimizer.Adam(
            learning_rate=0.01, parameters=model.parameters()
        )
        model_path = '{}/model_state_dict_{}.pdparams'.format(
            self.save_path.name, 'ipu' if use_ipu else 'cpu'
        )
        optim_path = '{}/optim_state_dict_{}.pdopt'.format(
            self.save_path.name, 'ipu' if use_ipu else 'cpu'
        )

        if use_ipu:
            paddle.set_device('ipu')
            ipu_strategy = paddle.static.IpuStrategy()
            ipu_strategy.set_graph_config(
                num_ipus=1,
                is_training=True,
                micro_batch_size=1,
                enable_manual_shard=False,
            )
            ipu_strategy.set_optimizer(optim)

        epochs = 100
        result = []
        for _ in range(epochs):
            # On IPU, a single model() call runs forward, backward and the
            # weight update.
            pred, loss = model(self.data, self.label)
            if not use_ipu:
                loss.backward()
                optim.step()
                optim.clear_grad()
            result.append(loss)

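        # Copy the trained weights from IPU device memory back to the host so
        # state_dict() reflects them before saving.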
        if use_ipu:
            paddle.fluid.core.IpuBackend.get_instance().weights_to_host()

        paddle.save(model.state_dict(), model_path)
        paddle.save(optim.state_dict(), optim_path)
        model.set_state_dict(paddle.load(model_path))
        optim.set_state_dict(paddle.load(optim_path))

        for _ in range(epochs):
            # On IPU, a single model() call runs forward, backward and the
            # weight update.
            pred, loss = model(self.data, self.label)
            if not use_ipu:
                loss.backward()
                optim.step()
                optim.clear_grad()
            result.append(loss)

        if use_ipu:
            ipu_strategy.release_patch()

        return np.array(result)


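# IPU dygraph-to-static support monkey-patches ProgramCache.__getitem__ and
# LRScheduler.step; release_patch() must restore the original methods.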
class TestPatch(IPUD2STest):
    def setUp(self):
        paddle.disable_static()

    def test(self, use_ipu=False):
        old_getter = ProgramCache.__getitem__
        old_step = LRScheduler.step

        ipu_strategy = paddle.static.IpuStrategy()
        ipu_strategy.release_patch()

        reset_getter = ProgramCache.__getitem__
        reset_step = LRScheduler.step

        self.assertTrue(reset_getter is old_getter)
        self.assertTrue(reset_step is old_step)


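# The remaining cases train without identity_loss, covering different loss
# ops and reduction settings on both devices.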
class TestWithoutIdentityLoss1(TestBase):
    def create_model(self, use_ipu=False):
        return SimpleLayer(
            loss_op=self.loss_op,
            use_softmax=True,
            use_reduction=True,
            use_identity_loss=False,
        )


class TestWithoutIdentityLoss2(TestBase):
    def set_op_attrs(self):
        self.loss_op = paddle.fluid.layers.softmax_with_cross_entropy

    def set_data_feed(self):
        self.data = paddle.uniform((8, 3, 10, 10), dtype='float32')
        self.label = paddle.randint(0, 10, shape=[8, 1], dtype='int64')

    def create_model(self, use_ipu=False):
        return SimpleLayer(
            loss_op=self.loss_op,
            use_softmax=False,
            use_reduction=True,
            use_identity_loss=False,
        )


class TestWithoutIdentityLoss3(TestBase):
    def set_op_attrs(self):
        self.loss_op = partial(paddle.fluid.layers.kldiv_loss, reduction="none")

    def set_data_feed(self):
        self.data = paddle.uniform((8, 3, 10, 10), dtype='float32')
        self.label = paddle.rand(shape=[8, 81], dtype='float32')

    def create_model(self, use_ipu=False):
        return SimpleLayer(
            loss_op=self.loss_op,
            use_softmax=True,
            use_reduction=True,
            use_identity_loss=False,
        )


class TestWithoutIdentityLoss4(TestBase):
    def set_op_attrs(self):
        self.loss_op = paddle.nn.functional.binary_cross_entropy

    def set_data_feed(self):
        self.data = paddle.uniform((8, 3, 10, 10), dtype='float32')
        self.label = paddle.rand(shape=[8, 81], dtype='float32')

    def create_model(self, use_ipu=False):
        return SimpleLayer(
            loss_op=self.loss_op,
            use_softmax=True,
            use_reduction=False,
            use_identity_loss=False,
        )


class TestWithoutIdentityLoss5(TestBase):
    def set_op_attrs(self):
        self.loss_op = paddle.fluid.layers.sigmoid_cross_entropy_with_logits

    def set_data_feed(self):
        self.data = paddle.uniform((8, 3, 10, 10), dtype='float32')
        self.label = paddle.randint(0, 10, shape=[8, 81], dtype='int64').astype(
            'float32'
        )

    def create_model(self, use_ipu=False):
        return SimpleLayer(
            loss_op=self.loss_op,
            use_softmax=True,
            use_reduction=True,
            use_identity_loss=False,
        )


if __name__ == "__main__":
    unittest.main()