#  Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
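
# Round-trip save/load test for IPU static-graph training: the save pass
# trains for `steps` and checkpoints at `save_at_step`; the load pass
# restores that checkpoint and resumes, and the steps both passes execute
# must produce matching losses.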

import tempfile
import unittest

import numpy as np
import paddle
import paddle.static
from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest


@unittest.skipIf(not paddle.is_compiled_with_ipu(),
                 "core is not compiled with IPU")
class TestBase(IPUOpTest):
    def setUp(self):
        self.set_atol()
        self.set_data_feed()
        self.set_feed_attr()
        self.set_op_attrs()

    def set_data_feed(self):
        data = np.random.uniform(size=[1, 3, 10, 10])
        self.feed_fp32 = {"in_0": data.astype(np.float32)}
        self.feed_fp16 = {"in_0": data.astype(np.float16)}

    def set_feed_attr(self):
        self.feed_shape = [x.shape for x in self.feed_fp32.values()]
        self.feed_list = list(self.feed_fp32.keys())

    def set_op_attrs(self):
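        # Test configuration: total step count, the step at which to save,
        # the optimizer to exercise, the precision, and a temporary
        # directory that holds the checkpoint.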
        self.attrs = {}
        self.attrs['steps'] = 100
        self.attrs['save_at_step'] = 20
        self.attrs['is_training'] = True
        self.attrs['opt_type'] = 'sgd'
        self.attrs['enable_fp16'] = False
        self.attrs['model_path'] = tempfile.TemporaryDirectory()

    def _test_base(self, save_otherwise_load):
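        # save_otherwise_load=True runs the save pass (train from scratch
        # and checkpoint); False runs the load pass (restore and resume).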
        scope = paddle.static.Scope()
        main_prog = paddle.static.Program()
        startup_prog = paddle.static.Program()
        main_prog.random_seed = self.SEED
        startup_prog.random_seed = self.SEED
        generator = paddle.fluid.unique_name.UniqueNameGenerator()

        with paddle.fluid.unique_name.guard(generator):
            with paddle.static.scope_guard(scope):
                with paddle.static.program_guard(main_prog, startup_prog):
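                    # Minimal model: a single bias-free conv whose mean
                    # output serves as the loss.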
                    x = paddle.static.data(
                        name=self.feed_list[0],
                        shape=self.feed_shape[0],
                        dtype='float32')
                    conv1 = paddle.static.nn.conv2d(
                        x,
                        num_filters=3,
                        filter_size=3,
                        bias_attr=False,
                        name='conv2d')
                    loss = paddle.mean(conv1)

                    if self.attrs['is_training']:
                        if self.attrs['opt_type'] == 'sgd':
                            sgd = paddle.optimizer.SGD(learning_rate=1e-2)
                            sgd.minimize(loss)
                        elif self.attrs['opt_type'] == 'adam':
                            adam = paddle.optimizer.Adam(learning_rate=1e-2)
                            adam.minimize(loss)
                        elif self.attrs['opt_type'] == 'lamb':
                            lamb = paddle.optimizer.Lamb(learning_rate=1e-2)
                            lamb.minimize(loss)
                    fetch_list = [loss.name]

                place = paddle.IPUPlace()
                exe = paddle.static.Executor(place)
                exe.run(startup_prog)

                if not save_otherwise_load:
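                    # Load pass: restore the parameters saved by the save
                    # pass before compiling for the IPU.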
                    paddle.static.load(main_prog, self.attrs['model_path'].name)

                # Compile for the IPU; both passes use the same training
                # and precision settings so their results are comparable.
                ipu_strategy = paddle.static.IpuStrategy()
                ipu_strategy.set_graph_config(
                    is_training=self.attrs['is_training'])
                ipu_strategy.set_precision_config(
                    enable_fp16=self.attrs['enable_fp16'])
                ipu_program = paddle.static.IpuCompiledProgram(
                    main_prog, ipu_strategy=ipu_strategy)
                program = ipu_program.compile(self.feed_list, fetch_list)

                result = []
                # The save pass runs all `steps`; the load pass resumes from
                # the checkpoint and runs only the remaining steps.
                run_steps = self.attrs['steps'] if save_otherwise_load \
                    else self.attrs['steps'] - self.attrs['save_at_step']

                # Feed FP16 inputs when running the half-precision variants.
                feed = self.feed_fp16 if self.attrs[
                    'enable_fp16'] else self.feed_fp32
                for i in range(run_steps):
                    tmp = exe.run(program, feed=feed, fetch_list=fetch_list)

                    if save_otherwise_load and \
                        i == self.attrs['save_at_step'] - 1:
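                        # Copy the trained weights from IPU device memory
                        # back to the host scope so the checkpoint captures
                        # the current values.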
                        ipu_program._backend.weights_to_host()
                        paddle.static.save(main_prog,
                                           self.attrs['model_path'].name)

                    # Record only the steps both passes execute so the two
                    # result sequences align for comparison.
                    if save_otherwise_load and i >= self.attrs['save_at_step']:
                        result.append(tmp)
                    elif not save_otherwise_load:
                        result.append(tmp)

                return np.asarray(result).flatten()

    def test_base(self):
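        # Run the save pass, then the load pass; the steps both passes
        # execute must match within `atol`.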
        res0 = self._test_base(True)
        res1 = self._test_base(False)

        self.assertTrue(np.allclose(res0, res1, atol=self.atol))
        self.attrs['model_path'].cleanup()


# The subclasses below repeat the round trip with the other optimizers
# and with FP16 enabled.
class TestAdam(TestBase):
    def set_op_attrs(self):
        self.attrs = {}
        self.attrs['steps'] = 100
        self.attrs['save_at_step'] = 20
        self.attrs['is_training'] = True
        self.attrs['opt_type'] = 'adam'
        self.attrs['enable_fp16'] = False
        self.attrs['model_path'] = tempfile.TemporaryDirectory()


class TestLamb(TestBase):
    def set_op_attrs(self):
        self.attrs = {}
        self.attrs['steps'] = 100
        self.attrs['save_at_step'] = 20
        self.attrs['is_training'] = True
        self.attrs['opt_type'] = 'lamb'
        self.attrs['enable_fp16'] = False
        self.attrs['model_path'] = tempfile.TemporaryDirectory()


# FP16 variants are skipped when running on the ipumodel simulator.
@unittest.skipIf(IPUOpTest.use_ipumodel(), "skip for ipumodel")
class TestSGDFP16(TestBase):
    def set_op_attrs(self):
        self.attrs = {}
        self.attrs['steps'] = 100
        self.attrs['save_at_step'] = 20
        self.attrs['is_training'] = True
        self.attrs['opt_type'] = 'sgd'
        self.attrs['enable_fp16'] = True
        self.attrs['model_path'] = tempfile.TemporaryDirectory()


@unittest.skipIf(IPUOpTest.use_ipumodel(), "skip for ipumodel")
class TestAdamFP16(TestBase):
    def set_op_attrs(self):
        self.attrs = {}
        self.attrs['steps'] = 100
        self.attrs['save_at_step'] = 20
        self.attrs['is_training'] = True
        self.attrs['opt_type'] = 'adam'
        self.attrs['enable_fp16'] = True
        self.attrs['model_path'] = tempfile.TemporaryDirectory()


@unittest.skipIf(IPUOpTest.use_ipumodel(), "skip for ipumodel")
class TestLambFP16(TestBase):
    def set_op_attrs(self):
        self.attrs = {}
        self.attrs['steps'] = 100
        self.attrs['save_at_step'] = 20
        self.attrs['is_training'] = True
        self.attrs['opt_type'] = 'lamb'
        self.attrs['enable_fp16'] = True
        self.attrs['model_path'] = tempfile.TemporaryDirectory()


if __name__ == "__main__":
    unittest.main()