# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import unittest
import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.nn as nn
from paddle.fluid.framework import _test_eager_guard

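# LookAhead (Zhang et al., 2019) keeps a slow copy of every parameter and,
# every k fast-optimizer steps, interpolates it toward the fast weights:
# slow <- slow + alpha * (fast - slow); the fast weights are then reset to
# the new slow weights.  Both tests below replay this rule by hand and
# compare the result against what the optimizer actually produced.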
LOOKAHEAD_K = 5
LOOKAHEAD_ALPHA = 0.2
SGD_LR = 1.0


class TestLookAhead(unittest.TestCase):

    def test_lookahead_static(self):
        paddle.enable_static()
        place = fluid.CPUPlace()
        exe = fluid.Executor(place)
        train_program = fluid.Program()
        startup = fluid.Program()
        with fluid.program_guard(train_program, startup):
            with fluid.unique_name.guard():
                data = fluid.data(name='X', shape=[None, 1], dtype='float32')
                hidden = fluid.layers.fc(input=data, size=10)
                loss = fluid.layers.mean(hidden)

                optimizer = paddle.optimizer.SGD(learning_rate=SGD_LR)
                lookahead = paddle.incubate.optimizer.LookAhead(
                    optimizer, alpha=LOOKAHEAD_ALPHA, k=LOOKAHEAD_K)
                lookahead.minimize(loss)

        exe.run(startup)
        slow_param = None
        fast_param = None
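        # Replay LookAhead by hand on the bias of the fc layer: plain SGD
        # produces the fast weights, and every LOOKAHEAD_K steps the slow
        # weights absorb them.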
        for i in range(10):
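            # On a sync step, fold the previous fast weights into the slow
            # weights before running, since this run performs the sync.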
            if (i + 1) % LOOKAHEAD_K == 0:
                slow_param = slow_param + LOOKAHEAD_ALPHA * (fast_param -
                                                             slow_param)
            x = np.random.random(size=(10, 1)).astype('float32')
            latest_b, b_grad = exe.run(program=train_program,
                                       feed={'X': x},
                                       fetch_list=[
                                           'fc_0.b_0',
                                           'fc_0.b_0@GRAD',
                                       ])
            if i == 0:
                slow_param = latest_b
            if (i + 1) % LOOKAHEAD_K == 0:
                self.assertAlmostEqual(np.mean(slow_param),
                                       np.mean(latest_b),
                                       delta=5e-3)
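            # One hand-computed SGD step on the fetched bias predicts the
            # fast weights the next iteration will start from.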
            fast_param = latest_b - SGD_LR * b_grad

    def func_test_look_ahead_dygraph(self):
        BATCH_SIZE = 16
        BATCH_NUM = 4
        EPOCH_NUM = 4

        IMAGE_SIZE = 784
        CLASS_NUM = 10

        # define a random dataset
        class RandomDataset(paddle.io.Dataset):

            def __init__(self, num_samples):
                self.num_samples = num_samples

            def __getitem__(self, idx):
                image = np.random.random([IMAGE_SIZE]).astype('float32')
                label = np.random.randint(0, CLASS_NUM, (1, )).astype('int64')
                return image, label

            def __len__(self):
                return self.num_samples

        class LinearNet(nn.Layer):

            def __init__(self):
                super(LinearNet, self).__init__()
                self._linear = nn.Linear(IMAGE_SIZE, CLASS_NUM)
                self.bias = self._linear.bias

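            # forward is translated by to_static, so LookAhead is also
            # covered with a program-translated model.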
            @paddle.jit.to_static
            def forward(self, x):
                return self._linear(x)

        def train(layer, loader, loss_fn, opt):
            idx = 0
            slow_param = None
            fast_param = None
            for epoch_id in range(EPOCH_NUM):
                for batch_id, (image, label) in enumerate(loader()):
                    idx += 1
                    out = layer(image)
                    loss = loss_fn(out, label)
                    loss.backward()
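                    # Hand-compute the fast weights: one SGD step on the
                    # bias, taken before opt.step() applies the same update.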
                    fast_param = (layer.bias.numpy() -
                                  SGD_LR * layer.bias.grad.numpy())
                    opt.step()
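                    # The optimizer seeds its slow weights from the first
                    # update and syncs every LOOKAHEAD_K steps; after a sync
                    # the live bias should equal the interpolated slow
                    # weights.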
                    if idx == 1:
                        slow_param = fast_param
                    if idx % LOOKAHEAD_K == 0:
                        slow_param = slow_param + LOOKAHEAD_ALPHA * (
                            fast_param - slow_param)
                        self.assertAlmostEqual(np.mean(slow_param),
                                               np.mean(layer.bias.numpy()),
                                               delta=5e-3)
                    opt.clear_grad()

        layer = LinearNet()
        loss_fn = nn.CrossEntropyLoss()
        optimizer = paddle.optimizer.SGD(learning_rate=SGD_LR,
                                         parameters=layer.parameters())
        lookahead = paddle.incubate.optimizer.LookAhead(optimizer,
                                                        alpha=LOOKAHEAD_ALPHA,
                                                        k=LOOKAHEAD_K)

        # create data loader
        dataset = RandomDataset(BATCH_NUM * BATCH_SIZE)
        loader = paddle.io.DataLoader(dataset,
                                      batch_size=BATCH_SIZE,
                                      shuffle=True,
                                      drop_last=True,
                                      num_workers=2)

        train(layer, loader, loss_fn, lookahead)

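    # Exercise the dygraph path both in eager mode and in legacy dygraph.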
    def test_look_ahead_dygraph(self):
        with _test_eager_guard():
            self.func_test_look_ahead_dygraph()
        self.func_test_look_ahead_dygraph()


if __name__ == "__main__":
    unittest.main()