#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import unittest

import numpy as np
import paddle.fluid.core as core
from paddle.fluid.op import Operator
import paddle.fluid as fluid


def create_selected_rows_and_tensor(scope, place, height, row_num,
                                    embedding_size):
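    """Create a SelectedRows gradient and an equivalent dense tensor.

    The "@selected_rows@" variable holds `row_num` randomly chosen rows
    (duplicates allowed) of random values; the dense "grad" tensor
    accumulates those rows at their indices, so the sparse and dense
    RMSProp paths can be checked against the same numpy reference.
    """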
    sr = scope.var("@selected_rows@").get_selected_rows()
    tensor = scope.var("grad").get_tensor()

    rows = np.random.randint(
        low=0, high=height, size=[row_num]).astype('int64')
    sr_val = np.random.random(size=[row_num, embedding_size]).astype('float32')

    sr.set_height(height)
    sr.set_rows(rows)
    sr.get_tensor().set(sr_val, place)

    tensor_val = np.zeros(shape=[height, embedding_size], dtype='float32')
    for i in range(row_num):
        row = rows[i]
        tensor_val[row, :] = tensor_val[row, :] + sr_val[i, :]

    tensor.set(tensor_val, place)
    return tensor_val, sr_val


class TestBase(unittest.TestCase):
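    """Common setup: scope variables plus a numpy reference RMSProp step."""
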
    def setup(self,
              place,
              is_sparse,
              centered,
              size,
              row_num=None,
              epsilon=1e-6):
        np.random.seed(5)  # fix seed

        self.scope = fluid.global_scope()
        self.place = place

        self.param_name = "param"
        self.param = np.random.random(size).astype("float32")

        self.mean_square_name = "mean_square"
        self.mean_square = np.random.uniform(
            low=1, high=2, size=size).astype("float32")

        self.mean_grad_name = "mean_grad"
        self.mean_grad = np.random.random(size).astype("float32")

        self.lr_name = "lr"
        self.learning_rate = np.array([0.01]).astype("float32")

        self.grad_name = "grad"

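        # Register the gradient either as SelectedRows (sparse path) or as a
        # plain dense tensor; in both cases self.grad holds the dense values
        # used by the numpy reference below.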
        self.is_sparse = is_sparse
        if self.is_sparse:
            self.grad_sr_name = "@selected_rows@"
            self.grad, self.grad_sr = create_selected_rows_and_tensor(
                self.scope, place, size[0], row_num, size[1])
        else:
            self.grad = np.random.random(size).astype("float32")
            grad_tensor = self.scope.var(self.grad_name).get_tensor()
            grad_tensor.set(self.grad, place)

        self.moment_name = "moment"
        self.moment = np.random.uniform(
            low=0, high=1, size=size).astype("float32")

        self.epsilon = epsilon
        self.decay = 0.9
        self.momentum = 0.0
        self.centered = centered

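        # Numpy reference for one RMSProp step:
        #   ms_out    = decay * mean_square + (1 - decay) * grad^2
        #   centered:   mg_out     = decay * mean_grad + (1 - decay) * grad
        #               moment_out = momentum * moment +
        #                            lr * grad / sqrt(ms_out - mg_out^2 + eps)
        #   otherwise:  moment_out = momentum * moment +
        #                            lr * grad / sqrt(ms_out + eps)
        #   param_out = param - moment_out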
        self.ms_out = self.decay * self.mean_square + (
            1 - self.decay) * self.grad * self.grad
        if centered:
            self.mg_out = self.decay * self.mean_grad + (
                1 - self.decay) * self.grad
            self.moment_out = self.momentum * self.moment + \
                self.learning_rate * self.grad / np.sqrt(
                    self.ms_out - np.square(self.mg_out) + self.epsilon)
        else:
            self.moment_out = self.momentum * self.moment + \
                self.learning_rate * self.grad / np.sqrt(
                    self.ms_out + self.epsilon)

        self.param_out = self.param - self.moment_out

        # create and initialize Param Variable
        self.param_tensor = self.scope.var(self.param_name).get_tensor()
        self.param_tensor.set(self.param, place)

        self.mean_square_tensor = self.scope.var(
            self.mean_square_name).get_tensor()
        self.mean_square_tensor.set(self.mean_square, place)

        lr = self.scope.var(self.lr_name).get_tensor()
        lr.set(self.learning_rate, place)

        self.moment_tensor = self.scope.var(self.moment_name).get_tensor()
        self.moment_tensor.set(self.moment, place)

        if self.centered:
            self.mean_grad_tensor = self.scope.var(
                self.mean_grad_name).get_tensor()
            self.mean_grad_tensor.set(self.mean_grad, place)

    def check(self, actual_t, expect_t, place, out_name, atol=1e-5):
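        """Assert allclose and report the place and output name on failure."""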
        self.assertTrue(
            np.allclose(
                actual_t, expect_t, atol=atol),
            "Output (" + out_name + ") has diff at " + str(place) + "\nExpect "
            + str(expect_t) + "\n" + "But Got" + str(actual_t))


class TestRmspropOp(TestBase):
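    """Run the rmsprop op and check outputs against the numpy reference."""
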
    def check_with_place(self,
                         place,
                         is_sparse,
                         centered,
                         size,
                         row_num=None,
                         epsilon=1e-6):
        self.setup(place, is_sparse, centered, size, row_num, epsilon)
        self.run_and_check()

    def run_and_check(self):
        grad_name = self.grad_sr_name if self.is_sparse else self.grad_name

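        # Inputs and outputs share variable names, so the rmsprop op updates
        # Param / MeanSquare / Moment (and MeanGrad when centered) in place
        # within the current scope.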
        kwargs = {
            'Param': self.param_name,
            'Grad': grad_name,
            'MeanSquare': self.mean_square_name,
            'Moment': self.moment_name,
            'LearningRate': self.lr_name,
            'ParamOut': self.param_name,
            'MeanSquareOut': self.mean_square_name,
            'MomentOut': self.moment_name,
            'epsilon': self.epsilon,
            'decay': self.decay,
            'momentum': self.momentum,
            'centered': self.centered
        }

        if self.centered:
            kwargs['MeanGrad'] = self.mean_grad_name
            kwargs['MeanGradOut'] = self.mean_grad_name

        rmsprop_op = Operator('rmsprop', **kwargs)
        atol = 1e-6

        rmsprop_op.run(self.scope, self.place)

        self.check(
            np.array(self.mean_square_tensor), self.ms_out, self.place,
            self.mean_square_name)
        self.check(
            np.array(self.moment_tensor),
            self.moment_out,
            self.place,
            self.moment_name,
            atol=atol)
        self.check(
            np.array(self.param_tensor),
            self.param_out,
            self.place,
            self.param_name,
            atol=atol)

        if self.centered:
            self.check(
                np.array(self.mean_grad_tensor), self.mg_out, self.place,
                self.mean_grad_name)

    def test_rmsprop(self):
        places = [core.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(core.CUDAPlace(0))

        size = (128, 320)
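        # Cover dense vs. sparse gradients and plain vs. centered RMSProp; the
        # sparse cases use row counts above (512) and below (60) the parameter
        # height (128) so duplicate rows are exercised. Each case runs in a
        # fresh scope so variables do not leak between configurations.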
        for place in places:
            for centered in [False, True]:
                with fluid.scope_guard(core.Scope()):
                    self.check_with_place(
                        place, is_sparse=False, centered=centered, size=size)

                with fluid.scope_guard(core.Scope()):
                    self.check_with_place(
                        place,
                        is_sparse=True,
                        centered=centered,
                        row_num=512,
                        size=size)

                with fluid.scope_guard(core.Scope()):
                    self.check_with_place(
                        place,
                        is_sparse=True,
                        centered=centered,
                        row_num=60,
                        size=size)


if __name__ == "__main__":
    unittest.main()