test_dgc_optimizer.py
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import unittest

import paddle.fluid.framework as framework
import paddle.fluid.optimizer as optimizer
import paddle.fluid.regularizer as regularizer
from paddle.fluid.backward import append_backward
from paddle.fluid.transpiler.details import program_to_code


class TestDGCMomentumOptimizer(unittest.TestCase):
    class MockDGCMomentum(optimizer.DGCMomentumOptimizer):
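        # Expose the optimizer's private accumulator bookkeeping so the test
        # can assert on it directly.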
        def get_accumulators(self):
            return self._accumulators

        def get_velocity_str(self):
            return self._u_velocity_acc_str

    def check_dgc_momentum_optimizer(self,
                                     dims=[5, 10, 8],
                                     name="momentum",
                                     regularization=None):
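        # Build a minimal program (mul -> mean) whose single parameter holds
        # dims[0] * dims[1] elements, run the optimizer's backward and
        # apply_gradients passes, then check the emitted ops and accumulators.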
        init_program = framework.Program()
        program = framework.Program()
        block = program.global_block()
        mul_x = block.create_parameter(
            dtype="float32",
            shape=[dims[0], dims[1]],
            lod_level=0,
            name="mul.x",
            optimize_attr={'learning_rate': 1.1})
        mul_y = block.create_var(
            dtype="float32",
            shape=[dims[1], dims[2]],
            lod_level=0,
            name="mul.y")
        mul_out = block.create_var(
            dtype="float32",
            shape=[dims[0], dims[2]],
            lod_level=0,
            name="mul.out")
        block.append_op(
            type="mul",
            inputs={"X": mul_x,
                    "Y": mul_y},
            outputs={"Out": mul_out},
            attrs={"x_num_col_dims": 1})
        learning_rate = 0.01

        dgc_momentum_optimizer = self.MockDGCMomentum(
            learning_rate=learning_rate,
            momentum=0.2,
            rampup_begin_step=0,
            regularization=regularization)
        mean_out = block.create_var(
            dtype="float32", shape=[1], lod_level=0, name="mean.out")
        block.append_op(
            type="mean", inputs={"X": mul_out}, outputs={"Out": mean_out})
        # params_grads = append_backward(mean_out)
        params_grads = dgc_momentum_optimizer.backward(mean_out)
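        # The plain momentum path keeps a single velocity accumulator; the
        # dgc_momentum path keeps two, hence the expected counts below.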
        accumulator_count = 1 if name == "momentum" else 2
        self.assertEqual(len(params_grads), 1)
        self.assertEqual(
            len(dgc_momentum_optimizer.get_accumulators()), accumulator_count)
        with framework.program_guard(program, init_program):
            opts = dgc_momentum_optimizer.apply_gradients(params_grads)
        self.assertEqual(len(opts), 2)
        momentum_op = opts[-1]
        self.assertEqual([op.type for op in opts], ["scale", name])
        self.assertFalse(momentum_op.attr('use_nesterov'))

        # Check accumulators
        accumulators = dgc_momentum_optimizer.get_accumulators()
        self.assertEqual(len(accumulators), accumulator_count)
        self.assertIn(dgc_momentum_optimizer.get_velocity_str(), accumulators)
        velocity_acc = accumulators[dgc_momentum_optimizer.get_velocity_str()]
        self.assertEqual(len(velocity_acc), 1)
        self.assertIn(mul_x.name, velocity_acc)

        # Check init_program
        init_ops = init_program.global_block().ops
        self.assertEqual(len(init_ops), 1)
        self.assertEqual(init_ops[0].type, "fill_constant")
        self.assertAlmostEqual(init_ops[0].attr('value'), learning_rate)

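        # Dump the final program for manual inspection of the emitted ops.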
        with open("test_dgc_optimizer_" + name + ".log", "w") as f:
            program_to_code(program, fout=f)

    def test_momentum_without_dgc(self):
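        # 5 * 10 = 50 parameter elements, below the DGC size threshold, so the
        # plain momentum op is expected.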
        self.check_dgc_momentum_optimizer(
            regularization=regularizer.L1Decay(1e-4))

    def test_momentum_with_dgc(self):
        # 16 * 1024 = 16384 parameter elements, enough to take the
        # dgc_momentum path.
        self.check_dgc_momentum_optimizer(
            dims=[16, 1024, 8],
            name="dgc_momentum",
            regularization=regularizer.L2Decay(1e-4))
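

def _dgc_usage_sketch():
    # A minimal usage sketch (not exercised by these tests) of wiring
    # DGCMomentumOptimizer into a fluid program. The layer names, shapes, and
    # hyperparameters are illustrative assumptions; DGC itself targets
    # distributed training, so this only shows program construction.
    import paddle.fluid as fluid

    main_program, startup_program = framework.Program(), framework.Program()
    with framework.program_guard(main_program, startup_program):
        x = fluid.layers.data(name='x', shape=[1024], dtype='float32')
        # 1024 * 16 = 16384 weight elements, matching the parameter size used
        # by test_momentum_with_dgc above.
        hidden = fluid.layers.fc(input=x, size=16)
        loss = fluid.layers.mean(hidden)
        # rampup_begin_step controls the step at which compression starts.
        dgc_opt = optimizer.DGCMomentumOptimizer(
            learning_rate=0.001, momentum=0.9, rampup_begin_step=0)
        dgc_opt.minimize(loss)
    return main_program, startup_program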


if __name__ == '__main__':
    unittest.main()