# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

from hybrid_parallel_mp_model import TestDistMPTraining

import paddle

# log = logging.getLogger("HybridParallel")
# log.setLevel(logging.WARNING)


class TestMPClipGrad(TestDistMPTraining):
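    """Runs the TestDistMPTraining scenario with gradient clipping enabled.

    Only build_optimizer is overridden: the SGD optimizer is configured with
    ClipGradByGlobalNorm so that clipping is exercised under model parallelism.
    """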
    def build_optimizer(self, model):
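        # Clip gradients so that their global L2 norm does not exceed 2.0.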
        grad_clip = paddle.nn.ClipGradByGlobalNorm(2.0)
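        # Exponentially decay the learning rate (gamma=0.999) on each scheduler step.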
        scheduler = paddle.optimizer.lr.ExponentialDecay(
            learning_rate=0.001, gamma=0.999, verbose=True
        )
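        # SGD driven by the decaying schedule; grad_clip is applied to the
        # model's parameters before each update.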
        optimizer = paddle.optimizer.SGD(
            learning_rate=scheduler,
            grad_clip=grad_clip,
            parameters=model.parameters(),
        )
        return optimizer


if __name__ == "__main__":
    unittest.main()