From e3964e5a431ec84e4477c0bd92bd7d4b5d26b8fa Mon Sep 17 00:00:00 2001
From: tangwei12
Date: Thu, 18 Oct 2018 10:04:11 +0800
Subject: [PATCH] lookup table bug fix about lr, test=develop (#13946)

---
 python/paddle/fluid/framework.py | 8 ++++++--
 python/paddle/fluid/optimizer.py | 5 +++--
 2 files changed, 9 insertions(+), 4 deletions(-)

diff --git a/python/paddle/fluid/framework.py b/python/paddle/fluid/framework.py
index 5f3111f363c..b07d0131a32 100644
--- a/python/paddle/fluid/framework.py
+++ b/python/paddle/fluid/framework.py
@@ -1522,13 +1522,17 @@ class Program(object):
             >>> with program.lr_schedule_guard():
             >>>     lr = lr * decay
         """
+
+        tmp_role = self._current_role
+        tmp_var = self._op_role_var
+
         OpRole = core.op_proto_and_checker_maker.OpRole
         self._current_role = OpRole.LRSched
         # TODO(typhoonzero): how to set target learning rate var
         self._op_role_var = []
         yield
-        self._op_role_var = []
-        self._current_role = OpRole.Forward
+        self._op_role_var = tmp_var
+        self._current_role = tmp_role
 
     def __str__(self):
         """
diff --git a/python/paddle/fluid/optimizer.py b/python/paddle/fluid/optimizer.py
index ed1784bd276..17af44afdde 100644
--- a/python/paddle/fluid/optimizer.py
+++ b/python/paddle/fluid/optimizer.py
@@ -15,7 +15,7 @@ from __future__ import print_function
 
 import re
 from collections import defaultdict
-from paddle.fluid.framework import Program, Variable, name_scope
+from paddle.fluid.framework import Program, Variable, name_scope, default_main_program
 from . import framework
 from . import layers
 from .backward import append_backward
@@ -111,7 +111,8 @@ class Optimizer(object):
         if param_lr == 1.0:
             return self._global_learning_rate()
         else:
-            return self._global_learning_rate() * param_lr
+            with default_main_program()._lr_schedule_guard():
+                return self._global_learning_rate() * param_lr
 
     def _create_accumulators(self, block, parameters):
         """Create all accumulators needed by the parameters
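
The framework.py hunk is the heart of the fix: on exit the guard used to reset
_current_role to OpRole.Forward and clear _op_role_var unconditionally,
clobbering whatever role the caller had active; it now saves the previous
state on entry and restores it on exit. A minimal runnable sketch of that
save/restore pattern follows — ProgramLike is a hypothetical stand-in, not
Paddle's Program, and role names are shown as plain strings rather than
core.op_proto_and_checker_maker.OpRole enum values:

    # sketch.py -- illustrative only, not the Paddle source
    import contextlib


    class ProgramLike(object):
        """Hypothetical stand-in for fluid.Program's op-role bookkeeping."""

        def __init__(self):
            self._current_role = "Forward"
            self._op_role_var = []

        @contextlib.contextmanager
        def _lr_schedule_guard(self):
            # The fix: remember whatever state was active before the guard...
            tmp_role = self._current_role
            tmp_var = self._op_role_var
            self._current_role = "LRSched"
            self._op_role_var = []
            yield
            # ...and restore it, instead of unconditionally resetting to
            # Forward as the old code did.
            self._op_role_var = tmp_var
            self._current_role = tmp_role


    prog = ProgramLike()
    prog._current_role = "Backward"  # e.g. set by an enclosing guard
    with prog._lr_schedule_guard():
        assert prog._current_role == "LRSched"
    assert prog._current_role == "Backward"  # old code left "Forward" here

The optimizer.py hunk then wraps the per-parameter scaling
(global_lr * param_lr) in this guard, so the multiply op is presumably tagged
with the LRSched role rather than the role that happened to be current; with
the old reset-to-Forward behavior, entering the guard there would also have
corrupted the role state of the surrounding optimizer ops.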