From a5f65d516fd3b05ec3ad80549e2fb3d839cd9905 Mon Sep 17 00:00:00 2001
From: Kaipeng Deng
Date: Mon, 19 Oct 2020 20:02:19 +0800
Subject: [PATCH] hapi/model step learning rate on batch end. (#27991)

* hapi/model step learning rate on batch end. test=develop
---
 python/paddle/hapi/model.py       | 18 +++++++++++++-----
 python/paddle/tests/test_model.py | 12 ++++++++++++
 2 files changed, 25 insertions(+), 5 deletions(-)

diff --git a/python/paddle/hapi/model.py b/python/paddle/hapi/model.py
index 5890d9760eb..4f36effe6dd 100644
--- a/python/paddle/hapi/model.py
+++ b/python/paddle/hapi/model.py
@@ -453,6 +453,12 @@ class StaticGraphAdapter(object):
             if len(name) > 0:
                 rets.insert(i, feed[name])
 
+        # step learning rate scheduler on each batch end
+        if self.model._optimizer and \
+            isinstance(self.model._optimizer._learning_rate,
+                       paddle.optimizer.lr.LRScheduler):
+            self.model._optimizer._learning_rate.step()
+
         # LoDTensor cannot be fetch as numpy directly
         rets = [np.array(v) for v in rets]
         if self.mode == 'test':
@@ -652,6 +658,13 @@ class DynamicGraphAdapter(object):
 
         self.model._optimizer.minimize(final_loss)
         self.model.network.clear_gradients()
+
+        # step learning rate scheduler on each batch end
+        if self.model._optimizer and \
+            isinstance(self.model._optimizer._learning_rate,
+                       paddle.optimizer.lr.LRScheduler):
+            self.model._optimizer._learning_rate.step()
+
         metrics = []
         for metric in self.model._metrics:
             metric_outs = metric.compute(*(to_list(outputs) + labels))
@@ -1461,11 +1474,6 @@ class Model(object):
 
                 cbks.on_end('eval', eval_logs)
 
-        # step learning rate scheduler on each epcoh end
-        if isinstance(self._optimizer._learning_rate,
-                      paddle.optimizer.lr.LRScheduler):
-            self._optimizer._learning_rate.step()
-
         cbks.on_end('train', logs)
 
         self._test_dataloader = None
diff --git a/python/paddle/tests/test_model.py b/python/paddle/tests/test_model.py
index 4e732c59eb2..bcb910a5ada 100644
--- a/python/paddle/tests/test_model.py
+++ b/python/paddle/tests/test_model.py
@@ -631,6 +631,7 @@ class TestModelWithLRScheduler(unittest.TestCase):
                 parameters=parameters)
             return optimizer
 
+        # dynamic test
         device = paddle.set_device('cpu')
         fluid.enable_dygraph(device)
         net = MyModel()
@@ -643,8 +644,19 @@ class TestModelWithLRScheduler(unittest.TestCase):
         dataset = MyDataset()
         model.fit(dataset, dataset, batch_size=4, epochs=10, num_workers=0)
 
+        # static test
         paddle.enable_static()
+        net = MyModel()
+        inputs = [InputSpec([None, 20], 'float32', 'x')]
+        labels = [InputSpec([None, 1], 'int64', 'label')]
+        optim = make_optimizer(net.parameters())
+        model = Model(net, inputs, labels)
+        model.prepare(optimizer=optim, loss=CrossEntropyLoss(reduction="sum"))
+
+        dataset = MyDataset()
+        model.fit(dataset, dataset, batch_size=4, epochs=10, num_workers=0)
+
 
 class TestRaiseError(unittest.TestCase):
     def test_input_without_name(self):
-- 
GitLab
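
What follows is a minimal usage sketch, not part of the patch, showing the
path this commit changes: when the optimizer passed to Model.prepare uses a
paddle.optimizer.lr.LRScheduler as its learning rate, fit() now calls
scheduler.step() at the end of every batch (in both the static and dynamic
graph adapters) rather than once per epoch in Model.fit. The Linear network,
the PiecewiseDecay boundaries/values, and the RandomDataset below are
illustrative assumptions, not taken from the patch:

import numpy as np
import paddle
from paddle.static import InputSpec

class RandomDataset(paddle.io.Dataset):
    # illustrative synthetic dataset, standing in for MyDataset in the test
    def __len__(self):
        return 64

    def __getitem__(self, idx):
        return (np.random.random([20]).astype('float32'),
                np.random.randint(0, 10, (1, )).astype('int64'))

net = paddle.nn.Linear(20, 10)
# any LRScheduler subclass takes this path; boundaries/values are arbitrary
scheduler = paddle.optimizer.lr.PiecewiseDecay(
    boundaries=[30, 60], values=[0.1, 0.01, 0.001])
optim = paddle.optimizer.SGD(learning_rate=scheduler,
                             parameters=net.parameters())

model = paddle.Model(net,
                     inputs=[InputSpec([None, 20], 'float32', 'x')],
                     labels=[InputSpec([None, 1], 'int64', 'label')])
model.prepare(optimizer=optim,
              loss=paddle.nn.CrossEntropyLoss(reduction="sum"))

# After this patch, scheduler.step() runs once per batch inside fit(), so
# the PiecewiseDecay boundaries count batches, not epochs: 64 samples at
# batch_size=4 give 16 steps per epoch, crossing boundary 30 in epoch 2.
model.fit(RandomDataset(), batch_size=4, epochs=2, num_workers=0)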