From bd2fd31ab386bf058985240440ae2f36feb92553 Mon Sep 17 00:00:00 2001
From: simson <526422051@qq.com>
Date: Tue, 5 May 2020 11:14:57 +0800
Subject: [PATCH] revert limitation of end_learning_rate

---
 mindspore/nn/optim/lamb.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mindspore/nn/optim/lamb.py b/mindspore/nn/optim/lamb.py
index e026b1c56..97a81a590 100755
--- a/mindspore/nn/optim/lamb.py
+++ b/mindspore/nn/optim/lamb.py
@@ -114,7 +114,7 @@ def _check_param_value(decay_steps, warmup_steps, start_learning_rate,
     _ = warmup_steps
     validator.check_float_positive('start_learning_rate', start_learning_rate, prim_name)
     validator.check_float_legal_value('start_learning_rate', start_learning_rate, prim_name)
-    validator.check_float_positive('end_learning_rate', end_learning_rate, prim_name)
+    validator.check_value_type("end_learning_rate", end_learning_rate, [float], prim_name)
     validator.check_float_legal_value('end_learning_rate', end_learning_rate, prim_name)
     validator.check_float_positive('power', power, prim_name)
     validator.check_float_legal_value('power', power, prim_name)
--
GitLab
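
Note on the change: the patch relaxes the check on `end_learning_rate` from "positive float" to "any float of valid type", so a value of 0.0 is no longer rejected. Below is a minimal sketch, assuming the Lamb optimizer's learning rate follows the standard polynomial-decay formula (this is an illustration, not MindSpore's actual `lamb.py` implementation), showing why a zero end learning rate is a meaningful configuration that the previous positivity check ruled out.

```python
# Illustration only: standard polynomial-decay schedule (hypothetical helper,
# not MindSpore's implementation). With end_learning_rate = 0.0 the learning
# rate decays all the way to zero, which check_float_positive used to reject.
def polynomial_decay_lr(step, decay_steps, start_learning_rate,
                        end_learning_rate, power):
    """Return the decayed learning rate at a given global step."""
    step = min(step, decay_steps)
    frac = 1.0 - step / decay_steps
    return (start_learning_rate - end_learning_rate) * frac ** power + end_learning_rate

# end_learning_rate = 0.0 is now accepted as a plain float.
for step in (0, 500, 1000):
    lr = polynomial_decay_lr(step, decay_steps=1000, start_learning_rate=0.1,
                             end_learning_rate=0.0, power=1.0)
    print(step, lr)  # 0.1 -> 0.05 -> 0.0
```

Type checking is still enforced via `validator.check_value_type`, and `check_float_legal_value` continues to reject NaN/Inf, so only the positivity constraint is lifted.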