From 8abca21d253ae27d16c2a80d1b2c77158b495945 Mon Sep 17 00:00:00 2001
From: lilei
Date: Mon, 6 Jul 2020 15:01:55 +0800
Subject: [PATCH] modify the loss scale annotation

---
 mindspore/nn/optim/proximal_ada_grad.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mindspore/nn/optim/proximal_ada_grad.py b/mindspore/nn/optim/proximal_ada_grad.py
index 75f3994e2..353006512 100644
--- a/mindspore/nn/optim/proximal_ada_grad.py
+++ b/mindspore/nn/optim/proximal_ada_grad.py
@@ -71,7 +71,7 @@ class ProximalAdagrad(Optimizer):
         l1 (float): l1 regularization strength, must be greater than or equal to zero. Default: 0.0.
         l2 (float): l2 regularization strength, must be greater than or equal to zero. Default: 0.0.
         use_locking (bool): If True use locks for update operation. Default: False.
-        loss_scale (float): Value for the loss scale. It should be equal to or greater than 1.0. Default: 1.0.
+        loss_scale (float): Value for the loss scale. It should be greater than 0.0. Default: 1.0.
         wegith_decay (float): Weight decay value to multiply weight, must be zero or positive value. Default: 0.0.
 
     Inputs:
--
GitLab
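
For reference, a minimal usage sketch (not part of the patch) of what the relaxed constraint means in practice: per the updated docstring, `loss_scale` only has to be strictly greater than 0.0 rather than >= 1.0. The network below is a placeholder, and the keyword arguments follow the parameter names shown in the hunk above; this assumes the runtime check matches the updated docstring.

```python
import mindspore.nn as nn

# Placeholder network; any nn.Cell exposing trainable_params() would do.
net = nn.Dense(16, 8)

# loss_scale=0.5 is valid under the updated docstring (strictly positive),
# whereas the old annotation required a value >= 1.0. Default remains 1.0.
optim = nn.ProximalAdagrad(net.trainable_params(),
                           accum=0.1,
                           learning_rate=0.001,
                           l1=0.0,
                           l2=0.0,
                           use_locking=False,
                           loss_scale=0.5,
                           weight_decay=0.0)
```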