Commit 5aeba82a authored by mindspore-ci-bot, committed by Gitee

!2112 add warmup_steps param check in AdamWeightDecayDynamicLR optimizer

Merge pull request !2112 from yoonlee666/adam
@@ -391,6 +391,7 @@ class AdamWeightDecayDynamicLR(Optimizer):
             raise RuntimeError(f"The {self.cls_name} optimizer cannot support group setting.")
         _check_param_value(beta1, beta2, eps, weight_decay, self.cls_name)
         _check_learning_rate_value(learning_rate, end_learning_rate, decay_steps, power, self.cls_name)
+        validator.check_integer('warmup_steps', warmup_steps, 0, Rel.GE, self.cls_name)
         # turn them to scalar when me support scalar/tensor mix operations
         self.global_step = Parameter(initializer(0, [1]), name="global_step")
         self.warmup_steps = Tensor(np.array([warmup_steps]).astype(np.float32))
......
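For context, the added `validator.check_integer('warmup_steps', warmup_steps, 0, Rel.GE, self.cls_name)` call requires `warmup_steps` to be a non-negative integer before it is converted to a float32 Tensor. The snippet below is a minimal standalone sketch of that constraint; the helper name and error messages are hypothetical and it is not MindSpore's actual validator code.

```python
# Minimal standalone sketch of the constraint added by this commit:
# warmup_steps must be an integer and >= 0. Hypothetical helper only.
def check_warmup_steps(warmup_steps, cls_name="AdamWeightDecayDynamicLR"):
    if not isinstance(warmup_steps, int) or isinstance(warmup_steps, bool):
        raise TypeError(f"For '{cls_name}', 'warmup_steps' must be an int, "
                        f"but got {type(warmup_steps).__name__}.")
    if warmup_steps < 0:
        raise ValueError(f"For '{cls_name}', 'warmup_steps' must be >= 0, "
                         f"but got {warmup_steps}.")
    return warmup_steps

check_warmup_steps(100)    # passes
check_warmup_steps(0)      # passes (warmup disabled)
# check_warmup_steps(-5)   # would raise ValueError
# check_warmup_steps(1.5)  # would raise TypeError
```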