From 33afeb315af10c294e5f4ae340af34b2bfafd51e Mon Sep 17 00:00:00 2001
From: ShenLiang
Date: Wed, 26 Aug 2020 11:37:42 +0800
Subject: [PATCH] fix the tanh (#26657)

* fix the tanh

* fix the learning rate
---
 python/paddle/optimizer/optimizer.py | 7 +++++--
 python/paddle/tensor/math.py         | 1 +
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/python/paddle/optimizer/optimizer.py b/python/paddle/optimizer/optimizer.py
index 2c2f6f1ce7..cb602ff0b3 100644
--- a/python/paddle/optimizer/optimizer.py
+++ b/python/paddle/optimizer/optimizer.py
@@ -21,6 +21,7 @@ from collections import defaultdict
 
 from paddle.fluid.distribute_lookup_table import find_distributed_lookup_table
 from paddle.fluid.framework import Program, Variable, name_scope, default_main_program, default_startup_program, device_guard
+import paddle
 
 from ..fluid import framework
 from ..fluid import layers
@@ -308,7 +309,8 @@ class Optimizer(object):
                     name=unique_name.generate("learning_rate"),
                     shape=[1],
                     value=float(self._learning_rate),
-                    dtype='float32' if self._dtype is None else self._dtype,
+                    dtype=paddle.get_default_dtype()
+                    if self._dtype is None else self._dtype,
                     persistable=True)
         # get learning rate Tensor from LearningRateDecay
         elif isinstance(self._learning_rate, LearningRateDecay):
@@ -336,7 +338,8 @@ class Optimizer(object):
                 name=unique_name.generate("learning_rate"),
                 shape=[1],
                 value=float(self._learning_rate),
-                dtype='float32' if self._dtype is None else self._dtype,
+                dtype=paddle.get_default_dtype()
+                if self._dtype is None else self._dtype,
                 persistable=True)
 
     @framework.dygraph_only
diff --git a/python/paddle/tensor/math.py b/python/paddle/tensor/math.py
index 4c7eef5fa6..9dfb31a5ac 100755
--- a/python/paddle/tensor/math.py
+++ b/python/paddle/tensor/math.py
@@ -2090,6 +2090,7 @@ def tanh(x, name=None):
         return core.ops.tanh(x)
 
     check_variable_and_dtype(x, 'x', ['float16', 'float32', 'float64'], 'tanh')
+    check_type(x, 'x', (Variable), 'tanh')
     helper = LayerHelper('tanh', **locals())
     out = helper.create_variable_for_type_inference(x.dtype)
     helper.append_op(type='tanh', inputs={'X': x}, outputs={'Out': out})
--
GitLab
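
Note (annotation, not part of the patch): a minimal sketch of the behaviour these hunks target, assuming paddle 2.x-era APIs (paddle.set_default_dtype, paddle.disable_static, paddle.nn.Linear, paddle.optimizer.Adam, paddle.tanh). The names and values below are illustrative and do not come from the commit itself.

# Illustrative sketch only -- assumes paddle 2.x dygraph APIs; not part of the patch.
import paddle

paddle.disable_static()                  # make sure we are in dygraph (imperative) mode
paddle.set_default_dtype('float64')      # change the global default dtype

# Learning-rate hunks: when no explicit dtype is set on the optimizer, the
# learning-rate variable is now created with paddle.get_default_dtype()
# ('float64' here) instead of the previously hard-coded 'float32'.
linear = paddle.nn.Linear(4, 4)
adam = paddle.optimizer.Adam(learning_rate=0.1, parameters=linear.parameters())

# tanh hunk: Tensor/Variable input works as before; in the static-graph code
# path, a plain Python object (e.g. a list) now trips the added check_type(...)
# and raises a clear TypeError instead of failing less directly later on.
x = paddle.ones([2, 2])
y = paddle.tanh(x)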