diff --git a/python/paddle/nn/functional/activation.py b/python/paddle/nn/functional/activation.py
index 0ec80b3c5bb931e06431fd581f60682fb3d4ad61..4dc3e3a62ec5d9b78c95feb231b0d468ec7e55bf 100644
--- a/python/paddle/nn/functional/activation.py
+++ b/python/paddle/nn/functional/activation.py
@@ -1110,15 +1110,15 @@ def softmax(x, axis=-1, dtype=None, name=None):
         use_cudnn = True
         if dtype is None:
             check_variable_and_dtype(
-                x, 'x', ['float16', 'float32', 'float64'], 'softmax'
+                x, 'x', ['float16', 'bfloat16', 'float32', 'float64'], 'softmax'
             )
         else:
             check_dtype(
                 dtype,
                 'dtype',
-                ['float32', 'float64'],
+                ['float16', 'bfloat16', 'float32', 'float64'],
                 'softmax',
-                'If dtype is not None, it only support float32 or float64.',
+                'If dtype is not None, it only support float16, bfloat16, float32 or float64.',
             )
 
         helper = LayerHelper("softmax", **locals())
diff --git a/python/paddle/nn/layer/activation.py b/python/paddle/nn/layer/activation.py
index ebe305d7869d1cdbf9e81c8daa59c815cd8aa3f3..6c85ae646b71c8af713d46c210dc805034d17754 100644
--- a/python/paddle/nn/layer/activation.py
+++ b/python/paddle/nn/layer/activation.py
@@ -1324,7 +1324,7 @@ class Softmax(Layer):
         self._name = name
 
     def forward(self, x):
-        return F.softmax(x, self._axis, self._dtype, self._name)
+        return F.softmax(x, self._axis, name=self._name)
 
     def extra_repr(self):
         name_str = ', name={}'.format(self._name) if self._name else ''
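
Usage sketch (not part of the patch): a minimal illustration of what the widened dtype whitelists permit, assuming a Paddle build whose softmax kernels actually handle bfloat16 and running on the static-graph path, which is where check_variable_and_dtype and check_dtype are evaluated. The program and variable names below are hypothetical.

    import paddle
    import paddle.nn.functional as F

    paddle.enable_static()  # the dtype checks touched above only run in static-graph mode
    prog = paddle.static.Program()
    with paddle.static.program_guard(prog):
        # dtype=None branch: a bfloat16 input now passes check_variable_and_dtype
        # (assumes this build accepts 'bfloat16' in paddle.static.data).
        xb = paddle.static.data(name='xb', shape=[2, 3], dtype='bfloat16')
        yb = F.softmax(xb, axis=-1)

        # Explicit-dtype branch: casting to bfloat16 now passes check_dtype,
        # where it was previously rejected as "only float32 or float64".
        xf = paddle.static.data(name='xf', shape=[2, 3], dtype='float32')
        yf = F.softmax(xf, axis=-1, dtype='bfloat16')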