From a049dff78f669a9b25ae09d69676d13c37aaf6b8 Mon Sep 17 00:00:00 2001
From: GaoWei8 <53294385+GaoWei8@users.noreply.github.com>
Date: Wed, 25 Nov 2020 20:31:15 +0800
Subject: [PATCH] Modify the default setting of softmax cudnn (#28672)

---
 python/paddle/fluid/layers/nn.py          | 2 +-
 python/paddle/nn/functional/activation.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py
index 429b9b0b5a..6b1e782239 100755
--- a/python/paddle/fluid/layers/nn.py
+++ b/python/paddle/fluid/layers/nn.py
@@ -1198,7 +1198,7 @@ def chunk_eval(input,
 
 
 @deprecated(since="2.0.0", update_to="paddle.nn.functional.softmax")
-def softmax(input, use_cudnn=False, name=None, axis=-1):
+def softmax(input, use_cudnn=True, name=None, axis=-1):
     r"""
     This operator implements the softmax layer. The calculation process is as follows:
 
diff --git a/python/paddle/nn/functional/activation.py b/python/paddle/nn/functional/activation.py
index 915668de19..45ffd422ac 100644
--- a/python/paddle/nn/functional/activation.py
+++ b/python/paddle/nn/functional/activation.py
@@ -843,7 +843,7 @@ def softmax(x, axis=-1, dtype=None, name=None):
 
     if (dtype is not None) and (not isinstance(dtype, core.VarDesc.VarType)):
         dtype = convert_np_dtype_to_dtype_(dtype)
-    use_cudnn = True if axis is -1 else False
+    use_cudnn = True
 
     if in_dygraph_mode():
         outs_cast = x if dtype is None \
-- 
GitLab
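
Note (not part of the patch): a minimal usage sketch of the API whose default
changes. It assumes a PaddlePaddle 2.0-era GPU build and uses only the
paddle.nn.functional.softmax signature visible in the diff above; the tensor
shape and variable names are illustrative.

    import paddle
    import paddle.nn.functional as F

    x = paddle.rand([2, 3, 4])  # dygraph is the default mode in Paddle 2.0

    # Before this patch, only axis=-1 requested the cuDNN softmax kernel;
    # after it, use_cudnn defaults to True for any axis on GPU builds.
    y_last = F.softmax(x, axis=-1)
    y_mid = F.softmax(x, axis=1)

    print(y_last.shape, y_mid.shape)  # [2, 3, 4] [2, 3, 4]

Apart from the default itself, the second hunk also drops the
"use_cudnn = True if axis is -1 else False" expression, which compared an
integer with "is"; that relies on CPython's small-int interning and triggers a
SyntaxWarning on Python 3.8+, so the unconditional assignment is also the more
robust form.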