From 3d77360b8978c512900cde3bacc05444785beac9 Mon Sep 17 00:00:00 2001
From: caoying03
Date: Tue, 26 Sep 2017 17:49:10 +0800
Subject: [PATCH] add negative clipping for softmax.

---
 paddle/operators/math/softmax.h                     | 11 ++++++++++-
 python/paddle/v2/framework/tests/test_softmax_op.py |  2 +-
 2 files changed, 11 insertions(+), 2 deletions(-)

diff --git a/paddle/operators/math/softmax.h b/paddle/operators/math/softmax.h
index 08dafed971e..3d2f0d0aecf 100644
--- a/paddle/operators/math/softmax.h
+++ b/paddle/operators/math/softmax.h
@@ -25,6 +25,14 @@ template <typename T, int MajorType = Eigen::RowMajor,
           typename IndexType = Eigen::DenseIndex>
 using EigenMatrix = framework::EigenMatrix<T, MajorType, IndexType>;
 
+template <typename T>
+struct ValueClip {
+  HOSTDEVICE T operator()(const T& x) const {
+    const T kThreshold = -64.;
+    return x < kThreshold ? kThreshold : x;
+  }
+};
+
 template <typename Place, typename T>
 class SoftmaxFunctor {
  public:
@@ -47,7 +55,8 @@ class SoftmaxFunctor {
                            logits.maximum(along_class)
                                .eval()
                                .reshape(batch_by_one)
-                               .broadcast(one_by_class));
+                               .broadcast(one_by_class))
+                               .unaryExpr(ValueClip<T>());
 
     softmax.device(context.GetEigenDevice<Place>()) = shifted_logits.exp();
     softmax.device(context.GetEigenDevice<Place>()) =
diff --git a/python/paddle/v2/framework/tests/test_softmax_op.py b/python/paddle/v2/framework/tests/test_softmax_op.py
index 1b948f252fa..b41c810d9a6 100644
--- a/python/paddle/v2/framework/tests/test_softmax_op.py
+++ b/python/paddle/v2/framework/tests/test_softmax_op.py
@@ -5,7 +5,7 @@ from op_test import OpTest
 
 
 def stable_softmax(x):
     """Compute the softmax of vector x in a numerically stable way."""
-    shiftx = x - np.max(x)
+    shiftx = (x - np.max(x)).clip(-64.)
     exps = np.exp(shiftx)
     return exps / np.sum(exps)
-- 
GitLab
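
Note (not part of the patch): a minimal NumPy sketch of why the clip matters. Without it, the max-shifted logit of a strongly dominated class can be so negative that exp() underflows to exactly 0, and a downstream log(prob), e.g. in cross-entropy, produces -inf; clipping the shifted logits at -64 keeps every probability tiny but representable, since exp(-64) ~= 1.6e-28 is well above even float32's smallest normal value. The input values below are illustrative, not from the patch.

    import numpy as np

    def stable_softmax(x):
        # Mirror of the patched test helper: clip the shifted logits at -64
        # so exp() never underflows to exactly zero.
        shiftx = (x - np.max(x)).clip(-64.)
        exps = np.exp(shiftx)
        return exps / np.sum(exps)

    logits = np.array([1000., 0.])   # illustrative: one strongly dominant logit

    # Without clipping: exp(-1000) underflows to 0, so log() yields -inf.
    naive = np.exp(logits - np.max(logits))
    naive /= np.sum(naive)
    print(np.log(naive))             # [0., -inf]

    # With clipping: the small probability stays nonzero, log() stays finite.
    probs = stable_softmax(logits)
    print(np.log(probs))             # approximately [0., -64.]

The threshold -64 is presumably chosen so the clipped probability floor (about 1.6e-28) survives single-precision arithmetic with a wide margin while leaving realistic logit gaps untouched.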