diff --git a/paddle/gserver/activations/ActivationFunction.cpp b/paddle/gserver/activations/ActivationFunction.cpp
index 8b7b2e9b65898950e036ebc023cd28990cef303f..f5a41b66bf09a4abc5ae7b64f227ca52461408f5 100644
--- a/paddle/gserver/activations/ActivationFunction.cpp
+++ b/paddle/gserver/activations/ActivationFunction.cpp
@@ -212,6 +212,39 @@ Error __must_check backward(Argument& act) {
 }
 END_DEFINE_ACTIVATION(sequence_softmax)
 
+/**
+ * @brief SoftSign Activation.
+ * \f[
+ * f(z) = \frac{z}{1 + |z|}
+ * \f]
+ */
+BEGIN_DEFINE_ACTIVATION(softsign)
+private:
+MatrixPtr denominator_;
+
+Error __must_check forward(Argument& act) {
+  size_t height = act.value->getHeight();
+  size_t width = act.value->getWidth();
+  Matrix::resizeOrCreate(
+      denominator_, height, width, false, useGpu(act.deviceId));
+  // Build denominator_ = 1 + |z|, then y = z / (1 + |z|) in place.
+  denominator_->assign(*act.value);
+  denominator_->abs2();
+  denominator_->add(1.);
+
+  act.value->dotDiv(*act.value, *denominator_);
+  return Error();
+}
+
+Error __must_check backward(Argument& act) {
+  // f'(z) = 1 / (1 + |z|)^2; reuse the denominator saved in forward.
+  denominator_->square2();
+  denominator_->scalarDiv(*denominator_, 1.);
+  act.grad->dotMul(*act.grad, *denominator_);
+  return Error();
+}
+END_DEFINE_ACTIVATION(softsign)
+
 /**
  * @brief Relu Activation.
  * forward. y = max(0, z)
diff --git a/python/paddle/trainer_config_helpers/activations.py b/python/paddle/trainer_config_helpers/activations.py
index c749fa827fea4a808ab715dcb3442aa24d06a4d2..b4c6e7fc305adacdaa64fad857474ea1ee2cdc97 100644
--- a/python/paddle/trainer_config_helpers/activations.py
+++ b/python/paddle/trainer_config_helpers/activations.py
@@ -17,7 +17,8 @@ __all__ = [
     "IdentityActivation", "LinearActivation", 'SequenceSoftmaxActivation',
     'ExpActivation', "ReluActivation", "BReluActivation", "SoftReluActivation",
     "STanhActivation", "AbsActivation", "SquareActivation", "BaseActivation",
-    "LogActivation", "SqrtActivation", "ReciprocalActivation"
+    "LogActivation", "SqrtActivation", "ReciprocalActivation",
+    "SoftSignActivation"
 ]
 
 
@@ -243,8 +244,20 @@ class ReciprocalActivation(BaseActivation):
     Reciprocal Activation.
 
     .. math::
-       f(z) = 1/z
+       f(z)=\\frac{1}{z}
     """
 
     def __init__(self):
         BaseActivation.__init__(self, 'reciprocal', False)
+
+
+class SoftSignActivation(BaseActivation):
+    """
+    SoftSign Activation.
+
+    .. math::
+       f(z)=\\frac{z}{1 + |z|}
+    """
+
+    def __init__(self):
+        BaseActivation.__init__(self, 'softsign', False)
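For reference, the forward/backward math in this patch can be sanity-checked outside Paddle. The sketch below is illustrative only (plain NumPy; none of it is part of the patch): it mirrors the C++ implementation, including the f'(z) = 1 / (1 + |z|)^2 gradient that backward builds from the saved denominator, and verifies it against a finite-difference estimate:

    import numpy as np

    def softsign_forward(z):
        # y = z / (1 + |z|), as in the forward pass above.
        return z / (1.0 + np.abs(z))

    def softsign_backward(z, grad_out):
        # f'(z) = 1 / (1 + |z|)^2, the quantity the backward pass
        # obtains by squaring and inverting the saved denominator.
        return grad_out / (1.0 + np.abs(z)) ** 2

    z = np.linspace(-3.0, 3.0, 7)
    eps = 1e-6
    numeric = (softsign_forward(z + eps) - softsign_forward(z - eps)) / (2 * eps)
    analytic = softsign_backward(z, np.ones_like(z))
    assert np.allclose(numeric, analytic, atol=1e-5)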
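On the Python side, SoftSignActivation is then selectable like any other activation in a v1 trainer config. A minimal sketch, assuming the usual data_layer/fc_layer helpers (the layer names and sizes here are made up for illustration):

    from paddle.trainer_config_helpers import *

    data = data_layer(name='input', size=100)
    # The 'softsign' string passed to BaseActivation is resolved on the
    # C++ side via the new BEGIN_DEFINE_ACTIVATION(softsign) registration.
    hidden = fc_layer(input=data, size=128, act=SoftSignActivation())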