/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#ifndef HL_ACTIVATION_FUNCTIONS_H_
#define HL_ACTIVATION_FUNCTIONS_H_

#include "hl_functions.h"
#include "paddle/operators/math/lstm_compute.h"

/**
 * Activation functions: sigmoid, relu, tanh, and linear.
 */
#define FLOAT_ACTIVE_FUNCTION                                   \
  {                                                             \
    hppl::typef::sigmoid, hppl::typef::relu, hppl::typef::tanh, \
        hppl::typef::linear                                     \
  }

#define DOUBLE_ACTIVE_FUNCTION                                  \
  {                                                             \
    hppl::typed::sigmoid, hppl::typed::relu, hppl::typed::tanh, \
        hppl::typed::linear                                     \
  }

#define AVX_ACTIVE_FUNCTION \
  { hppl::sigmoid, hppl::relu, hppl::tanh, hppl::linear }
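// The macro entries above are plain activation functions declared in
// hl_functions.h. A minimal sketch of what the float ("typef") versions
// are assumed to look like -- the real definitions live in hl_functions.h
// and the exact signatures there may differ:
//
//   namespace hppl {
//   namespace typef {
//   inline float sigmoid(float a) { return 1.0f / (1.0f + expf(-a)); }
//   inline float relu(float a) { return a > 0.0f ? a : 0.0f; }
//   inline float tanh(float a) { return tanhf(a); }
//   inline float linear(float a) { return a; }
//   }  // namespace typef
//   }  // namespace hppl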
namespace hppl {

using activation_mode_t = paddle::operators::math::activation_mode_t;

/**
 * hppl supports the sigmoid, relu, tanh, and linear activation functions
 * for neural networks' forward and backward passes.
 */
template <class T>
class Active {
 public:
  typedef T (*forward)(T);
  typedef T (*backward)(T, T);
};

template <typename T>
struct ForwardActType;

template <>
struct ForwardActType<float> {
  using type = Active<float>::forward;
};

template <>
struct ForwardActType<double> {
  using type = Active<double>::forward;
};

template <typename T>
struct BackwardActType;

template <>
struct BackwardActType<float> {
  using type = Active<float>::backward;
};

template <>
struct BackwardActType<double> {
  using type = Active<double>::backward;
};

#ifdef __NVCC__
namespace gpu {
static __device__ Active<float>::forward forward[] = FLOAT_ACTIVE_FUNCTION;
static __device__ Active<float>::backward backward[] = FLOAT_ACTIVE_FUNCTION;

static __device__ Active<double>::forward forward_d[] = DOUBLE_ACTIVE_FUNCTION;
static __device__ Active<double>::backward backward_d[] =
    DOUBLE_ACTIVE_FUNCTION;

template <typename T>
struct ForwardAct {
  __device__ typename ForwardActType<T>::type operator()(
      activation_mode_t type);
};

template <>
struct ForwardAct<float> {
  __device__ ForwardActType<float>::type operator()(activation_mode_t type) {
    return forward[type];
  }
};

template <>
struct ForwardAct<double> {
  __device__ ForwardActType<double>::type operator()(activation_mode_t type) {
    return forward_d[type];
  }
};

template <typename T>
struct BackwardAct {
  __device__ typename BackwardActType<T>::type operator()(
      activation_mode_t type);
};

template <>
struct BackwardAct<float> {
  __device__ BackwardActType<float>::type operator()(activation_mode_t type) {
    return backward[type];
  }
};

template <>
struct BackwardAct<double> {
  __device__ BackwardActType<double>::type operator()(activation_mode_t type) {
    return backward_d[type];
  }
};

}  // namespace gpu
#else
namespace cpu {
static Active<float>::forward forward[] = FLOAT_ACTIVE_FUNCTION;
static Active<float>::backward backward[] = FLOAT_ACTIVE_FUNCTION;

static Active<double>::forward forward_d[] = DOUBLE_ACTIVE_FUNCTION;
static Active<double>::backward backward_d[] = DOUBLE_ACTIVE_FUNCTION;

template <typename T>
struct ForwardAct {
  typename ForwardActType<T>::type operator()(activation_mode_t type);
};

template <>
struct ForwardAct<float> {
  ForwardActType<float>::type operator()(activation_mode_t type) {
    return forward[type];
  }
};

template <>
struct ForwardAct<double> {
  ForwardActType<double>::type operator()(activation_mode_t type) {
    return forward_d[type];
  }
};

template <typename T>
struct BackwardAct {
  typename BackwardActType<T>::type operator()(activation_mode_t type);
};

template <>
struct BackwardAct<float> {
  BackwardActType<float>::type operator()(activation_mode_t type) {
    return backward[type];
  }
};

template <>
struct BackwardAct<double> {
  BackwardActType<double>::type operator()(activation_mode_t type) {
    return backward_d[type];
  }
};

}  // namespace cpu

#ifdef __AVX__
namespace avx {
static Active<__m256>::forward forward[] = AVX_ACTIVE_FUNCTION;
static Active<__m256>::backward backward[] = AVX_ACTIVE_FUNCTION;
}  // namespace avx
#endif

#endif

}  // namespace hppl

#endif  // HL_ACTIVATION_FUNCTIONS_H_
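// A minimal usage sketch (assumptions: activation_mode_t provides an
// enumerator such as HL_ACTIVATION_SIGMOID in lstm_compute.h, and the
// translation unit is compiled without __NVCC__, so the cpu tables are
// the ones selected):
//
//   hppl::cpu::ForwardAct<float> get_forward;
//   hppl::ForwardActType<float>::type act = get_forward(HL_ACTIVATION_SIGMOID);
//   float y = act(0.5f);  // apply the selected activation to one value
//
// Under __NVCC__ the equivalent functor lives in hppl::gpu and must be
// invoked from device code, since its tables are __device__ arrays.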