From f5807670a7d4a00ae95b0fb566dee6ccf39da7cd Mon Sep 17 00:00:00 2001
From: yangyaming
Date: Thu, 7 Sep 2017 11:21:25 +0800
Subject: [PATCH] Fix typos and use HOSTDEVICE instead.

---
 paddle/operators/smooth_l1_loss_op.h | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/paddle/operators/smooth_l1_loss_op.h b/paddle/operators/smooth_l1_loss_op.h
index 3e474038584..bb823a56a32 100644
--- a/paddle/operators/smooth_l1_loss_op.h
+++ b/paddle/operators/smooth_l1_loss_op.h
@@ -15,6 +15,7 @@
 #pragma once
 #include "paddle/framework/eigen.h"
 #include "paddle/framework/op_registry.h"
+#include "paddle/platform/hostdevice.h"
 
 namespace paddle {
 namespace operators {
@@ -28,10 +29,10 @@ template <typename T, int MajorType = Eigen::RowMajor,
 using EigenMatrix = framework::EigenMatrix<T, MajorType, IndexType>;
 
 template <typename T>
-struct SmoothL1LossFoward {
-  __host__ __device__ SmoothL1LossFoward(const T& sigma2) : sigma2(sigma2) {}
+struct SmoothL1LossForward {
+  HOSTDEVICE SmoothL1LossForward(const T& sigma2) : sigma2(sigma2) {}
 
-  __host__ __device__ T operator()(const T& val) const {
+  HOSTDEVICE T operator()(const T& val) const {
     T abs_val = std::abs(val);
     if (abs_val < 1.0 / sigma2) {
       return 0.5 * val * val * sigma2;
@@ -80,7 +81,7 @@ class SmoothL1LossKernel : public framework::OpKernel {
                                   context.GetPlace());
     auto errors = EigenVector<T>::Flatten(paddle_errors);
     // apply smooth l1 forward
-    errors.device(place) = diff.unaryExpr(SmoothL1LossFoward<T>(sigma2));
+    errors.device(place) = diff.unaryExpr(SmoothL1LossForward<T>(sigma2));
 
     // multiply outside weight
     if (has_weight) {
@@ -99,9 +100,9 @@ class SmoothL1LossKernel : public framework::OpKernel {
 
 template <typename T>
 struct SmoothL1LossBackward {
-  __host__ __device__ SmoothL1LossBackward(const T& sigma2) : sigma2(sigma2) {}
+  HOSTDEVICE SmoothL1LossBackward(const T& sigma2) : sigma2(sigma2) {}
 
-  __host__ __device__ T operator()(const T& val) const {
+  HOSTDEVICE T operator()(const T& val) const {
     T abs_val = std::abs(val);
     if (abs_val < 1.0 / sigma2) {
       return sigma2 * val;
-- 
GitLab
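
For reference, the newly included paddle/platform/hostdevice.h is expected to supply the HOSTDEVICE qualifier used above. That header is not part of this patch; the following is a minimal sketch of the conventional definition, assuming HOSTDEVICE expands to __host__ __device__ only when the translation unit is compiled by NVCC and to nothing otherwise, so the same functor source builds for both CPU-only and GPU targets:

    // hostdevice.h -- minimal sketch (assumed, not taken from this patch)
    #pragma once

    #ifdef __CUDACC__
    // NVCC: mark the annotated functions callable from both host and device code.
    #define HOSTDEVICE __host__ __device__
    #else
    // Plain C++ compiler: the qualifier disappears entirely.
    #define HOSTDEVICE
    #endif

With a macro like this, SmoothL1LossForward<T> and SmoothL1LossBackward<T> need no explicit __host__ __device__ annotations, and the operator header remains compilable when CUDA is not available.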