diff --git a/mace/kernels/activation.h b/mace/kernels/activation.h
index c9f8dac5873a3cedcac4ff4cddd92c89b91ef9a3..72e52b67cfef1e3a230c78fb94edc72fd5ca397f 100644
--- a/mace/kernels/activation.h
+++ b/mace/kernels/activation.h
@@ -116,7 +116,7 @@ class ActivationFunctor {
     const T *input_ptr = input->data<T>();
     T *output_ptr = output->mutable_data<T>();
     if (activation_ == PRELU) {
-      MACE_CHECK(alpha != nullptr) << "PReLU's alpha parameter shouldn't be null";
+      MACE_CHECK_NOTNULL(alpha);
       const T *alpha_ptr = alpha->data<T>();
       PReLUActivation(input_ptr, output->size(), input->dim(3), alpha_ptr, output_ptr);
     } else {
diff --git a/mace/kernels/opencl/activation_opencl.cc b/mace/kernels/opencl/activation_opencl.cc
index 9521f82e3bc6ca25feb2aa8f299e3f604279e1c5..75922a9eb17af3b9790a283efaf3b1d9581c2f8c 100644
--- a/mace/kernels/opencl/activation_opencl.cc
+++ b/mace/kernels/opencl/activation_opencl.cc
@@ -62,7 +62,7 @@ void ActivationFunctor::operator()(const Tensor *input,
     int idx = 0;
    kernel_.setArg(idx++, *(input->opencl_image()));
    if (activation_ == PRELU) {
-      MACE_CHECK(alpha != nullptr) << "PReLU's alpha parameter shouldn't be null";
+      MACE_CHECK_NOTNULL(alpha);
      kernel_.setArg(idx++, *(alpha->opencl_image()));
    }
    kernel_.setArg(idx++, static_cast<float>(relux_max_limit_));
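
Note on the change above: both hunks swap a hand-rolled null check, MACE_CHECK(alpha != nullptr) << "...", for the dedicated MACE_CHECK_NOTNULL(alpha) macro. The sketch below illustrates how such a pair of CHECK-style macros is commonly implemented. It is an assumption for illustration only; SKETCH_CHECK, SKETCH_CHECK_NOTNULL, and CheckFailureStream are hypothetical names and not MACE's actual definitions, which live in its logging utilities.

    // Minimal sketch, under the assumptions stated above, of CHECK-style macros.
    #include <cstdlib>
    #include <iostream>
    #include <sstream>

    // Accumulates a failure message and aborts the process when destroyed,
    // i.e. at the end of the full statement that streamed into it.
    class CheckFailureStream {
     public:
      explicit CheckFailureStream(const char *expr) {
        stream_ << "Check failed: " << expr << " ";
      }
      ~CheckFailureStream() {
        std::cerr << stream_.str() << std::endl;
        std::abort();
      }
      std::ostream &stream() { return stream_; }

     private:
      std::ostringstream stream_;
    };

    // General form: caller supplies the condition and may stream extra context.
    #define SKETCH_CHECK(condition) \
      if (!(condition)) CheckFailureStream(#condition).stream()

    // Null-pointer form: condition and message are derived from the pointer
    // expression itself, so call sites stay short, which is the point of the
    // refactor in this diff.
    #define SKETCH_CHECK_NOTNULL(ptr) \
      SKETCH_CHECK((ptr) != nullptr) << #ptr << " must not be null"

    int main() {
      const float *alpha = nullptr;
      // Old style: explicit condition plus a hand-written message.
      // SKETCH_CHECK(alpha != nullptr) << "PReLU's alpha parameter shouldn't be null";
      // New style: one macro, message derived from the expression; this call
      // aborts with "Check failed: (alpha) != nullptr alpha must not be null".
      SKETCH_CHECK_NOTNULL(alpha);
      return 0;
    }

In either style the failing check aborts the process; the NOTNULL form simply removes the boilerplate condition and message at each call site.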