Commit fc0084f4 authored by liuqi

Replace MACE_CHECK with MACE_CHECK_NOTNULL.

Parent 07280ea3
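The difference between the two macros: MACE_CHECK takes an arbitrary condition and lets the caller stream an explanatory message after it, while MACE_CHECK_NOTNULL bakes the null-pointer message in, so call sites shrink to a single line with consistent wording. Below is a minimal sketch of how such a pair of macros can be defined; this is an illustration under assumed semantics, not MACE's actual implementation, and `FatalLogger` is a hypothetical helper standing in for MACE's real logging utilities.

```cpp
#include <cstdlib>
#include <iostream>

// Minimal fatal logger: prints the source location, streams the message,
// and aborts the process when destroyed at the end of the statement.
class FatalLogger {
 public:
  FatalLogger(const char *file, int line) {
    std::cerr << file << ":" << line << ": ";
  }
  template <typename T>
  FatalLogger &operator<<(const T &value) {
    std::cerr << value;
    return *this;
  }
  ~FatalLogger() {
    std::cerr << std::endl;
    std::abort();
  }
};

// Generic check: callers append their own message via operator<<.
#define MACE_CHECK(condition) \
  if (!(condition)) FatalLogger(__FILE__, __LINE__) << "Check failed: " #condition ". "

// Null-pointer check: the message is derived from the pointer's name,
// so call sites stay short and the wording stays uniform.
#define MACE_CHECK_NOTNULL(ptr) \
  MACE_CHECK((ptr) != nullptr) << #ptr " must not be null."
```

With definitions like these, `MACE_CHECK_NOTNULL(alpha);` fails with a message generated from the variable name, instead of each call site repeating the prose by hand, which is the point of this commit's change.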
@@ -116,7 +116,7 @@ class ActivationFunctor {
     const T *input_ptr = input->data<T>();
     T *output_ptr = output->mutable_data<T>();
     if (activation_ == PRELU) {
-      MACE_CHECK(alpha != nullptr) << "PReLU's alpha parameter shouldn't be null";
+      MACE_CHECK_NOTNULL(alpha);
       const T *alpha_ptr = alpha->data<T>();
       PReLUActivation(input_ptr, output->size(), input->dim(3), alpha_ptr, output_ptr);
     } else {
@@ -62,7 +62,7 @@ void ActivationFunctor<DeviceType::OPENCL, T>::operator()(const Tensor *input,
     int idx = 0;
     kernel_.setArg(idx++, *(input->opencl_image()));
     if (activation_ == PRELU) {
-      MACE_CHECK(alpha != nullptr) << "PReLU's alpha parameter shouldn't be null";
+      MACE_CHECK_NOTNULL(alpha);
       kernel_.setArg(idx++, *(alpha->opencl_image()));
     }
     kernel_.setArg(idx++, static_cast<float>(relux_max_limit_));