未验证 提交 b628c316 编写于 作者: F feng_shuai 提交者: GitHub

fix: delete macro INFERENCE (#37130)

上级 df7cc457
...@@ -74,16 +74,9 @@ class SoftmaxKernel : public framework::OpKernel<T> { ...@@ -74,16 +74,9 @@ class SoftmaxKernel : public framework::OpKernel<T> {
Tensor X_2d, Out_2d; Tensor X_2d, Out_2d;
X_2d.ShareDataWith(*X).Resize({n, d}); X_2d.ShareDataWith(*X).Resize({n, d});
Out_2d.ShareDataWith(*Out).Resize({n, d}); Out_2d.ShareDataWith(*Out).Resize({n, d});
#ifdef PADDLE_ON_INFERENCE
math::SoftmaxFunctor<DeviceContext, T, true>()(
context.template device_context<DeviceContext>(), axis_dim, &X_2d,
&Out_2d);
#else
math::SoftmaxFunctor<DeviceContext, T, false>()( math::SoftmaxFunctor<DeviceContext, T, false>()(
context.template device_context<DeviceContext>(), axis_dim, &X_2d, context.template device_context<DeviceContext>(), axis_dim, &X_2d,
&Out_2d); &Out_2d);
#endif
} }
}; };
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册