提交 eb78c1d8 编写于 作者: J jiaopu 提交者: MaxwellDing

CHECK some params (fuse_relu / exclusive) for unsupported values

上级 b50af450
......@@ -50,6 +50,9 @@ int ConvConverter(void* ctx, OpLite* op, KernelBase* kernel) {
CHECK_EQ(input_dims.size(), 4);
CHECK_EQ(filter_dims.size(), 4);
CHECK(!(op_info->HasAttr("fuse_relu") &&
(op_info->GetAttr<bool>("fuse_relu") == true)))
<< "UnSupported param fuse_relu is true!";
const auto strides = op_info->GetAttr<std::vector<int>>("strides");
auto dilations = op_info->GetAttr<std::vector<int>>("dilations");
auto paddings = op_info->GetAttr<std::vector<int>>("paddings");
......
......@@ -44,6 +44,8 @@ int DropoutConverter(void* ctx, OpLite* op, KernelBase* kernel) {
// CHECK(is_test != true);
// }
// Param fix_seed and seed is useless in MLU
auto dropout_implementation =
op_info->GetAttr<std::string>("dropout_implementation");
auto dropout_prob = op_info->GetAttr<float>("dropout_prob");
......
......@@ -55,6 +55,9 @@ int PoolConverter(void* ctx, OpLite* op, KernelBase* kernel) {
auto global_pooling = op_info->GetAttr<bool>("global_pooling");
auto ksize = op_info->GetAttr<std::vector<int>>("ksize");
auto strides = op_info->GetAttr<std::vector<int>>("strides");
CHECK(!(op_info->HasAttr("exclusive") &&
op_info->GetAttr<bool>("exclusive") == false))
<< "Unsupport param exclusive is false!";
if (paddings.size() == 2L) {
for (size_t i = 0; i < 2L; ++i) {
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册