diff --git a/paddle/fluid/framework/op_compatible_info.cc b/paddle/fluid/framework/op_compatible_info.cc
index 934f6828112fe72b4902a6a996af10c548c3f5ff..d0702081d1fee1477f9b1676e4ba11815caba5df 100644
--- a/paddle/fluid/framework/op_compatible_info.cc
+++ b/paddle/fluid/framework/op_compatible_info.cc
@@ -215,5 +215,50 @@ bool OpCompatibleMap::ReadFromProto(const proto::OpCompatibleMap& desc) {
   return true;
 }
 
+bool ProgOptimUnsupported(std::shared_ptr<framework::ProgramDesc> program) {
+  auto op_type_checker = [](const std::string& name) {
+    const std::vector<std::string> op_types({
+        "conv2d", "conv3d", "conv2d_transpose", "conv3d_transpose",
+        "depthwise_conv2d", "depthwise_conv2d_transpose", "pool2d", "pool3d",
+    });
+    return std::find(op_types.begin(), op_types.end(), name) != op_types.end();
+  };
+  auto checker = [](const framework::OpDesc& op) {
+    if (op.HasAttr("paddings") && op.HasAttr("strides")) {
+      auto paddings = boost::get<std::vector<int>>(op.GetAttr("paddings"));
+      auto strides = boost::get<std::vector<int>>(op.GetAttr("strides"));
+      if (paddings.size() != strides.size()) {
+        VLOG(3) << "== paddings size is not equal to strides size.";
+        return true;
+      }
+    }
+    if (op.HasAttr("data_format")) {
+      auto data_format = boost::get<std::string>(op.GetAttr("data_format"));
+      if (data_format == "NHWC" || data_format == "NDHWC") {
+        VLOG(3) << "== data_format is NHWC or NDHWC.";
+        return true;
+      }
+    }
+    if (op.HasAttr("padding_algorithm")) {
+      auto padding_algorithm =
+          boost::get<std::string>(op.GetAttr("padding_algorithm"));
+      if (padding_algorithm != "EXPLICIT") {
+        VLOG(3) << "== padding_algorithm is not EXPLICIT.";
+        return true;
+      }
+    }
+    return false;
+  };
+  for (size_t i = 0; i < program->Size(); i++) {
+    const auto& block = program->Block(i);
+    for (auto* op : block.AllOps()) {
+      if ((op_type_checker(op->Type())) && checker(*op)) {
+        return true;
+      }
+    }
+  }
+  return false;
+}
+
 }  // namespace framework
 }  // namespace paddle
diff --git a/paddle/fluid/framework/op_compatible_info.h b/paddle/fluid/framework/op_compatible_info.h
index 08b5734b5bfe33d4269c06d639448eefd26fcb06..e72d206b14476ffd4e69157242f98adf80f87411 100644
--- a/paddle/fluid/framework/op_compatible_info.h
+++ b/paddle/fluid/framework/op_compatible_info.h
@@ -13,6 +13,7 @@
 // limitations under the License.
 
 #include <map>
+#include <memory>
 #include <string>
 
 #include "paddle/fluid/framework/program_desc.h"
@@ -70,5 +71,9 @@ class OpCompatibleMap {
   std::string default_required_version_;
 };
 
+// Determine if the model contains operators that the optimization cannot
+// support.
+bool ProgOptimUnsupported(std::shared_ptr<framework::ProgramDesc> program);
+
 }  // namespace framework
 }  // namespace paddle
diff --git a/paddle/fluid/inference/api/analysis_predictor.cc b/paddle/fluid/inference/api/analysis_predictor.cc
index 7a628769727a74fa114ca0bf2f3c903036ec90c8..4f632946f338857813bf663284efcaedb1992785 100644
--- a/paddle/fluid/inference/api/analysis_predictor.cc
+++ b/paddle/fluid/inference/api/analysis_predictor.cc
@@ -145,7 +145,7 @@ bool AnalysisPredictor::PrepareProgram(
     // still need to create other persistable variables.
     // So in both case, create persistable variables at first.
     if (!CheckOperatorCompatible()) {
-      LOG(WARNING) << "WARNING: Results may be DIFF! "
+      LOG(WARNING) << "WARNING: Results may be incorrect! "
                       "Using same versions between model and lib.";
     }
     executor_->CreateVariables(*inference_program_, 0, true, sub_scope_);
@@ -458,6 +458,14 @@ void AnalysisPredictor::PrepareArgument() {
 
 // NOTE All the members in AnalysisConfig should be copied to Argument.
 void AnalysisPredictor::OptimizeInferenceProgram() {
+  if (ProgOptimUnsupported(inference_program_)) {
+    LOG(INFO) << "NOTICE: Your inference model contains parameters such "
+                 "as asymmetric padding, and ir optimization is temporarily "
+                 "not supported, "
+                 "so it is turned off.";
+    config_.SwitchIrOptim(false);
+    argument_.SetEnableAnalysisOptim(false);
+  }
   PrepareArgument();
   Analyzer().Run(&argument_);
 }