diff --git a/paddle/fluid/inference/api/analysis_predictor.cc b/paddle/fluid/inference/api/analysis_predictor.cc
index d9d6dc4f26520f8e0cb3406a61b7fd1fd83eaba1..099ec3449d7acf22880a922d9be3bd0a2a9f0874 100644
--- a/paddle/fluid/inference/api/analysis_predictor.cc
+++ b/paddle/fluid/inference/api/analysis_predictor.cc
@@ -563,9 +563,6 @@ void AnalysisPredictor::PrepareArgument() {
   if (!config_.model_dir().empty()) {
     argument_.SetModelDir(config_.model_dir());
   } else {
-    PADDLE_ENFORCE_EQ(config_.params_file().empty(), false,
-                      platform::errors::PreconditionNotMet(
-                          "Either model_dir or param_file should be set."));
     PADDLE_ENFORCE_EQ(config_.prog_file().empty(), false,
                       platform::errors::PreconditionNotMet(
                           "Either model_dir or prog_file should be set."));
@@ -1089,7 +1086,7 @@ bool AnalysisPredictor::LoadProgramDesc() {
   std::string filename;
   if (!config_.model_dir().empty()) {
     filename = config_.model_dir() + "/__model__";
-  } else if (!config_.prog_file().empty() && !config_.params_file().empty()) {
+  } else if (!config_.prog_file().empty()) {
     // All parameters are saved in a single file.
     // The file names should be consistent with that used
     // in Python API `fluid.io.save_inference_model`.