From 42359e88a409822581e42f64d21f675a90441cd3 Mon Sep 17 00:00:00 2001
From: Tao Luo
Date: Thu, 6 Dec 2018 17:29:27 +0800
Subject: [PATCH] clean code test=develop

---
 .../fluid/inference/api/analysis_predictor.cc |  9 +--
 paddle/fluid/inference/io.cc                  |  1 -
 paddle/fluid/operators/impl/CMakeLists.txt    |  1 -
 paddle/fluid/operators/impl/load_combine.cc   | 61 -------------------
 paddle/fluid/operators/impl/load_combine.h    | 33 ----------
 5 files changed, 5 insertions(+), 100 deletions(-)
 delete mode 100644 paddle/fluid/operators/impl/CMakeLists.txt
 delete mode 100644 paddle/fluid/operators/impl/load_combine.cc
 delete mode 100644 paddle/fluid/operators/impl/load_combine.h

diff --git a/paddle/fluid/inference/api/analysis_predictor.cc b/paddle/fluid/inference/api/analysis_predictor.cc
index 6091c5a625..f70c12dc87 100644
--- a/paddle/fluid/inference/api/analysis_predictor.cc
+++ b/paddle/fluid/inference/api/analysis_predictor.cc
@@ -304,7 +304,6 @@ bool AnalysisPredictor::GetFetch(std::vector<PaddleTensor> *outputs,
 
 // NOTE All the members in AnalysisConfig should be copied to Argument.
 void AnalysisPredictor::OptimizeInferenceProgram() {
-  LOG(INFO) << "optimization program";
   status_program_optimized_ = true;
 
   argument_.SetUseGPU(config_.use_gpu);
@@ -313,11 +312,13 @@ void AnalysisPredictor::OptimizeInferenceProgram() {
   // Analyze inference_program
   if (!config_.model_dir.empty()) {
     argument_.SetModelDir(config_.model_dir);
-  } else if (!config_.param_file.empty() && !config_.prog_file.empty()) {
+  } else {
+    PADDLE_ENFORCE(
+        !config_.param_file.empty(),
+        "Either model_dir or (param_file, prog_file) should be set.");
+    PADDLE_ENFORCE(!config_.prog_file.empty());
     argument_.SetModelProgramPath(config_.prog_file);
     argument_.SetModelParamsPath(config_.param_file);
-  } else {
-    PADDLE_THROW("Either model_dir or (param_file, prog_file) should be set.");
   }
 
   if (config_.use_gpu && config_.use_tensorrt_) {
diff --git a/paddle/fluid/inference/io.cc b/paddle/fluid/inference/io.cc
index 053c516f88..060d6a89d4 100644
--- a/paddle/fluid/inference/io.cc
+++ b/paddle/fluid/inference/io.cc
@@ -21,7 +21,6 @@ limitations under the License. */
 #include "paddle/fluid/framework/feed_fetch_type.h"
 #include "paddle/fluid/framework/op_registry.h"
 #include "paddle/fluid/framework/version.h"
-#include "paddle/fluid/operators/impl/load_combine.h"
 #include "paddle/fluid/platform/cpu_helper.h"
 #include "paddle/fluid/pybind/pybind.h"
 
diff --git a/paddle/fluid/operators/impl/CMakeLists.txt b/paddle/fluid/operators/impl/CMakeLists.txt
deleted file mode 100644
index 1c0ff7c3d9..0000000000
--- a/paddle/fluid/operators/impl/CMakeLists.txt
+++ /dev/null
@@ -1 +0,0 @@
-cc_library(load_combine_impl SRCS load_combine.cc DEPS scope lod_tensor device_context op_registry data_type_transform)
diff --git a/paddle/fluid/operators/impl/load_combine.cc b/paddle/fluid/operators/impl/load_combine.cc
deleted file mode 100644
index 454d583a10..0000000000
--- a/paddle/fluid/operators/impl/load_combine.cc
+++ /dev/null
@@ -1,61 +0,0 @@
-// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include "paddle/fluid/operators/impl/load_combine.h"
-
-namespace paddle {
-namespace operators {
-namespace impl {
-
-void LoadParamsFromStream(const std::vector<std::string> &out_var_names,
-                          const paddle::platform::Place &place,
-                          bool load_as_fp16, std::istream *buffer,
-                          const paddle::framework::Scope *scope) {
-  auto *dev_ctx = platform::DeviceContextPool::Instance().Get(place);
-  for (size_t i = 0; i < out_var_names.size(); i++) {
-    auto *out_var = scope->FindVar(out_var_names[i]);
-
-    PADDLE_ENFORCE(out_var != nullptr, "Output variable %s cannot be found",
-                   out_var_names[i]);
-
-    auto *tensor = out_var->GetMutable<framework::LoDTensor>();
-
-    // Get data from fin to tensor
-    DeserializeFromStream(*buffer, tensor, *dev_ctx);
-
-    auto in_dtype = framework::ToDataType(tensor->type());
-    auto out_dtype = load_as_fp16 ? framework::proto::VarType::FP16 : in_dtype;
-
-    if (in_dtype != out_dtype) {
-      // convert to float16 tensor
-      auto in_kernel_type = framework::OpKernelType(in_dtype, place);
-      auto out_kernel_type = framework::OpKernelType(out_dtype, place);
-      framework::LoDTensor fp16_tensor;
-      // copy LoD info to the new tensor
-      fp16_tensor.set_lod(tensor->lod());
-      framework::TransDataType(in_kernel_type, out_kernel_type, *tensor,
-                               &fp16_tensor);
-
-      // reset output tensor
-      out_var->Clear();
-      tensor = out_var->GetMutable<framework::LoDTensor>();
-      tensor->set_lod(fp16_tensor.lod());
-      tensor->ShareDataWith(fp16_tensor);
-    }
-  }
-}
-
-}  // namespace impl
-}  // namespace operators
-}  // namespace paddle
diff --git a/paddle/fluid/operators/impl/load_combine.h b/paddle/fluid/operators/impl/load_combine.h
deleted file mode 100644
index 53ffcaf43a..0000000000
--- a/paddle/fluid/operators/impl/load_combine.h
+++ /dev/null
@@ -1,33 +0,0 @@
-// Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#pragma once
-#include <string>
-#include <vector>
-#include "paddle/fluid/framework/data_type_transform.h"
-#include "paddle/fluid/framework/op_registry.h"
-#include "paddle/fluid/platform/device_context.h"
-
-namespace paddle {
-namespace operators {
-namespace impl {
-
-// Load parameters from a single stream.
-void LoadParamsFromStream(const std::vector<std::string> &out_var_names,
-                          const platform::Place &place, bool load_as_fp16,
-                          std::istream *buffer, const framework::Scope *scope);
-
-}  // namespace impl
-}  // namespace operators
-}  // namespace paddle
--
GitLab