From 47d6bc86ae078d46c2c0a12918165bd53dad04c5 Mon Sep 17 00:00:00 2001 From: Tomasz Socha Date: Wed, 22 Sep 2021 17:24:19 +0200 Subject: [PATCH] Fix copy elision warning (#35885) * Fix copy elision warning * Remove redundant code --- paddle/fluid/framework/fleet/gloo_wrapper.h | 2 +- paddle/fluid/framework/ir/fc_gru_fuse_pass_tester.h | 2 +- paddle/fluid/framework/ir/fc_lstm_fuse_pass_tester.h | 2 +- .../framework/new_executor/interpretercore_util.cc | 4 ++-- paddle/fluid/framework/new_executor/workqueue.cc | 6 +++--- paddle/fluid/inference/api/api_impl.cc | 12 ------------ paddle/fluid/memory/allocation/mmap_allocator.cc | 2 +- paddle/fluid/pybind/inference_api.cc | 2 +- paddle/fluid/string/string_helper.h | 2 +- 9 files changed, 11 insertions(+), 23 deletions(-) diff --git a/paddle/fluid/framework/fleet/gloo_wrapper.h b/paddle/fluid/framework/fleet/gloo_wrapper.h index e69439892ca..4eb40da1bfd 100644 --- a/paddle/fluid/framework/fleet/gloo_wrapper.h +++ b/paddle/fluid/framework/fleet/gloo_wrapper.h @@ -215,7 +215,7 @@ class GlooWrapper { #else LOG(WARNING) << "AllGather does nothing when WITH_GLOO=OFF"; #endif - return std::move(ret); + return ret; } protected: diff --git a/paddle/fluid/framework/ir/fc_gru_fuse_pass_tester.h b/paddle/fluid/framework/ir/fc_gru_fuse_pass_tester.h index a862755d604..df3fbc293b7 100644 --- a/paddle/fluid/framework/ir/fc_gru_fuse_pass_tester.h +++ b/paddle/fluid/framework/ir/fc_gru_fuse_pass_tester.h @@ -88,7 +88,7 @@ std::unique_ptr PrepareGraph( nullptr, false, false, activation, gate_activation); std::unique_ptr graph(new ir::Graph(layers.main_program())); - return std::move(graph); + return graph; } } // namespace fc_gru_test } // namespace ir diff --git a/paddle/fluid/framework/ir/fc_lstm_fuse_pass_tester.h b/paddle/fluid/framework/ir/fc_lstm_fuse_pass_tester.h index f681a2b7ff8..a313e49f0b2 100644 --- a/paddle/fluid/framework/ir/fc_lstm_fuse_pass_tester.h +++ b/paddle/fluid/framework/ir/fc_lstm_fuse_pass_tester.h @@ -91,7 +91,7 
@@ std::unique_ptr PrepareGraph( false, gate_activation, cell_activation, candidate_activation); std::unique_ptr graph(new ir::Graph(layers.main_program())); - return std::move(graph); + return graph; } } // namespace fc_lstm_test diff --git a/paddle/fluid/framework/new_executor/interpretercore_util.cc b/paddle/fluid/framework/new_executor/interpretercore_util.cc index 91f334ffefd..16df5d794f4 100644 --- a/paddle/fluid/framework/new_executor/interpretercore_util.cc +++ b/paddle/fluid/framework/new_executor/interpretercore_util.cc @@ -25,7 +25,7 @@ AtomicVectorSizeT AsyncWorkQueue::PrepareAtomicDeps( working_dependecy_count[i] = std::make_unique>(dependecy_count[i]); } - return std::move(working_dependecy_count); + return working_dependecy_count; } AtomicVectorSizeT AsyncWorkQueue::PrepareAtomicVarRef( @@ -36,7 +36,7 @@ AtomicVectorSizeT AsyncWorkQueue::PrepareAtomicVarRef( working_var_ref[i] = std::make_unique>(vec_meta_info[i].var_ref_count_); } - return std::move(working_var_ref); + return working_var_ref; } bool var_can_be_deleted(const std::string& name, const BlockDesc& block) { diff --git a/paddle/fluid/framework/new_executor/workqueue.cc b/paddle/fluid/framework/new_executor/workqueue.cc index 184d9d69984..eebcfe60182 100644 --- a/paddle/fluid/framework/new_executor/workqueue.cc +++ b/paddle/fluid/framework/new_executor/workqueue.cc @@ -147,7 +147,7 @@ std::unique_ptr CreateSingleThreadedWorkQueue( "For a SingleThreadedWorkQueue, " "WorkQueueOptions.num_threads must equals to 1.")); std::unique_ptr ptr(new WorkQueueImpl(options)); - return std::move(ptr); + return ptr; } std::unique_ptr CreateMultiThreadedWorkQueue( @@ -158,7 +158,7 @@ std::unique_ptr CreateMultiThreadedWorkQueue( "WorkQueueOptions.num_threads must be " "greater than 1.")); std::unique_ptr ptr(new WorkQueueImpl(options)); - return std::move(ptr); + return ptr; } std::unique_ptr CreateWorkQueueGroup( @@ -168,7 +168,7 @@ std::unique_ptr CreateWorkQueueGroup( "For a WorkQueueGroup, the number 
of WorkQueueOptions " "must be greater than 1.")); std::unique_ptr ptr(new WorkQueueGroupImpl(queues_options)); - return std::move(ptr); + return ptr; } } // namespace framework diff --git a/paddle/fluid/inference/api/api_impl.cc b/paddle/fluid/inference/api/api_impl.cc index bb104015947..c1a0cb4be44 100644 --- a/paddle/fluid/inference/api/api_impl.cc +++ b/paddle/fluid/inference/api/api_impl.cc @@ -192,14 +192,7 @@ std::unique_ptr NativePaddlePredictor::Clone() { LOG(ERROR) << "fail to call Init"; return nullptr; } - -#ifdef __clang__ - // fix clang compile error return cls; -#else - // fix manylinux compile error. - return std::move(cls); -#endif } bool NativePaddlePredictor::SetFeed(const std::vector &inputs, @@ -390,12 +383,7 @@ std::unique_ptr CreatePaddlePredictor< if (!dynamic_cast(predictor.get())->Init(nullptr)) { return nullptr; } -#ifdef __clang__ - // fix clang compile error return predictor; -#else - return std::move(predictor); -#endif } template <> diff --git a/paddle/fluid/memory/allocation/mmap_allocator.cc b/paddle/fluid/memory/allocation/mmap_allocator.cc index 3fff18b9bc3..acaf5d54855 100644 --- a/paddle/fluid/memory/allocation/mmap_allocator.cc +++ b/paddle/fluid/memory/allocation/mmap_allocator.cc @@ -59,7 +59,7 @@ std::string GetIPCName() { #endif handle += "_"; handle += std::to_string(rd()); - return std::move(handle); + return handle; } std::shared_ptr AllocateMemoryMapWriterAllocation( diff --git a/paddle/fluid/pybind/inference_api.cc b/paddle/fluid/pybind/inference_api.cc index a18cbfb86fe..8ce7bea2d8e 100644 --- a/paddle/fluid/pybind/inference_api.cc +++ b/paddle/fluid/pybind/inference_api.cc @@ -324,7 +324,7 @@ void BindInferenceApi(py::module *m) { auto pred = std::unique_ptr( new paddle_infer::Predictor(config)); - return std::move(pred); + return pred; }); m->def("copy_tensor", &CopyPaddleInferTensor); m->def("paddle_dtype_size", &paddle::PaddleDtypeSize); diff --git a/paddle/fluid/string/string_helper.h 
b/paddle/fluid/string/string_helper.h index 37b713766dd..1ab7690f8b5 100644 --- a/paddle/fluid/string/string_helper.h +++ b/paddle/fluid/string/string_helper.h @@ -53,7 +53,7 @@ template std::string format_string(const char* fmt, ARGS&&... args) { std::string str; format_string_append(str, fmt, args...); - return std::move(str); + return str; } template -- GitLab