Commit da9c94da authored by Gabor Buella, committed by Tao Luo

Clang build fixes (#15628)

* Remove some superfluous std::move calls

The `std::move` triggered a build error (with `-Werror`):
```
[  9%] Building CXX object paddle/fluid/memory/allocation/CMakeFiles/allocator_facade.dir/allocator_facade.cc.o
/home/tej/code/gbuella_paddle/paddle/fluid/memory/allocation/allocator_facade.cc:86:29: error: moving a temporary object prevents copy elision [-Werror,-Wpessimizing-move]
            [this] { return std::move(CreateAllocatorWithChunk()); }, capacity);
                            ^
/home/tej/code/gbuella_paddle/paddle/fluid/memory/allocation/allocator_facade.cc:86:29: note: remove std::move call here
            [this] { return std::move(CreateAllocatorWithChunk()); }, capacity);
                            ^~~~~~~~~~                          ~
1 error generated.
```

See: https://reviews.llvm.org/D7633
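A minimal standalone sketch of the pattern (illustrative only; `MakeWidget` is a made-up factory, not Paddle code):

```cpp
#include <string>
#include <utility>

std::string MakeWidget() { return "widget"; }  // some factory returning by value

std::string Bad() {
  // Wrapping the temporary in std::move defeats copy elision and
  // triggers -Wpessimizing-move under Clang.
  return std::move(MakeWidget());
}

std::string Good() {
  // Returning the prvalue directly lets the compiler elide the
  // copy/move entirely (elision is guaranteed since C++17).
  return MakeWidget();
}
```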

* Remove a superfluous lambda capture from framework/operator.h

```
[ 10%] Building CXX object paddle/fluid/platform/CMakeFiles/device_context.dir/init.cc.o
In file included from /home/tej/code/gbuella_paddle/paddle/fluid/platform/init.cc:19:
/home/tej/code/gbuella_paddle/paddle/fluid/framework/operator.h:229:21: error: lambda capture 'this' is not used [-Werror,-Wunused-lambda-capture]
                   [this](Variable* var) { return var; });
                    ^~~~
1 error generated.
```

Changed it to construct the result directly, `return {it->second.begin(), it->second.end()};`, mirroring the plain return in the function below.
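A self-contained sketch of both points (`ToConst` is a hypothetical helper, not the Paddle code):

```cpp
#include <vector>

struct Variable;  // opaque, as in the real headers

// The old lambda only forwarded its argument, so capturing `this` was
// dead weight:  [this](Variable* var) { return var; }
// A vector of const pointers can be built straight from the non-const
// ones, making the std::transform (and the capture) unnecessary:
std::vector<const Variable*> ToConst(const std::vector<Variable*>& in) {
  return {in.begin(), in.end()};
}
```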

* Rethrow an exception (instead of copying it)

```
[ 11%] Building CXX object paddle/fluid/framework/CMakeFiles/operator.dir/operator.cc.o
/home/tej/code/gbuella_paddle/paddle/fluid/framework/operator.cc:191:13: error: local variable 'exception' will be copied despite being thrown by name [-Werror,-Wreturn-std-move]
      throw exception;
            ^~~~~~~~~
/home/tej/code/gbuella_paddle/paddle/fluid/framework/operator.cc:191:13: note: call 'std::move' explicitly to avoid copying
      throw exception;
            ^~~~~~~~~
            std::move(exception)

```

See https://reviews.llvm.org/D43322 for an explanation of this diagnostic message.
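In isolation, a hedged sketch with `std::runtime_error` standing in for `platform::EnforceNotMet`:

```cpp
#include <stdexcept>

void MayFail() { throw std::runtime_error("boom"); }

void Run() {
  try {
    MayFail();
  } catch (std::runtime_error ex) {  // caught by value, as in the old code
    // `throw ex;` throws a copy of the named local, and naming the
    // variable also disables the implicit move, which is what Clang
    // flags. A bare `throw;` rethrows the exception currently being
    // handled, with no copy at all.
    throw;
  }
}
```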

* Remove an unused variable

```
/home/tej/code/gbuella_paddle/paddle/fluid/framework/operator.cc:884:16: error: private field 'scope_' is not used [-Werror,-Wunused-private-field]
  const Scope& scope_;
               ^
```
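The shape of the warning, on a made-up class:

```cpp
class RuntimeCtx {
 public:
  RuntimeCtx(int op_id, int scope_id) : op_id_(op_id), scope_id_(scope_id) {}
  int op_id() const { return op_id_; }

 private:
  int op_id_;
  int scope_id_;  // initialized but never read -> -Wunused-private-field
};
```

The fix is simply to delete the member and its initializer.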

* struct ComputationOpHandle -> class ComputationOpHandle

```
[ 13%] Building CXX object paddle/fluid/framework/details/CMakeFiles/memory_early_delete_pass.dir/memory_early_delete_pass.cc.o
In file included from /home/tej/code/gbuella_paddle/paddle/fluid/framework/details/memory_early_delete_pass.cc:21:
/home/tej/code/gbuella_paddle/paddle/fluid/framework/details/reference_count_pass_helper.h:30:1: error: class 'ComputationOpHandle' was previously declared as a struct; this is valid, but may result in linker errors under the Microsoft C++ ABI [-Werror,-Wmismatched-tags]
class ComputationOpHandle;
^
/home/tej/code/gbuella_paddle/paddle/fluid/framework/details/computation_op_handle.h:29:8: note: previous use is here
struct ComputationOpHandle : public OpHandleBase {
       ^
/home/tej/code/gbuella_paddle/paddle/fluid/framework/details/reference_count_pass_helper.h:30:1: note: did you mean struct here?
class ComputationOpHandle;
^~~~~
struct
1 error generated.
```
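The mismatch is easy to reproduce in one translation unit (illustrative names):

```cpp
struct Handle;    // forward declaration uses `struct` ...
class Handle {};  // ... definition uses `class` -> -Wmismatched-tags

class Handle2;    // consistent class-key: no warning
class Handle2 {};
```

C++ treats the two class-keys as interchangeable here, but the tag takes part in MSVC name mangling, hence the linker-error caveat in the diagnostic.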

* Fix name() methods under fluid/operators

```
In file included from /home/tej/code/gbuella_paddle/paddle/fluid/operators/jit/gen/act.cc:15:
In file included from /home/tej/code/gbuella_paddle/paddle/fluid/operators/jit/gen/act.h:19:
/home/tej/code/gbuella_paddle/paddle/fluid/operators/jit/gen/jitcode.h:71:23: error: 'name' overrides a member function but is not marked 'override' [-Werror,-Winconsistent-missing-override]
  virtual const char* name() const = 0;
                      ^
/home/tej/code/gbuella_paddle/paddle/fluid/operators/jit/gen_base.h:31:23: note: overridden virtual function is here
  virtual const char* name() const = 0;
                      ^
```
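Reduced to its essentials (a sketch, not the actual Paddle hierarchy):

```cpp
struct GenBase {
  virtual ~GenBase() = default;
  virtual const char* name() const = 0;
};

struct JitCode : GenBase {
  // Re-declaring `virtual const char* name() const = 0;` here repeats
  // the base's pure virtual without `override`, which is what
  // -Winconsistent-missing-override rejects; the redundant declaration
  // was removed.
};

struct VXXJitCode : JitCode {
  const char* name() const override { return "VXXJitCode"; }  // now marked
};
```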

test=develop
Parent fea7f0de
```diff
@@ -26,7 +26,7 @@
 namespace paddle {
 namespace framework {
 namespace details {
-struct ComputationOpHandle : public OpHandleBase {
+class ComputationOpHandle : public OpHandleBase {
  public:
   ComputationOpHandle(ir::Node *node, Scope *scope, platform::Place place,
                       size_t scope_idx);
```
```diff
@@ -65,7 +65,7 @@ FeedFetchList ParallelSSAGraphExecutor::Run(
     if (pool_) {
       run_futures.emplace_back(pool_->enqueue(std::move(call)));
     } else {
-      fetch_data.emplace_back(std::move(call()));
+      fetch_data.emplace_back(call());
     }
   }
@@ -74,7 +74,7 @@ FeedFetchList ParallelSSAGraphExecutor::Run(
       if (exception_holder_.IsCaught()) {
         f.wait();
       } else {
-        fetch_data.emplace_back(std::move(f.get()));
+        fetch_data.emplace_back(f.get());
       }
     }
   }
```
```diff
@@ -76,7 +76,7 @@ std::map<std::string, std::vector<ir::Node *>> Graph::InitFromProgram(
       var->inputs.push_back(node);
     }
   }
-  return std::move(var_nodes);
+  return var_nodes;
 }

 void Graph::ResolveHazard(
```
```diff
@@ -188,14 +188,14 @@ void OperatorBase::Run(const Scope& scope, const platform::Place& place) {
     VLOG(3) << place << " " << DebugStringEx(&scope);
   } catch (platform::EnforceNotMet exception) {
     if (Attrs().count("sub_block") != 0) {
-      throw exception;
+      throw;
     }

     auto& callstack = Attr<std::vector<std::string>>(
         OpProtoAndCheckerMaker::OpCreationCallstackAttrName());

     if (callstack.empty()) {
-      throw exception;
+      throw;
     }

     std::ostringstream sout;
     sout << "Invoke operator " << Type() << " error.\n";
@@ -206,7 +206,7 @@ void OperatorBase::Run(const Scope& scope, const platform::Place& place) {
     sout << "C++ Callstacks: \n";
     sout << exception.err_str_;
     exception.err_str_ = sout.str();
-    throw exception;
+    throw;
   } catch (...) {
     std::rethrow_exception(std::current_exception());
   }
@@ -589,7 +589,7 @@ class RuntimeInferShapeContext : public InferShapeContext {
  public:
   RuntimeInferShapeContext(const OperatorBase& op, const Scope& scope,
                            const RuntimeContext& ctx)
-      : op_(op), scope_(scope), ctx_(ctx) {}
+      : op_(op), ctx_(ctx) {}

   bool HasInput(const std::string& name) const override {
     // has only one input
@@ -881,7 +881,6 @@ class RuntimeInferShapeContext : public InferShapeContext {
   }

   const OperatorBase& op_;
-  const Scope& scope_;
   const RuntimeContext& ctx_;
 };
```
```diff
@@ -222,12 +222,7 @@ class ExecutionContext {
     if (it == ctx_.inputs.end()) {
       return {};
     }
-    std::vector<const Variable*> res;
-    res.reserve(it->second.size());
-    std::transform(it->second.begin(), it->second.end(),
-                   std::back_inserter(res),
-                   [this](Variable* var) { return var; });
-    return res;
+    return {it->second.begin(), it->second.end()};
   }

   std::vector<Variable*> MultiOutputVar(const std::string& name) const {
```
```diff
@@ -101,7 +101,7 @@ std::unique_ptr<Graph> IRPassManager::Apply(std::unique_ptr<Graph> graph) {
     }
     graph = pass->Apply(std::move(graph));
   }
-  return std::move(graph);
+  return graph;
 }

 framework::proto::ProgramDesc IRPassManager::AcquireProgram(
```
```diff
@@ -421,7 +421,7 @@ std::unique_ptr<PaddlePredictor> CreatePaddlePredictor<
   if (!dynamic_cast<AnalysisPredictor *>(predictor.get())->Init(nullptr)) {
     return nullptr;
   }
-  return std::move(predictor);
+  return predictor;
 }

 void AnalysisPredictor::PrepareFeedFetch() {
```
```diff
@@ -83,7 +83,7 @@ class ChunkedAllocator : public Allocator {
       VLOG(1) << "Create AutoIncrementAllocator with chunk_size "
               << max_chunk_size_ << " and capacity " << capacity;
       default_allocator_ = std::make_shared<AutoIncrementAllocator>(
-          [this] { return std::move(CreateAllocatorWithChunk()); }, capacity);
+          [this] { return CreateAllocatorWithChunk(); }, capacity);
     }
   }
```
```diff
@@ -63,7 +63,6 @@ class VActFunc : public JitCode {
  public:
   explicit VActFunc(size_t code_size, void* code_ptr)
       : JitCode(code_size, code_ptr) {}
-  virtual const char* name() const = 0;
   virtual void genCode() = 0;

  protected:
```
```diff
@@ -41,7 +41,7 @@ class VXXJitCode : public JitCode {
     this->genCode();
   }

-  virtual const char* name() const {
+  virtual const char* name() const override {
     std::string base = "VXXJitCode";
     if (scalar_index_ == 1) {
       base += "_Scalar";
```
```diff
@@ -35,7 +35,7 @@ class HOPVJitCode : public JitCode {
     this->genCode();
   }

-  virtual const char* name() const {
+  virtual const char* name() const override {
     std::string base = "VXXJitCode";
     if (type_ == operand_type::MAX) {
       base += "_MAX";
```
```diff
@@ -68,7 +68,6 @@ class JitCode : public GenBase, public Xbyak::CodeGenerator {
             (code_size % 4096 != 0 ? (code_size / 4096 + 1) * 4096 : code_size),
             code_ptr) {}
-  virtual const char* name() const = 0;
   virtual void genCode() = 0;

   size_t getSize() const override { return CodeGenerator::getSize(); }
```
```diff
@@ -36,7 +36,7 @@ class MatMulJitCode : public JitCode {
     this->genCode();
   }

-  virtual const char* name() const {
+  virtual const char* name() const override {
     std::string base = "MatMulJitCode";
     base = base + "_M" + std::to_string(m_) + "_N" + std::to_string(n_) + "_K" +
            std::to_string(k_);
```
```diff
@@ -38,7 +38,7 @@ class SeqPoolJitCode : public JitCode {
     this->genCode();
   }

-  virtual const char* name() const {
+  virtual const char* name() const override {
     std::string base = "SeqPoolJitCode";
     if (type_ == SeqPoolType::kSum) {
       base += "_Sum";
```
```diff
@@ -74,12 +74,12 @@ void BindPaddleBuf(py::module *m) {
       .def(py::init([](std::vector<float> &data) {
         auto buf = PaddleBuf(data.size() * sizeof(float));
         std::memcpy(buf.data(), static_cast<void *>(data.data()), buf.length());
-        return std::move(buf);
+        return buf;
       }))
       .def(py::init([](std::vector<int64_t> &data) {
         auto buf = PaddleBuf(data.size() * sizeof(int64_t));
         std::memcpy(buf.data(), static_cast<void *>(data.data()), buf.length());
-        return std::move(buf);
+        return buf;
       }))
       .def("resize", &PaddleBuf::Resize)
       .def("reset",
```