From 0c9279afa3a0afbc726a2f370051a2269a23c889 Mon Sep 17 00:00:00 2001
From: liuruilong
Date: Thu, 17 May 2018 14:59:18 +0800
Subject: [PATCH] git log printer

---
 CMakeLists.txt                           |   4 +
 src/common/log.h                         | 151 +++++++++++++++++++++++
 src/framework/executor.cpp               |  12 --
 src/framework/operator.h                 |   7 ++
 src/io.cpp                               |  15 ++-
 src/operators/conv_op.h                  |   1 +
 src/operators/kernel/arm/conv_kernel.cpp |   8 +-
 src/operators/op_param.cpp               |  35 +++---
 src/operators/op_param.h                 |   3 +-
 test/main.cpp                            |   1 +
 test/unit-test/test_log.cpp              |  31 +++++
 11 files changed, 229 insertions(+), 39 deletions(-)
 create mode 100644 src/common/log.h
 create mode 100644 test/unit-test/test_log.cpp

diff --git a/CMakeLists.txt b/CMakeLists.txt
index 18cf94f303..1b54c0b155 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -48,3 +48,7 @@ add_dependencies(paddle-mobile openblas_proj)
 # gen test
 ADD_EXECUTABLE(paddle-mobile-test test/main.cpp test/test_helper.h)
 target_link_libraries(paddle-mobile-test paddle-mobile)
+
+# gen test log
+ADD_EXECUTABLE(test-log test/unit-test/test_log.cpp)
+target_link_libraries(test-log paddle-mobile)
diff --git a/src/common/log.h b/src/common/log.h
new file mode 100644
index 0000000000..4fae42893b
--- /dev/null
+++ b/src/common/log.h
@@ -0,0 +1,151 @@
+/* Copyright (c) 2016 Baidu, Inc. All Rights Reserved.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+==============================================================================*/
+
+#pragma once
+
+#ifdef PADDLE_MOBILE_DEBUG
+
+#include <iostream>
+#include <sstream>
+#include <string>
+#include <vector>
+
+namespace paddle_mobile {
+
+enum LogLevel {
+  kNO_LOG,
+  kLOG_ERROR,
+  kLOG_WARNING,
+  kLOG_INFO,
+  kLOG_DEBUG,
+  kLOG_DEBUG1,
+  kLOG_DEBUG2,
+  kLOG_DEBUG3,
+  kLOG_DEBUG4
+};
+
+// log level
+static LogLevel log_level = kLOG_DEBUG4;
+
+static std::vector<std::string> logs{"NO",      "ERROR ",  "WARNING",
+                                     "INFO ",   "DEBUG ",  "DEBUG1 ",
+                                     "DEBUG2 ", "DEBUG3 ", "DEBUG4 "};
+
+struct ToLog;
+
+struct Print {
+  friend struct ToLog;
+  template <typename T> Print &operator<<(T const &value) {
+    buffer_ << value;
+    return *this;
+  }
+
+private:
+  void print(LogLevel level) {
+    buffer_ << std::endl;
+    if (level == kLOG_ERROR) {
+      std::cerr << buffer_.str();
+    } else {
+      std::cout << buffer_.str();
+    }
+  }
+  std::ostringstream buffer_;
+};
+
+struct ToLog {
+  ToLog(LogLevel level = kLOG_DEBUG, const std::string &info = "")
+      : level_(level) {
+    unsigned blanks =
+        (unsigned)(level > kLOG_DEBUG ? (level - kLOG_DEBUG) * 4 : 1);
+    printer_ << logs[level] << " " << info << ":" << std::string(blanks, ' ');
+  }
+
+  template <typename T> ToLog &operator<<(T const &value) {
+    printer_ << value;
+    return *this;
+  }
+
+  ~ToLog() { printer_.print(level_); }
+
+private:
+  LogLevel level_;
+  Print printer_;
+};
+
+#define LOG(level)                                                       \
+  if (level > paddle_mobile::log_level) {                                \
+  } else                                                                 \
+    paddle_mobile::ToLog(level,                                          \
+                         (std::stringstream()                           \
+                          << "[file: " << (strrchr(__FILE__, '/')       \
+                                                ? (strrchr(__FILE__, '/') + 1) \
+                                                : __FILE__)             \
+                          << "] [line: " << __LINE__ << "] ")           \
+                             .str())
+
+#define DLOG                                                             \
+  paddle_mobile::ToLog(paddle_mobile::kLOG_DEBUG,                        \
+                       (std::stringstream()                             \
+                        << "[file: " << (strrchr(__FILE__, '/')         \
+                                              ? (strrchr(__FILE__, '/') + 1) \
+                                              : __FILE__)               \
+                        << "] [line: " << __LINE__ << "] ")             \
+                           .str())
+}
+
+#else
+
+namespace paddle_mobile {
+
+enum LogLevel {
+  kNO_LOG,
+  kLOG_ERROR,
+  kLOG_WARNING,
+  kLOG_INFO,
+  kLOG_DEBUG,
+  kLOG_DEBUG1,
+  kLOG_DEBUG2,
+  kLOG_DEBUG3,
+  kLOG_DEBUG4
+};
+
+struct ToLog;
+struct Print {
+  friend struct ToLog;
+  template <typename T> Print &operator<<(T const &value) {}
+
+private:
+};
+
+struct ToLog {
+  ToLog(LogLevel level) {}
+
+  template <typename T> ToLog &operator<<(T const &value) { return *this; }
+};
+
+#define LOG(level) \
+  if (true) {      \
+  } else           \
+    paddle_mobile::ToLog(level)
+
+#define DLOG       \
+  if (true) {      \
+  } else           \
+    paddle_mobile::ToLog(paddle_mobile::kLOG_DEBUG)
+}
+#endif
diff --git a/src/framework/executor.cpp b/src/framework/executor.cpp
index 1459f04de5..19a4f5d66c 100644
--- a/src/framework/executor.cpp
+++ b/src/framework/executor.cpp
@@ -35,27 +35,15 @@ Executor::Executor(const Program p) : program_(p) {
   const std::vector> blocks = to_predict_program_->Blocks();
-  // std::cout << " **block size " << blocks.size() << std::endl;
   for (int i = 0; i < blocks.size(); ++i) {
     std::shared_ptr block_desc = blocks[i];
     std::vector> ops = block_desc->Ops();
-    // std::cout << " ops " << ops.size() << std::endl;
     for (int j = 0; j < ops.size(); ++j) {
       std::shared_ptr op = ops[j];
-      // std::cout << " input 0 " << op->Input("Input")[0] << std::endl;
       if (op->Type() == "conv2d" && op->Input("Input")[0] == "pixel") {
-        // std::cout << " conv2d attr size: " << op->GetAttrMap().size()
-        // << std::endl;
-        // std::cout << " input size: " << op->GetInputs().size() <<
-        // std::endl;
-
-        // std::cout << " output size: " << op->GetOutputs().size() <<
-        // std::endl;
-
         Attribute strides_attr = op->GetAttrMap().at("strides");
         std::vector stride = strides_attr.Get>();
         for (int k = 0; k < stride.size(); ++k) {
-          // std::cout << " stride " << stride[k] << std::endl;
         }
         std::shared_ptr> conv =
diff --git a/src/framework/operator.h b/src/framework/operator.h
index e14e30aba8..b98e4ee63e 100644
--- a/src/framework/operator.h
+++ b/src/framework/operator.h
@@ -68,6 +68,13 @@ public:
       : OperatorBase(type, inputs, outputs, attrs, scope) {}
   virtual void InferShape() const = 0;
+  void ClearVariables() const {
+    if (this->scope_) {
+      this->scope_->EraseVars(this->inputs_.at("Filter"));
+      this->scope_->EraseVars(this->inputs_.at("Input"));
+    }
+  }
+
 protected:
   virtual void RunImpl() const = 0;
diff --git a/src/io.cpp b/src/io.cpp
index 5fb7d8b30a..0d97e9bfd6 100644
--- a/src/io.cpp
+++ b/src/io.cpp
@@ -19,6 +19,7 @@ SOFTWARE.
 #include
 #include
+#include "common/log.h"
 #include "framework/framework.pb.h"
 #include "framework/lod_tensor.h"
 #include "framework/program_desc.h"
@@ -41,25 +42,27 @@ void ReadBinaryFile(const std::string &filename, std::string *contents) {
 template
 void Loader::LoadVar(framework::LoDTensor *tensor,
                      const std::string &file_path) {
-  // std::cout << " to load " << file_path << std::endl;
+
+  LOG(kLOG_DEBUG) << " to load " << file_path;
+  // Log(kLOG_DEBUG) << "123";
   std::ifstream is(file_path);
   std::streampos pos = is.tellg(); // save current position
   is.seekg(0, std::ios::end);
-  // std::cout << " file length = " << is.tellg() << std::endl;
+  LOG(kLOG_DEBUG) << " file length = " << is.tellg();
   is.seekg(pos); // restore saved position
   // 1. version
   uint32_t version;
   is.read(reinterpret_cast<char *>(&version), sizeof(version));
-  // std::cout << " version: " << version << std::endl;
+  LOG(kLOG_INFO) << " version: " << version;
   // 2 Lod information
   uint64_t lod_level;
   is.read(reinterpret_cast<char *>(&lod_level), sizeof(lod_level));
-  // std::cout << " load level: " << lod_level << std::endl;
-  // std::cout << " lod info: " << std::endl;
+  LOG(kLOG_DEBUG) << " load level: " << lod_level;
+  LOG(kLOG_DEBUG) << " lod info: ";
   auto &lod = *tensor->mutable_lod();
   lod.resize(lod_level);
   for (uint64_t i = 0; i < lod_level; ++i) {
@@ -69,7 +72,7 @@ void Loader::LoadVar(framework::LoDTensor *tensor,
     is.read(reinterpret_cast<char *>(tmp.data()),
             static_cast<std::streamsize>(size));
     for (int j = 0; j < tmp.size(); ++j) {
-      // std::cout << " lod - " << tmp[j] << std::endl;
+      LOG(kLOG_DEBUG1) << " lod - " << tmp[j];
     }
     lod[i] = tmp;
   }
diff --git a/src/operators/conv_op.h b/src/operators/conv_op.h
index 4126c49f93..e8a3b2a253 100644
--- a/src/operators/conv_op.h
+++ b/src/operators/conv_op.h
@@ -43,6 +43,7 @@ protected:
   void RunImpl() const {
     operators::ConvKernel kernel;
     kernel.Compute(param_);
+    this->ClearVariables();
   }
   ConvParam param_;
diff --git a/src/operators/kernel/arm/conv_kernel.cpp b/src/operators/kernel/arm/conv_kernel.cpp
index f545323291..1b158e85a7 100644
--- a/src/operators/kernel/arm/conv_kernel.cpp
+++ b/src/operators/kernel/arm/conv_kernel.cpp
@@ -38,7 +38,7 @@ template <>
 void ConvKernel::Compute(const ConvParam &param) const {
   const Tensor *input = param.Input();
-  std::cout << " conv param " << param << std::endl;
+  LOG(kLOG_DEBUG) << param;
   // The filter will be reshaped in the calculations,
   // so here use an assignment operation,
@@ -53,7 +53,7 @@ void ConvKernel::Compute(const ConvParam &param) const {
   std::vector paddings = param.Paddings();
   std::vector dilations = param.Dilations();
-  std::cout << " compute end get Attrs " << strides[0] << std::endl;
+  DLOG << " compute end get Attrs " << strides[0];
   const int batch_size = static_cast<int>(input->dims()[0]);
@@ -99,9 +99,9 @@ void ConvKernel::Compute(const ConvParam &param) const {
                            filter.numel() / filter.dims()[0]};
   filter.Resize(filter_matrix_shape);
-  std::cout << " input dim " << input->dims() << std::endl;
+  DLOG << " input dim " << input->dims();
-  std::cout << " output dim " << output->dims() << std::endl;
+  DLOG << " output dim " << output->dims();
   framework::DDim output_matrix_shape = {
       output->dims()[1],
diff --git a/src/operators/op_param.cpp b/src/operators/op_param.cpp
index 789ef86c46..d5284e47fc 100644
--- a/src/operators/op_param.cpp
+++ b/src/operators/op_param.cpp
@@ -21,22 +21,25 @@ SOFTWARE.
 namespace paddle_mobile {
 namespace operators {
-std::ostream &operator<<(std::ostream &os, const ConvParam &conv_param) {
-  os << "parameter of conv: " << std::endl;
-  os << " stride: "
-     << " (" << conv_param.Strides()[0] << conv_param.Strides()[1] << ") "
-     << std::endl;
-  os << " paddings: "
-     << " (" << conv_param.Paddings()[0] << conv_param.Paddings()[1] << ") "
-     << std::endl;
-  os << " dilations: "
-     << " (" << conv_param.Dilations()[0] << conv_param.Dilations()[1] << ") "
-     << std::endl;
-  os << " groups: " << conv_param.Groups() << std::endl;
-  os << " input dims: " << conv_param.Input()->dims() << std::endl;
-  os << " filter dims: " << conv_param.Filter()->dims() << std::endl;
-  os << " output dims: " << conv_param.Output()->dims() << std::endl;
-  return os;
+Print &operator<<(Print &printer, const ConvParam &conv_param) {
+  printer << "parameter of conv: "
+          << "\n";
+  printer << " stride: "
+          << " (" << conv_param.Strides()[0] << conv_param.Strides()[1] << ") "
+          << "\n";
+  printer << " paddings: "
+          << " (" << conv_param.Paddings()[0] << conv_param.Paddings()[1]
+          << ") "
+          << "\n";
+  printer << " dilations: "
+          << " (" << conv_param.Dilations()[0] << conv_param.Dilations()[1]
+          << ") "
+          << "\n";
+  printer << " groups: " << conv_param.Groups() << "\n";
+  printer << " input dims: " << conv_param.Input()->dims() << "\n";
+  printer << " filter dims: " << conv_param.Filter()->dims() << "\n";
+  printer << " output dims: " << conv_param.Output()->dims();
+  return printer;
 }
 } // namespace operators
diff --git a/src/operators/op_param.h b/src/operators/op_param.h
index 01463008a8..07e28e1928 100644
--- a/src/operators/op_param.h
+++ b/src/operators/op_param.h
@@ -18,6 +18,7 @@ SOFTWARE.
 #pragma once;
+#include "common/log.h"
 #include "common/type_define.h"
 #include "framework/lod_tensor.h"
 #include "framework/scope.h"
@@ -104,7 +105,7 @@ private:
   int groups;
 };
-std::ostream &operator<<(std::ostream &os, const ConvParam &conv_param);
+Print &operator<<(Print &printer, const ConvParam &conv_param);
 } // namespace operators
 } // namespace paddle_mobile
diff --git a/test/main.cpp b/test/main.cpp
index 41ac36b39c..45b3d9e5f6 100644
--- a/test/main.cpp
+++ b/test/main.cpp
@@ -49,6 +49,7 @@ int main() {
   //  }
   paddle_mobile::Loader loader;
+  //../../test/models/image_classification_resnet.inference.model
   auto program = loader.Load(std::string(
       "../../test/models/image_classification_resnet.inference.model"));
diff --git a/test/unit-test/test_log.cpp b/test/unit-test/test_log.cpp
new file mode 100644
index 0000000000..ea0b9f0a27
--- /dev/null
+++ b/test/unit-test/test_log.cpp
@@ -0,0 +1,31 @@
+/* Copyright (c) 2016 Baidu, Inc. All Rights Reserved.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+==============================================================================*/
+
+#include "common/log.h"
+
+int main() {
+  LOG(paddle_mobile::kLOG_DEBUG) << "test debug"
+                                 << " next log";
+  LOG(paddle_mobile::kLOG_DEBUG1) << "test debug1"
+                                  << " next log";
+  LOG(paddle_mobile::kLOG_DEBUG2) << "test debug2"
+                                  << " next log";
+  DLOG << "test DLOG";
+  LOG(paddle_mobile::kLOG_ERROR) << " error occur !";
+  return 0;
+}
--
GitLab