diff --git a/src/common/types.h b/src/common/types.h
index cddd96f18fb2224fb93294049c577dd3653ca39c..0c0a6ac1c04cbc9e3ddb916fc656280c36e47d5f 100644
--- a/src/common/types.h
+++ b/src/common/types.h
@@ -117,6 +117,5 @@ static std::unordered_map<
     {G_OP_TYPE_PRIOR_BOX, {{"Image", "Input"}, {"Boxes", "Variances"}}},
     {G_OP_TYPE_MULTICLASS_NMS, {{"BBoxes", "Scores"}, {"Out"}}},
     {G_OP_TYPE_RESHAPE, {{"X"}, {"Out"}}},
-    {G_OP_TYPE_DEPTHWISE_CONV, {{"Input"}, {"Output"}}}
-};
+    {G_OP_TYPE_DEPTHWISE_CONV, {{"Input"}, {"Output"}}}};
 }  // namespace paddle_mobile
diff --git a/src/framework/operator.h b/src/framework/operator.h
index ffc23eaff65b4167b48cd7b903380287895d61ed..5fd88a6b209a33dd5554ff11ddf54514f53387ec 100644
--- a/src/framework/operator.h
+++ b/src/framework/operator.h
@@ -145,7 +145,9 @@ class FusionOpMatcher : PaddleMobileObject {

   virtual std::string Type() = 0;

-  virtual void FolderNodes(Node *node, std::vector<std::shared_ptr<Node>> *removed_nodes) {
+  virtual void FolderNodes(
+      Node *node,
+      std::vector<std::shared_ptr<Node>> *removed_nodes) {
     node->Folder(node_.Depth(), Type(), {}, removed_nodes);
   }

diff --git a/src/framework/program/program-optimize/node.cpp b/src/framework/program/program-optimize/node.cpp
index 05d30143ecf32ca57f8841285d626c76e6912a58..eba5e8b6504e04ec3f9d0d235cc04efd4937baae 100644
--- a/src/framework/program/program-optimize/node.cpp
+++ b/src/framework/program/program-optimize/node.cpp
@@ -254,8 +254,7 @@ void Node::Folder(
     std::shared_ptr<framework::OpDesc> op_desc,
     std::vector<std::shared_ptr<Node>> *outputs, uint index,
     std::map<std::string, std::pair<std::string, std::string>> *change,
-    Node *begin_node,
-    std::vector<std::shared_ptr<Node>> *removed_nodes) {
+    Node *begin_node, std::vector<std::shared_ptr<Node>> *removed_nodes) {
   if (change->find(this->type_) != change->end()) {
     auto change_pair = (*change)[this->type_];
     op_desc->GetInputs()[change_pair.second] =
@@ -269,7 +268,8 @@ void Node::Folder(
     --index;
     for (auto output : outputs_) {
       removed_nodes->push_back(output);
-      output->Folder(op_desc, outputs, index, change, begin_node, removed_nodes);
+      output->Folder(op_desc, outputs, index, change, begin_node,
+                     removed_nodes);
     }
   } else {
     for (auto &op_output : this->op_desc_->outputs_) {
@@ -282,7 +282,6 @@ void Node::Folder(

       if (iter != output->inputs_.end()) {
         output->inputs_.erase(iter);
-
       }
       output->inputs_.push_back(begin_node);
       outputs->push_back(output);
diff --git a/src/framework/program/program-optimize/node.h b/src/framework/program/program-optimize/node.h
index 444fe6c3ddcdcf209dc1d098cfb99bf58886be62..84a7477dc4c952d64df3b7364bf3072e31dec49b 100644
--- a/src/framework/program/program-optimize/node.h
+++ b/src/framework/program/program-optimize/node.h
@@ -43,7 +43,8 @@ class Node : PaddleMobileObject {
   uint Depth(uint begin = 0);
   Node &Folder(
       uint size, std::string type,
-      std::map<std::string, std::pair<std::string, std::string>> change_map, std::vector<std::shared_ptr<Node>> *removed_nodes);
+      std::map<std::string, std::pair<std::string, std::string>> change_map,
+      std::vector<std::shared_ptr<Node>> *removed_nodes);
   std::vector<std::shared_ptr<framework::OpDesc>> OpDescs(uint size);
   std::vector<std::shared_ptr<framework::OpDesc>> OpDescs();
   std::shared_ptr<framework::OpDesc> OpDescOfNode() { return op_desc_; }
@@ -63,8 +64,7 @@ class Node : PaddleMobileObject {
       std::shared_ptr<framework::OpDesc> op_desc,
       std::vector<std::shared_ptr<Node>> *outputs, uint index,
       std::map<std::string, std::pair<std::string, std::string>> *change,
-      Node *begin_node,
-      std::vector<std::shared_ptr<Node>> *removed_nodes);
+      Node *begin_node, std::vector<std::shared_ptr<Node>> *removed_nodes);
   std::shared_ptr<framework::OpDesc> op_desc_;
   std::string ToString(std::string blank, const Node *node) const;
   std::vector<std::shared_ptr<Node>> outputs_;
diff --git a/src/framework/program/program-optimize/program_optimize.cpp b/src/framework/program/program-optimize/program_optimize.cpp
index 88d6b1d47ad26fe67d53efdd68c61bba58d36203..9f657d5369fd9d52d63b709c3ad438ff59e46f32 100644
--- a/src/framework/program/program-optimize/program_optimize.cpp
+++ b/src/framework/program/program-optimize/program_optimize.cpp
@@ -31,7 +31,6 @@ std::shared_ptr<framework::ProgramDesc> ProgramOptimize::FushionOptimize(

     std::unordered_map<std::string, std::vector<std::shared_ptr<Node>>>
         type_map;
-
     std::vector<std::shared_ptr<Node>> nodes;

     std::shared_ptr<Node> begin_node;
@@ -41,7 +40,8 @@ std::shared_ptr<framework::ProgramDesc> ProgramOptimize::FushionOptimize(
       auto op = block->Ops()[j];
       auto op_type = op->Type();
       if (op_input_output_key.find(op->Type()) == op_input_output_key.end()) {
-        LOG(kLOG_ERROR) << "has not support op return null " << " op type: " << op->Type();
+        LOG(kLOG_ERROR) << "has not support op return null "
+                        << " op type: " << op->Type();
         return nullptr;
       }

@@ -97,14 +97,15 @@ std::shared_ptr<framework::ProgramDesc> ProgramOptimize::FushionOptimize(

         for (int j = 0; j < removed_nodes.size(); ++j) {
           auto removed_node = removed_nodes[j];
-          auto removed_ite = std::find(nodes.begin(), nodes.end(), removed_node);
+          auto removed_ite =
+              std::find(nodes.begin(), nodes.end(), removed_node);
           nodes.erase(removed_ite);
         }
       }
     }
   }

-// DLOG << "node: \n" << *begin_node;
+  // DLOG << "node: \n" << *begin_node;

   std::vector<std::shared_ptr<framework::OpDesc>> op_descs;
   //  bool can_splite = begin_node->CanSplit({G_OP_TYPE_CONV,
@@ -113,7 +114,7 @@ std::shared_ptr<framework::ProgramDesc> ProgramOptimize::FushionOptimize(
     auto &node = nodes[m];
     op_descs.push_back(node->op_desc_);
   }
-//  GenerateOps(&op_descs, begin_node.get());
+  //  GenerateOps(&op_descs, begin_node.get());
   block->ops_ = op_descs;
 }

@@ -128,7 +129,6 @@ std::shared_ptr<framework::ProgramDesc> ProgramOptimize::FushionOptimize(
 void ProgramOptimize::GenerateOps(
     std::vector<std::shared_ptr<framework::OpDesc>> *op_desc, Node *input_node,
     Node *current_node) {
-
   if (current_node->inputs_.size() > 1 &&
       input_node != current_node->inputs_.back()) {
     DLOG << " current type " << current_node->type_;
diff --git a/src/operators/fusion_conv_add.cpp b/src/operators/fusion_conv_add.cpp
index ffddd25dff708e0feaf3fd0b35e0150b2fe3391d..433e3ee741d37fefead87fc6d08723fde8142387 100644
--- a/src/operators/fusion_conv_add.cpp
+++ b/src/operators/fusion_conv_add.cpp
@@ -17,9 +17,7 @@
 namespace paddle_mobile {
 namespace operators {
 template <typename DeviceType, typename T>
-void FushionConvAddOp<DeviceType, T>::InferShape() const {
-
-}
+void FushionConvAddOp<DeviceType, T>::InferShape() const {}
 template class FushionConvAddOp<CPU, float>;
 }  // namespace operators
 }  // namespace paddle_mobile
diff --git a/src/operators/fusion_conv_add.h b/src/operators/fusion_conv_add.h
index 92ed236d8b52df73e79284ce9edda5778fe13bd3..c6a1d9fdff246084542d50230a7649e938143c4a 100644
--- a/src/operators/fusion_conv_add.h
+++ b/src/operators/fusion_conv_add.h
@@ -31,10 +31,13 @@ class FusionConvAddMatcher : public framework::FusionOpMatcher {
     node_ > std::make_shared<framework::Node>(G_OP_TYPE_ELEMENTWISE_ADD);
   }

-  void FolderNodes(framework::Node *node, std::vector<std::shared_ptr<framework::Node>> *removed_nodes) {
+  void FolderNodes(
+      framework::Node *node,
+      std::vector<std::shared_ptr<framework::Node>> *removed_nodes) {
     vector<std::shared_ptr<framework::OpDesc>> origin_descs =
-            node->OpDescs(node_.Depth());
-    node->Folder(node_.Depth(), Type(), {{G_OP_TYPE_ELEMENTWISE_ADD, {"Y", "Y"}}}, removed_nodes);
+        node->OpDescs(node_.Depth());
+    node->Folder(node_.Depth(), Type(),
+                 {{G_OP_TYPE_ELEMENTWISE_ADD, {"Y", "Y"}}}, removed_nodes);
   }

   std::string Type() { return G_OP_TYPE_CONV_ADD; }
@@ -44,14 +47,13 @@ template <typename DeviceType, typename T>
 class FushionConvAddOp : public framework::OperatorWithKernel<DeviceType> {
  public:
   FushionConvAddOp(const string &type, const VariableNameMap &inputs,
-                  const VariableNameMap &outputs,
-                  const framework::AttributeMap attrs,
-                  std::shared_ptr<framework::Scope> scope)
-        : framework::OperatorWithKernel<DeviceType>(type, inputs, outputs, attrs,
-                                                    scope) {}
+                   const VariableNameMap &outputs,
+                   const framework::AttributeMap attrs,
+                   std::shared_ptr<framework::Scope> scope)
+      : framework::OperatorWithKernel<DeviceType>(type, inputs, outputs, attrs,
+                                                  scope) {}

-  void RunImpl() const {
-  }
+  void RunImpl() const {}

   using framework::OperatorWithKernel<DeviceType>::OperatorWithKernel;
   void InferShape() const override;
@@ -60,7 +62,7 @@ class FushionConvAddOp : public framework::OperatorWithKernel<DeviceType> {
   //  FushionFcParam param_;
 };

-//static framework::FusionOpRegistrar fc_registrar(new FusionConvAddMatcher());
+// static framework::FusionOpRegistrar fc_registrar(new FusionConvAddMatcher());

 }  // namespace operators
 }  // namespace paddle_mobile
diff --git a/src/operators/fusion_conv_add_relu_op.h b/src/operators/fusion_conv_add_relu_op.h
index 9fc9fdee5e834e9db1971abda05c102a07c29673..43279e1f995f4b18ca976e51d1a4f81847c975b9 100644
--- a/src/operators/fusion_conv_add_relu_op.h
+++ b/src/operators/fusion_conv_add_relu_op.h
@@ -28,7 +28,9 @@ class FushionConvAddReluOpMatcher : public framework::FusionOpMatcher {
             std::make_shared<framework::Node>(G_OP_TYPE_RELU);
   }

-  void FolderNodes(framework::Node *node, std::vector<std::shared_ptr<framework::Node>> *removed_nodes) {
+  void FolderNodes(
+      framework::Node *node,
+      std::vector<std::shared_ptr<framework::Node>> *removed_nodes) {
     std::vector<std::shared_ptr<framework::OpDesc>> origin_descs =
         node->OpDescs(node_.Depth());
     node->Folder(node_.Depth(), Type(),
diff --git a/src/operators/fusion_fc_op.h b/src/operators/fusion_fc_op.h
index a42d100f9241ea7eee7658fe84e1629863c864e3..a0eeebca5f5f028bec75703a4a4befeb18e374fe 100644
--- a/src/operators/fusion_fc_op.h
+++ b/src/operators/fusion_fc_op.h
@@ -32,7 +32,9 @@ class FusionFcMatcher : public framework::FusionOpMatcher {
     node_ > std::make_shared<framework::Node>(G_OP_TYPE_ELEMENTWISE_ADD);
   }

-  void FolderNodes(framework::Node *node, std::vector<std::shared_ptr<framework::Node>> *removed_nodes) {
+  void FolderNodes(
+      framework::Node *node,
+      std::vector<std::shared_ptr<framework::Node>> *removed_nodes) {
     vector<std::shared_ptr<framework::OpDesc>> origin_descs =
         node->OpDescs(node_.Depth());
     node->Folder(node_.Depth(), Type(),
@@ -65,7 +67,7 @@ class FushionFcOp : public framework::OperatorWithKernel<DeviceType> {
   FushionFcParam param_;
 };

-//static framework::FusionOpRegistrar fc_registrar(new FusionFcMatcher());
+// static framework::FusionOpRegistrar fc_registrar(new FusionFcMatcher());

 }  // namespace operators
 }  // namespace paddle_mobile
diff --git a/src/operators/kernel/mali/conv_kernel.cpp b/src/operators/kernel/mali/conv_kernel.cpp
index 0972880cb373dc313135b7d452f7151df30ac971..75672549583ebc15867e5f279d5ce3a7137e5b70 100644
--- a/src/operators/kernel/mali/conv_kernel.cpp
+++ b/src/operators/kernel/mali/conv_kernel.cpp
@@ -17,10 +17,9 @@ limitations under the License. */
 namespace paddle_mobile {
 namespace operators {

-    template<>
-    void ConvKernel<GPU_MALI, float>::Compute(const ConvParam &param) const
-    {}
+template <>
+void ConvKernel<GPU_MALI, float>::Compute(const ConvParam &param) const {}

-    template class ConvKernel<GPU_MALI, float>;
-}
+template class ConvKernel<GPU_MALI, float>;
+}  // namespace operators
 }  // namespace paddle_mobile
diff --git a/test/framework/test_optimize.cpp b/test/framework/test_optimize.cpp
index 8915941562a5b4d3626ae9bde68d0be400d78e2c..685a2eca6e090220f7c48fff036f1f84ed7c8fce 100644
--- a/test/framework/test_optimize.cpp
+++ b/test/framework/test_optimize.cpp
@@ -12,10 +12,10 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License. */

-#include "io.h"
 #include "../test_helper.h"
 #include "framework/program/program-optimize/node.h"
 #include "framework/program/program-optimize/program_optimize.h"
+#include "io.h"

 int main() {
   paddle_mobile::Loader loader;
@@ -25,7 +25,7 @@ int main() {
   //  program.originProgram->Description("origin");
   auto optimize_program = optimize.FushionOptimize(program.originProgram);
   if (optimize_program != nullptr) {
-//    optimize_program->Description("optimize");
+    //    optimize_program->Description("optimize");
   } else {
     LOG(paddle_mobile::kLOG_ERROR) << "optimize_program is null";
   }
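
Aside from the mechanical clang-format reflow, the one piece of API this diff touches repeatedly is the removed_nodes out-parameter threaded through FusionOpMatcher::FolderNodes and Node::Folder: a matcher folds a chain of ops into a single fused node, records every node it swallowed, and ProgramOptimize::FushionOptimize then erases those nodes from its working list. Below is a minimal, self-contained sketch of that bookkeeping pattern; Node and ConvAddMatcher here are hypothetical stand-ins for illustration, not the real paddle-mobile classes.

// Hedged sketch: hypothetical stand-ins for framework::Node / FusionOpMatcher,
// showing only the removed_nodes bookkeeping visible in this diff.
#include <algorithm>
#include <iostream>
#include <memory>
#include <string>
#include <vector>

struct Node {
  explicit Node(std::string t) : type(std::move(t)) {}
  std::string type;
  std::vector<std::shared_ptr<Node>> outputs;
};

// Mirrors the FolderNodes(node, removed_nodes) shape: fold successors into the
// anchor node and report every node that was swallowed by the fusion.
struct ConvAddMatcher {
  void FolderNodes(const std::shared_ptr<Node> &node,
                   std::vector<std::shared_ptr<Node>> *removed_nodes) {
    for (auto &successor : node->outputs) {
      removed_nodes->push_back(successor);
    }
    node->type = "fusion_conv_add";
    node->outputs.clear();
  }
};

int main() {
  auto conv = std::make_shared<Node>("conv2d");
  auto add = std::make_shared<Node>("elementwise_add");
  conv->outputs.push_back(add);

  // Working list kept by the optimizer, analogous to the nodes vector in
  // FushionOptimize.
  std::vector<std::shared_ptr<Node>> nodes{conv, add};

  std::vector<std::shared_ptr<Node>> removed_nodes;
  ConvAddMatcher matcher;
  matcher.FolderNodes(conv, &removed_nodes);

  // Same reconciliation as the loop in program_optimize.cpp: drop every node
  // the matcher reported as folded away.
  for (const auto &removed : removed_nodes) {
    auto it = std::find(nodes.begin(), nodes.end(), removed);
    if (it != nodes.end()) {
      nodes.erase(it);
    }
  }

  for (const auto &n : nodes) {
    std::cout << n->type << std::endl;  // prints: fusion_conv_add
  }
  return 0;
}

Keeping FolderNodes ignorant of the optimizer's own node list (it only reports what it removed) leaves the erase loop in FushionOptimize as the single place where that list is reconciled, which is the design choice the new parameter encodes.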