/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "common/types.h"
#include <vector>

namespace paddle_mobile {

// Canonical op type names, spelled exactly as they appear in the serialized
// program description.  Keeping them as shared constants ensures operator
// registration, the fusion passes, and the key table below all agree on
// spelling (a typo in a literal would otherwise fail only at runtime).
const char *G_OP_TYPE_CONV = "conv2d";
const char *G_OP_TYPE_BATCHNORM = "batch_norm";
const char *G_OP_TYPE_BOX_CODER = "box_coder";
const char *G_OP_TYPE_CONCAT = "concat";
const char *G_OP_TYPE_ELEMENTWISE_ADD = "elementwise_add";
const char *G_OP_TYPE_ELEMENTWISE_SUB = "elementwise_sub";
const char *G_OP_TYPE_ELEMENTWISE_MUL = "elementwise_mul";
const char *G_OP_TYPE_FILL_CONSTANT = "fill_constant";
// "fusion_*" ops are produced by graph-fusion passes that fold several
// primitive ops (e.g. conv + elementwise_add + relu) into one kernel.
const char *G_OP_TYPE_FUSION_CONV_ADD_RELU = "fusion_conv_add_relu";
const char *G_OP_TYPE_FUSION_CONV_ADD_PRELU = "fusion_conv_add_prelu";
const char *G_OP_TYPE_FUSION_CONV_ADD_ADD_PRELU = "fusion_conv_add_add_prelu";
const char *G_OP_TYPE_FUSION_CONV_ADD_BN_RELU = "fusion_conv_add_bn_relu";
const char *G_OP_TYPE_FUSION_CONV_BN_ADD_RELU = "fusion_conv_bn_add_relu";
const char *G_OP_TYPE_FUSION_DWCONV_BN_RELU = "fusion_dwconv_bn_relu";
const char *G_OP_TYPE_FUSION_CONV_BN_RELU = "fusion_conv_bn_relu";
const char *G_OP_TYPE_FC = "fusion_fc";
const char *G_OP_TYPE_FUSION_CONV_ADD = "fusion_conv_add";
const char *G_OP_TYPE_LRN = "lrn";
const char *G_OP_TYPE_MUL = "mul";
const char *G_OP_TYPE_MULTICLASS_NMS = "multiclass_nms";
const char *G_OP_TYPE_NORM = "norm";
const char *G_OP_TYPE_POLYGON_BOX_TRANSFORM = "polygon_box_transform";
const char *G_OP_TYPE_POOL2D = "pool2d";
const char *G_OP_TYPE_PRIOR_BOX = "prior_box";
const char *G_OP_TYPE_RELU = "relu";
const char *G_OP_TYPE_RELU6 = "relu6";
const char *G_OP_TYPE_RESHAPE = "reshape";
const char *G_OP_TYPE_RESHAPE2 = "reshape2";
const char *G_OP_TYPE_SIGMOID = "sigmoid";
const char *G_OP_TYPE_SOFTMAX = "softmax";
const char *G_OP_TYPE_TRANSPOSE = "transpose";
const char *G_OP_TYPE_TRANSPOSE2 = "transpose2";
const char *G_OP_TYPE_SPLIT = "split";
const char *G_OP_TYPE_FEED = "feed";
const char *G_OP_TYPE_FETCH = "fetch";
const char *G_OP_TYPE_DEPTHWISE_CONV = "depthwise_conv2d";
const char *G_OP_TYPE_IM2SEQUENCE = "im2sequence";
const char *G_OP_TYPE_DROPOUT = "dropout";
const char *G_OP_TYPE_FUSION_CONV_ADD_BN = "fusion_conv_add_bn";
const char *G_OP_TYPE_FUSION_POOL_BN = "fusion_pool_bn";
const char *G_OP_TYPE_FUSION_ELEMENTWISE_ADD_RELU =
    "fusion_elementwise_add_relu";
const char *G_OP_TYPE_FUSION_FC_RELU = "fusion_fc_relu";
const char *G_OP_TYPE_REGION = "region";
const char *G_OP_TYPE_FUSION_CONV_BN = "fusion_conv_bn";
const char *G_OP_TYPE_CONV_TRANSPOSE = "conv2d_transpose";
const char *G_OP_TYPE_PRELU = "prelu";
const char *G_OP_TYPE_LOOKUP_TABLE = "lookup_table";
const char *G_OP_TYPE_GRU = "gru";
const char *G_OP_TYPE_GRU_UNIT = "gru_unit";
const char *G_OP_TYPE_CRF = "crf_decoding";
const char *G_OP_TYPE_BILINEAR_INTERP = "bilinear_interp";
const char *G_OP_TYPE_FLATTEN = "flatten";
const char *G_OP_TYPE_SHAPE = "shape";
const char *G_OP_TYPE_SUM = "sum";
const char *G_OP_TYPE_TOP_K = "top_k";
const char *G_OP_TYPE_CAST = "cast";
const char *G_OP_TYPE_LOG = "log";
const char *G_OP_TYPE_LOD_RESET = "lod_reset";
const char *G_OP_TYPE_LESS_THAN = "less_than";
const char *G_OP_TYPE_LOGICAL_AND = "logical_and";
const char *G_OP_TYPE_LOGICAL_OR = "logical_or";
const char *G_OP_TYPE_LOGICAL_NOT = "logical_not";
const char *G_OP_TYPE_LOGICAL_XOR = "logical_xor";

// Quantization / dequantization ops and their fused variants.
const char *G_OP_TYPE_QUANTIZE = "quantize";
const char *G_OP_TYPE_DEQUANTIZE = "dequantize";
const char *G_OP_TYPE_FUSION_DEQUANT_BN = "fusion_dequant_bn";
const char *G_OP_TYPE_FUSION_DEQUANT_ADD_BN = "fusion_dequant_add_bn";
const char *G_OP_TYPE_FUSION_DEQUANT_BN_RELU = "fusion_dequant_bn_relu";
const char *G_OP_TYPE_FUSION_DEQUANT_ADD_BN_RELU = "fusion_dequant_add_bn_relu";
const char *G_OP_TYPE_FUSION_DEQUANT_ADD_BN_QUANT =
    "fusion_dequant_add_bn_quant";
const char *G_OP_TYPE_FUSION_DEQUANT_ADD_BN_RELU_QUANT =
    "fusion_dequant_add_bn_relu_quant";

const char *G_OP_TYPE_TANH = "tanh";
const char *G_OP_TYPE_FUSION_DECONV_RELU = "fusion_deconv_relu";
const char *G_OP_TYPE_FUSION_DECONV_ADD = "fusion_deconv_add";
const char *G_OP_TYPE_FUSION_DECONV_ADD_RELU = "fusion_deconv_add_relu";

const char *G_OP_TYPE_SEQUENCE_EXPAND = "sequence_expand";
const char *G_OP_TYPE_SEQUENCE_POOL = "sequence_pool";
const char *G_OP_TYPE_SEQUENCE_SOFTMAX = "sequence_softmax";

// Maps each op type to its input/output slot names:
//   op_type -> { input variable keys, output variable keys }.
// Used when wiring an operator's inputs and outputs from the program
// description.  Note the slot names are op-specific ("Input" vs "X",
// "Output" vs "Out" vs "Y"), so this table is the single source of truth.
std::unordered_map<
    std::string, std::pair<std::vector<std::string>, std::vector<std::string>>>
    op_input_output_key = {
        {G_OP_TYPE_CONV, {{"Input"}, {"Output"}}},
        {G_OP_TYPE_FUSION_DWCONV_BN_RELU, {{"Input"}, {"Out"}}},
        {G_OP_TYPE_FUSION_CONV_BN_RELU, {{"Input"}, {"Out"}}},
        {G_OP_TYPE_PRELU, {{"X", "Alpha"}, {"Out"}}},
        {G_OP_TYPE_FUSION_CONV_ADD, {{"Input"}, {"Out"}}},
        {G_OP_TYPE_RELU, {{"X"}, {"Out"}}},
        {G_OP_TYPE_RELU6, {{"X"}, {"Out"}}},
        {G_OP_TYPE_SOFTMAX, {{"X"}, {"Out"}}},
        {G_OP_TYPE_SIGMOID, {{"X"}, {"Out"}}},
        {G_OP_TYPE_MUL, {{"X"}, {"Out"}}},
        {G_OP_TYPE_ELEMENTWISE_ADD, {{"X", "Y"}, {"Out"}}},
        {G_OP_TYPE_ELEMENTWISE_SUB, {{"X", "Y"}, {"Out"}}},
        {G_OP_TYPE_ELEMENTWISE_MUL, {{"X", "Y"}, {"Out"}}},
        {G_OP_TYPE_POOL2D, {{"X"}, {"Out"}}},
        {G_OP_TYPE_BATCHNORM, {{"X"}, {"Y"}}},
        {G_OP_TYPE_LRN, {{"X"}, {"Out"}}},
        {G_OP_TYPE_CONCAT, {{"X"}, {"Out"}}},
        {G_OP_TYPE_SPLIT, {{"X"}, {"Out"}}},
        {G_OP_TYPE_FEED, {{"X"}, {"Out"}}},
        {G_OP_TYPE_FETCH, {{"X"}, {"Out"}}},
        {G_OP_TYPE_TRANSPOSE, {{"X"}, {"Out"}}},
        // The "2" variants additionally expose the pre-op shape as "XShape".
        {G_OP_TYPE_TRANSPOSE2, {{"X"}, {"Out", "XShape"}}},
        {G_OP_TYPE_BOX_CODER,
         {{"PriorBox", "PriorBoxVar", "TargetBox"}, {"OutputBox"}}},
        {G_OP_TYPE_FUSION_CONV_ADD_BN_RELU, {{"Input"}, {"Out"}}},
        {G_OP_TYPE_FUSION_CONV_BN_ADD_RELU, {{"Input"}, {"Out"}}},
        {G_OP_TYPE_PRIOR_BOX, {{"Image", "Input"}, {"Boxes", "Variances"}}},
        {G_OP_TYPE_MULTICLASS_NMS, {{"BBoxes", "Scores"}, {"Out"}}},
        {G_OP_TYPE_POLYGON_BOX_TRANSFORM, {{"Input"}, {"Output"}}},
        {G_OP_TYPE_FC, {{"X", "Y", "Z"}, {"Out"}}},
        {G_OP_TYPE_RESHAPE, {{"X"}, {"Out"}}},
        {G_OP_TYPE_RESHAPE2, {{"X"}, {"Out", "XShape"}}},
        {G_OP_TYPE_DEPTHWISE_CONV, {{"Input"}, {"Output"}}},
        // fill_constant takes no variable inputs; its value comes from attrs.
        {G_OP_TYPE_FILL_CONSTANT, {{}, {"Out"}}},
        {G_OP_TYPE_FUSION_CONV_ADD_RELU, {{"Input"}, {"Out"}}},
        {G_OP_TYPE_FUSION_CONV_ADD_PRELU, {{"Input"}, {"Out"}}},
        {G_OP_TYPE_FUSION_CONV_ADD_ADD_PRELU, {{"Input"}, {"Out"}}},
        {G_OP_TYPE_IM2SEQUENCE, {{"X"}, {"Out"}}},
        {G_OP_TYPE_DROPOUT, {{"X"}, {"Out"}}},
        {G_OP_TYPE_FUSION_CONV_ADD_BN, {{"Input"}, {"Y"}}},
        {G_OP_TYPE_FUSION_POOL_BN, {{"X"}, {"Y"}}},
        {G_OP_TYPE_FUSION_ELEMENTWISE_ADD_RELU, {{"X", "Y"}, {"Out"}}},
        {G_OP_TYPE_FUSION_FC_RELU, {{"X", "Y", "Z"}, {"Out"}}},
        {G_OP_TYPE_REGION, {{"X"}, {"Out"}}},
        {G_OP_TYPE_FUSION_CONV_BN, {{"Input"}, {"Y"}}},
        {G_OP_TYPE_LOOKUP_TABLE, {{"W", "Ids"}, {"Out"}}},
        {G_OP_TYPE_GRU,
         {{"Input", "H0", "Weight", "Bias"},
          {"BatchGate", "BatchResetHiddenPrev", "BatchHidden", "Hidden"}}},
        {G_OP_TYPE_GRU_UNIT,
         {{"Input", "HiddenPrev", "Weight", "Bias"},
          {"Gate", "ResetHiddenPrev", "Hidden"}}},
        {G_OP_TYPE_CRF, {{"Emission", "Transition", "Label"}, {"ViterbiPath"}}},
        {G_OP_TYPE_BILINEAR_INTERP, {{"OutSize", "X"}, {"Out"}}},
        {G_OP_TYPE_FLATTEN, {{"X"}, {"Out"}}},
        {G_OP_TYPE_SHAPE, {{"Input"}, {"Out"}}},
        {G_OP_TYPE_CONV_TRANSPOSE, {{"Input"}, {"Output"}}},
        {G_OP_TYPE_SUM, {{"X"}, {"Out"}}},
        {G_OP_TYPE_TOP_K, {{"X"}, {"Out", "Indices"}}},
        {G_OP_TYPE_CAST, {{"X"}, {"Out"}}},
        {G_OP_TYPE_QUANTIZE, {{"X"}, {"Out", "OutScale"}}},
        {G_OP_TYPE_DEQUANTIZE, {{"X", "Scale"}, {"Out"}}},
        {G_OP_TYPE_FUSION_DEQUANT_BN, {{"X", "Scale"}, {"Out"}}},
        {G_OP_TYPE_FUSION_DEQUANT_ADD_BN, {{"X", "Scale"}, {"Out"}}},
        {G_OP_TYPE_FUSION_DEQUANT_BN_RELU, {{"X", "Scale"}, {"Out"}}},
        {G_OP_TYPE_FUSION_DEQUANT_ADD_BN_RELU, {{"X", "Scale"}, {"Out"}}},
        {G_OP_TYPE_FUSION_DEQUANT_ADD_BN_RELU_QUANT,
         {{"X", "Scale"}, {"Out", "OutScale"}}},
        {G_OP_TYPE_FUSION_DEQUANT_ADD_BN_QUANT,
         {{"X", "Scale"}, {"Out", "OutScale"}}},
        {G_OP_TYPE_TANH, {{"X"}, {"Out"}}},
        {G_OP_TYPE_FUSION_DECONV_RELU, {{"Input"}, {"Out"}}},
        {G_OP_TYPE_FUSION_DECONV_ADD, {{"Input"}, {"Out"}}},
        {G_OP_TYPE_FUSION_DECONV_ADD_RELU, {{"Input"}, {"Out"}}},
        {G_OP_TYPE_SEQUENCE_EXPAND, {{"X", "Y"}, {"Out"}}},
        {G_OP_TYPE_SEQUENCE_POOL, {{"X"}, {"Out"}}},
        {G_OP_TYPE_SEQUENCE_SOFTMAX, {{"X"}, {"Out"}}},
        {G_OP_TYPE_NORM, {{"X"}, {"Out", "Norm"}}},
        {G_OP_TYPE_LOG, {{"X"}, {"Out"}}},
        {G_OP_TYPE_LOD_RESET, {{"X", "Y"}, {"Out"}}},
        {G_OP_TYPE_LESS_THAN, {{"X", "Y"}, {"Out"}}},
        {G_OP_TYPE_LOGICAL_AND, {{"X", "Y"}, {"Out"}}},
        {G_OP_TYPE_LOGICAL_OR, {{"X", "Y"}, {"Out"}}},
        {G_OP_TYPE_LOGICAL_XOR, {{"X", "Y"}, {"Out"}}},
        {G_OP_TYPE_LOGICAL_NOT, {{"X"}, {"Out"}}}};

}  // namespace paddle_mobile