/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <map>
#include <string>
#include <unordered_map>
#include <utility>
#include <vector>

namespace paddle_mobile {
enum class Precision : int { FP32 = 0, FP16 = 1 };

typedef int16_t half;

template <Precision p>
struct PrecisionTrait {
  typedef void ptype;
};

template <>
struct PrecisionTrait<Precision::FP32> {
  typedef float ptype;
};
template <>
struct PrecisionTrait<Precision::FP16> {
  typedef half ptype;
};
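
// Usage sketch (illustrative only; the PType alias below is not declared in
// this header): PrecisionTrait maps a Precision value to its storage type at
// compile time, so kernels can be written generically over precision, e.g.
//   template <Precision P>
//   using PType = typename PrecisionTrait<P>::ptype;
// PType<Precision::FP32> resolves to float; PType<Precision::FP16> resolves
// to half, which is stored as int16_t.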

//! device type
enum DeviceTypeEnum {
  kINVALID = -1,
  kCPU = 0,
  kFPGA = 1,
  kGPU_MALI = 2,
  kGPU_CL = 3
};

template <DeviceTypeEnum T>
struct DeviceType {};

typedef DeviceType<kCPU> CPU;
typedef DeviceType<kFPGA> FPGA;
typedef DeviceType<kGPU_MALI> GPU_MALI;
typedef DeviceType<kGPU_CL> GPU_CL;
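
// Usage sketch (illustrative only; Executor is assumed here as an example,
// not declared in this header): DeviceType is an empty tag type, so CPU,
// FPGA, GPU_MALI and GPU_CL are normally used as template arguments to pick
// a backend at compile time, e.g.
//   Executor<CPU, Precision::FP32> executor;
// rather than being instantiated for data of their own.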

//! data type
enum DataType {
  PM_INVALID = -1,
  PM_HALF = 0,
  PM_FLOAT = 1,
  PM_DOUBLE = 2,
  PM_INT8 = 3,
  PM_INT16 = 4,
  PM_INT32 = 5,
  PM_INT64 = 6,
  PM_UINT8 = 7,
  PM_UINT16 = 8,
  PM_UINT32 = 9,
  PM_STRING = 10,
  PM_BOOL = 11,
  PM_SHAPE = 12,
  PM_TENSOR = 13
};
//! status type
enum PMStatus {
  PMSuccess = 0xFF,        /*!< No errors */
  PMNotInitialized = 0x01, /*!< Data not initialized. */
  PMInvalidValue = 0x02,   /*!< Incorrect variable value. */
  PMMemAllocFailed = 0x03, /*!< Memory allocation error. */
  PMUnKownError = 0x04,    /*!< Unknown error. */
  PMOutOfAuthority = 0x05, /*!< Tried to modify data not owned by caller. */
  PMOutOfMem = 0x06,       /*!< Out of memory. */
  PMUnImplError = 0x07,    /*!< Unimplemented error. */
  PMWrongDevice = 0x08     /*!< Incorrect device. */
};

enum RoundType {
  ROUND_NEAREST_AWAY_ZERO = 0,
  ROUND_NEAREST_TOWARDS_ZERO = 1,
  ROUND_NEAREST_TO_EVEN = 2,
};

enum ActivationType {
  IDENTITY = 0,
  RELU = 1,
  RELU6 = 2,
  PRELU = 3,
  LEAKY_RELU = 4,
  TANH = 5,
  SIGMOID = 6,
  LOG = 7,
};

enum PoolingType {
  MAX = 0,
  AVG = 1,
  SUM = 2,
  FIRST = 3,
  LAST = 4,
};

enum PowerMode {
  PERFORMANCE_PRIORITY = 0,  // let threads run on big cores if
                             // thread_num <= big_cores_num,
                             // otherwise the power mode will be
                             // set to AUTO and all threads are
                             // scheduled by system
  EFFICIENCY_PRIORITY = 1,   // let threads run on little cores if
                             // thread_num <= little_cores_num,
                             // otherwise the power mode will be
                             // set to AUTO and all threads are
                             // scheduled by system
  PERFORMANCE_ONLY = 2,      // force threads to run on big cores;
                             // extra threads beyond the number of
                             // big cores are ignored
  EFFICIENCY_ONLY = 3,       // force threads to run on little cores;
                             // extra threads beyond the number of
                             // little cores are ignored
  AUTO = 4,                  // scheduled by system
};
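
// Usage sketch (illustrative only; SetThreadNum taking a PowerMode is an
// assumed API, not declared in this header): a caller typically chooses a
// power mode together with a thread count, e.g.
//   engine.SetThreadNum(4, PERFORMANCE_PRIORITY);
// With the *_PRIORITY modes the runtime falls back to AUTO when the thread
// count exceeds the matching core cluster; the *_ONLY modes instead drop the
// excess threads, as described above.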

enum MemoryOptimizationLevel {
  NoMemoryOptimization = 0,
  MemoryOptimizationWithoutFeeds = 1,
  FullMemoryOptimization = 2,
};

struct PaddleMobileConfigInternal {
  bool load_when_predict = false;
  MemoryOptimizationLevel memory_optimization_level = FullMemoryOptimization;
};

extern const char *G_OP_TYPE_CONV;
extern const char *G_OP_TYPE_BATCHNORM;
extern const char *G_OP_TYPE_BOX_CODER;
extern const char *G_OP_TYPE_CONCAT;
extern const char *G_OP_TYPE_ELEMENTWISE_ADD;
extern const char *G_OP_TYPE_ELEMENTWISE_SUB;
extern const char *G_OP_TYPE_ELEMENTWISE_MUL;
extern const char *G_OP_TYPE_FUSION_CONV_ADD_RELU;
extern const char *G_OP_TYPE_FUSION_CONV_ADD_PRELU;
extern const char *G_OP_TYPE_FUSION_CONV_ADD_ADD_PRELU;
extern const char *G_OP_TYPE_FC;
extern const char *G_OP_TYPE_FUSION_CONV_ADD;
extern const char *G_OP_TYPE_FUSION_CONV_ADD_BN_RELU;
extern const char *G_OP_TYPE_FUSION_CONV_BN_ADD_RELU;
extern const char *G_OP_TYPE_FUSION_DWCONV_BN_RELU;
extern const char *G_OP_TYPE_FUSION_CONV_BN_RELU;
extern const char *G_OP_TYPE_FUSION_CONV_RELU;

extern const char *G_OP_TYPE_GRU;
extern const char *G_OP_TYPE_GRU_UNIT;
extern const char *G_OP_TYPE_CRF;
extern const char *G_OP_TYPE_BILINEAR_INTERP;
extern const char *G_OP_TYPE_NEAREST_INTERP;
extern const char *G_OP_TYPE_FLATTEN;
extern const char *G_OP_TYPE_FLATTEN2;
extern const char *G_OP_TYPE_SHAPE;
extern const char *G_OP_TYPE_LRN;
extern const char *G_OP_TYPE_MUL;
extern const char *G_OP_TYPE_MULTICLASS_NMS;
extern const char *G_OP_TYPE_NORM;
extern const char *G_OP_TYPE_POOL2D;
extern const char *G_OP_TYPE_PRIOR_BOX;
extern const char *G_OP_TYPE_RELU;
extern const char *G_OP_TYPE_RELU6;
extern const char *G_OP_TYPE_LEAKY_RELU;
extern const char *G_OP_TYPE_RESHAPE;
extern const char *G_OP_TYPE_SCALE;
extern const char *G_OP_TYPE_SIGMOID;
extern const char *G_OP_TYPE_SOFTMAX;
extern const char *G_OP_TYPE_TRANSPOSE;
extern const char *G_OP_TYPE_SPLIT;
extern const char *G_OP_TYPE_FEED;
extern const char *G_OP_TYPE_FETCH;
extern const char *G_OP_TYPE_DEPTHWISE_CONV;
extern const char *G_OP_TYPE_IM2SEQUENCE;
extern const char *G_OP_TYPE_DROPOUT;

extern const char *G_OP_TYPE_FUSION_CONV_ADD_BN;
extern const char *G_OP_TYPE_FUSION_POOL_BN;
extern const char *G_OP_TYPE_FUSION_ELEMENTWISE_ADD_RELU;
extern const char *G_OP_TYPE_FUSION_FC_RELU;
extern const char *G_OP_TYPE_REGION;
extern const char *G_OP_TYPE_FUSION_CONV_BN;
extern const char *G_OP_TYPE_CONV_TRANSPOSE;
extern const char *G_OP_TYPE_PRELU;
extern const char *G_OP_TYPE_SUM;
extern const char *G_OP_TYPE_TOP_K;
extern const char *G_OP_TYPE_CAST;
extern const char *G_OP_TYPE_LOG;
extern const char *G_OP_TYPE_LOD_RESET;
extern const char *G_OP_TYPE_LESS_THAN;
extern const char *G_OP_TYPE_LOGICAL_AND;
extern const char *G_OP_TYPE_LOGICAL_OR;
extern const char *G_OP_TYPE_LOGICAL_NOT;
extern const char *G_OP_TYPE_LOGICAL_XOR;
extern const char *G_OP_TYPE_WRITE_TO_ARRAY;
extern const char *G_OP_TYPE_READ_FROM_ARRAY;
extern const char *G_OP_TYPE_IS_EMPTY;
extern const char *G_OP_TYPE_INCREMENT;

extern const char *G_OP_TYPE_QUANTIZE;
extern const char *G_OP_TYPE_DEQUANTIZE;
extern const char *G_OP_TYPE_FUSION_DEQUANT_BN;
extern const char *G_OP_TYPE_FUSION_DEQUANT_ADD_BN;
extern const char *G_OP_TYPE_FUSION_DEQUANT_BN_RELU;
extern const char *G_OP_TYPE_FUSION_DEQUANT_ADD_BN_RELU;
extern const char *G_OP_TYPE_FUSION_DEQUANT_ADD_BN_QUANT;
extern const char *G_OP_TYPE_FUSION_DEQUANT_ADD_BN_RELU_QUANT;

extern const char *G_OP_TYPE_TANH;
extern const char *G_OP_TYPE_FUSION_DECONV_RELU;

extern const char *G_OP_TYPE_FUSION_DECONV_ADD;
extern const char *G_OP_TYPE_FUSION_DECONV_ADD_RELU;

extern const char *G_OP_TYPE_SEQUENCE_EXPAND;
extern const char *G_OP_TYPE_SEQUENCE_POOL;
extern const char *G_OP_TYPE_SEQUENCE_SOFTMAX;

extern const char *G_OP_TYPE_SLICE;
extern const char *G_OP_TYPE_ANCHOR_GENERATOR;
extern const char *G_OP_TYPE_GENERATE_PROPOSALS;
extern const char *G_OP_TYPE_PSROI_POOL;
extern const char *G_OP_TYPE_ROIALIGN_POOL;
extern const char *G_OP_TYPE_ROI_PERSPECTIVE;
extern const char *G_OP_TYPE_PAD2D;
extern const char *G_OP_TYPE_FUSION_DECONV_ADD_BN_RELU;
extern const char *G_OP_TYPE_FUSION_DECONV_ADD_BN;
extern const char *G_OP_TYPE_FUSION_DECONV_BN_RELU;

extern std::unordered_map<
    std::string, std::pair<std::vector<std::string>, std::vector<std::string>>>
    op_input_output_key;
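
// Usage sketch (illustrative only): op_input_output_key maps an operator
// type string to the pair {input keys, output keys} that operator expects,
// e.g.
//   const auto &io = op_input_output_key.at(G_OP_TYPE_CONV);
//   const std::vector<std::string> &input_keys = io.first;
//   const std::vector<std::string> &output_keys = io.second;
// The table itself is defined in the accompanying source file (extern here).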

typedef std::map<std::string, std::vector<std::string>> VariableNameMap;

}  // namespace paddle_mobile