op_param.h 51.0 KB
Newer Older
W
wangliu 已提交
1 2 3 4 5 6 7 8 9 10 11 12 13
/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
朔-望's avatar
朔-望 已提交
14

15
#pragma once
朔-望's avatar
朔-望 已提交
16

E
eclipsess 已提交
17
#include <string>
W
wangliu 已提交
18
#include <vector>
L
liuruilong 已提交
19
#include "common/log.h"
朔-望's avatar
朔-望 已提交
20
#include "common/type_define.h"
N
nhzlx 已提交
21
#include "common/types.h"
朔-望's avatar
朔-望 已提交
22 23 24 25
#include "framework/lod_tensor.h"
#include "framework/scope.h"
#include "framework/tensor.h"
#include "framework/variable.h"
Z
zhangyang 已提交
26
#ifdef PADDLE_MOBILE_FPGA
H
hanbuhe 已提交
27
#include "fpga/api.h"
Z
zhangyang 已提交
28
#endif
朔-望's avatar
朔-望 已提交
29 30

namespace paddle_mobile {
朔-望's avatar
朔-望 已提交
31 32
namespace operators {

W
wangliu 已提交
33 34 35 36 37 38 39
using framework::Attribute;
using framework::AttributeMap;
using framework::LoDTensor;
using framework::Scope;
using framework::Tensor;
using std::string;
using std::vector;
朔-望's avatar
朔-望 已提交
40

N
nhzlx 已提交
41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73
// Maps a device type (CPU / FPGA / GPU_MALI) to the tensor types used by
// operator parameters on that device:
//   gtype: the concrete type held in a framework::Variable.
//   rtype: the type exposed by getters (a base of, or equal to, gtype).
template <typename Dtype>
struct DtypeTensorTrait {
  // Fallback for unrecognized device types.
  // FIX: the primary template previously declared `ptype`, while every
  // specialization and every user refers to `gtype`; declare `gtype` here so
  // the member name is consistent across all instantiations.
  typedef void gtype;
  typedef void rtype;
};

template <>
struct DtypeTensorTrait<CPU> {
  // This is the type we obtained in variable.
  typedef framework::LoDTensor gtype;
  // This type will be the parent class type
  // or the same type.
  typedef framework::Tensor rtype;
};

template <>
struct DtypeTensorTrait<FPGA> {
  // This is the type we obtained in variable.
  typedef framework::LoDTensor gtype;
  // This type will be the parent class type
  // or the same type.
  typedef framework::Tensor rtype;
};

template <>
struct DtypeTensorTrait<GPU_MALI> {
  // This is the type we obtained in variable.
  typedef framework::LoDTensor gtype;
  // This type will be the parent class type
  // or the same type.
  typedef framework::Tensor rtype;
};

L
liuruilong 已提交
74
class OpParam {
朔-望's avatar
朔-望 已提交
75
 protected:
76 77 78 79 80
  template <typename T>
  static T *InputAlphaFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Alpha", inputs, scope);
  }

81 82 83 84 85 86 87 88 89 90 91 92 93 94 95
  template <typename T>
  static T *InputFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Input", inputs, scope);
  }

  template <typename T>
  static T *InputXFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("X", inputs, scope);
  }

  template <typename T>
  static T *InputYFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Y", inputs, scope);
  }

E
eclipsess 已提交
96 97 98 99 100
  template <typename T>
  static T *InputZFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Z", inputs, scope);
  }

101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117
  template <typename T>
  static T *InputBiasFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Bias", inputs, scope);
  }
  template <typename T>
  static T *InputVarianceFrom(const VariableNameMap &inputs,
                              const Scope &scope) {
    return GetVarValue<T>("Variance", inputs, scope);
  }
  template <typename T>
  static T *InputMeanFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Mean", inputs, scope);
  }
  template <typename T>
  static T *InputScaleFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Scale", inputs, scope);
  }
E
eclipsess 已提交
118 119 120 121
  template <typename T>
  static T *InputImageFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Image", inputs, scope);
  }
E
eclipsess 已提交
122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137
  template <typename T>
  static T *InputPriorBoxFrom(const VariableNameMap &inputs,
                              const Scope &scope) {
    return GetVarValue<T>("PriorBox", inputs, scope);
  }
  template <typename T>
  static T *InputPriorBoxVarFrom(const VariableNameMap &inputs,
                                 const Scope &scope) {
    return GetVarValue<T>("PriorBoxVar", inputs, scope);
  }
  // LoDTensor but now use Tensor
  template <typename T>
  static T *InputTargetBoxFrom(const VariableNameMap &inputs,
                               const Scope &scope) {
    return GetVarValue<T>("TargetBox", inputs, scope);
  }
138

E
eclipsess 已提交
139 140 141 142 143 144 145 146 147 148
  template <typename T>
  static T *InputBBoxesFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("BBoxes", inputs, scope);
  }

  template <typename T>
  static T *InputScoresFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Scores", inputs, scope);
  }

E
eclipsess 已提交
149 150 151 152
  template <typename T>
  static T *InputShapeFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Shape", inputs, scope);
  }
E
eclipsess 已提交
153

154
  template <typename T>
W
wangliu 已提交
155 156
  static vector<T *> InputMultiFrom(const VariableNameMap &inputs,
                                    const Scope &scope) {
157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174
    return GetMultiVarValue<T>("X", inputs, scope);
  }

  template <typename T>
  static T *OutputFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Output", outputs, scope);
  }

  template <typename T>
  static T *OutFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Out", outputs, scope);
  }

  template <typename T>
  static T *OutputYFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Y", outputs, scope);
  }

E
eclipsess 已提交
175 176 177 178 179 180
  template <typename T>
  static T *OutputBoxesFrom(const VariableNameMap &outputs,
                            const Scope &scope) {
    return GetVarValue<T>("Boxes", outputs, scope);
  }

E
eclipsess 已提交
181 182 183 184 185
  template <typename T>
  static T *OutputBoxFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("OutputBox", outputs, scope);
  }

E
eclipsess 已提交
186 187 188 189 190 191
  template <typename T>
  static T *OutputVariancesFrom(const VariableNameMap &outputs,
                                const Scope &scope) {
    return GetVarValue<T>("Variances", outputs, scope);
  }

192 193 194 195 196 197 198 199 200 201 202
  template <typename T>
  static T *MidOutFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("MidOut", outputs, scope);
  }

  template <typename T>
  static T *FilterFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Filter", inputs, scope);
  }

  template <typename T>
W
wangliu 已提交
203
  static const T GetAttr(const string &key, const AttributeMap &map) {
204 205 206
    return ((Attribute)map.at(key)).Get<T>();
  }

207 208 209 210
  static const bool HasAttr(const string &key, const AttributeMap &map) {
    return map.count(key) > 0;
  }

211
  template <typename T>
W
wangliu 已提交
212
  static T *GetVarValue(const string &key, const VariableNameMap &var_map,
213
                        const Scope &scope) {
W
wangliu 已提交
214 215
    PADDLE_MOBILE_ENFORCE(var_map.count(key) > 0,
                          "%s is not contained in var_map", key.c_str())
216 217 218 219 220 221
    auto var_vec = var_map.at(key);
    if (!var_vec.empty()) {
      auto var = scope.FindVar(var_vec[0]);
      return var->GetMutable<T>();
    } else {
      return nullptr;
朔-望's avatar
朔-望 已提交
222
    }
223
  }
朔-望's avatar
朔-望 已提交
224

225
  template <typename T>
W
wangliu 已提交
226 227 228
  static vector<T *> GetMultiVarValue(const string &key,
                                      const VariableNameMap &var_map,
                                      const Scope &scope) {
229 230
    auto var_vecs = var_map.at(key);
    assert(var_vecs.size() > 1);
W
wangliu 已提交
231
    vector<T *> var_res;
232 233 234
    for (auto &var_vec : var_vecs) {
      auto var = scope.FindVar(var_vec);
      var_res.push_back(var->GetMutable<T>());
朔-望's avatar
朔-望 已提交
235
    }
236 237
    return var_res;
  }
朔-望's avatar
朔-望 已提交
238 239
};

L
liuruilong 已提交
240
#ifdef CONV_OP
N
nhzlx 已提交
241
// Parameters for the convolution operator.
// Resolves Input/Filter/Output tensors from `scope` and the attributes
// strides / paddings / dilations / groups from `attrs`.
template <typename Dtype>
class ConvParam : OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ConvParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, const Scope &scope) {
    filter_ = FilterFrom<GType>(inputs, scope);
    input_ = InputFrom<GType>(inputs, scope);
    output_ = OutputFrom<GType>(outputs, scope);
    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
    dilations_ = GetAttr<vector<int>>("dilations", attrs);
    groups = GetAttr<int>("groups", attrs);
  }

  const RType *Input() const { return input_; }

  RType *Filter() const { return filter_; }

  RType *Output() const { return output_; }

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

  const vector<int> &Dilations() const { return dilations_; }

  // Number of convolution groups (grouped / depthwise convolution).
  const int &Groups() const { return groups; }

 private:
  RType *input_;
  RType *output_;
  RType *filter_;
  vector<int> strides_;
  vector<int> paddings_;
  vector<int> dilations_;
  int groups;  // NOTE: no trailing underscore, unlike sibling members.
};
N
nhzlx 已提交
281 282
template <typename Dtype>
Print &operator<<(Print &printer, const ConvParam<Dtype> &conv_param);
L
liuruilong 已提交
283
#endif
朔-望's avatar
朔-望 已提交
284

N
nhzlx 已提交
285
// Parameters for the elementwise_add operator: reads inputs X and Y, output
// Out, and the integer `axis` attribute.
template <typename Dtype>
class ElementwiseAddParam : OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ElementwiseAddParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
                      const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    input_y_ = InputYFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
    axis_ = GetAttr<int>("axis", attrs);
  }

  const RType *InputX() const { return input_x_; }

  const RType *InputY() const { return input_y_; }

  RType *Out() const { return out_; }

  // Dimension index along which Y is aligned with X (per the fluid
  // elementwise-op convention).
  const int &Axis() const { return axis_; }

 private:
  RType *input_x_;
  RType *input_y_;
  RType *out_;
  int axis_;
#ifdef PADDLE_MOBILE_FPGA
  // FPGA builds additionally carry the hardware argument struct used to
  // dispatch the elementwise add to the FPGA.

 private:
  fpga::EWAddArgs fpga_EW_add_args;

 public:
  const fpga::EWAddArgs &FpgaArgs() const { return fpga_EW_add_args; }
  void SetFpgaArgs(const fpga::EWAddArgs &args) { fpga_EW_add_args = args; }
#endif
};

324
#ifdef FUSION_ELEMENTWISEADDRELU_OP
N
nhzlx 已提交
325 326
// The fused elementwise_add + relu op takes exactly the same inputs,
// outputs, and attributes as plain elementwise_add.
template <typename Dtype>
using ElementwiseAddReluParam = ElementwiseAddParam<Dtype>;
L
liuruilong 已提交
327 328 329
#endif

#ifdef MUL_OP
N
nhzlx 已提交
330
template <typename Dtype>
朔-望's avatar
朔-望 已提交
331
class MulParam : OpParam {
N
nhzlx 已提交
332 333 334
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

朔-望's avatar
朔-望 已提交
335
 public:
336
  MulParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
337
           const AttributeMap &attrs, const Scope &scope) {
N
nhzlx 已提交
338 339 340
    input_x_ = InputXFrom<GType>(inputs, scope);
    input_y_ = InputYFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
341 342 343
    x_num_col_dims_ = GetAttr<int>("x_num_col_dims", attrs);
    y_num_col_dims_ = GetAttr<int>("y_num_col_dims", attrs);
  }
朔-望's avatar
朔-望 已提交
344

N
nhzlx 已提交
345
  const RType *InputX() const { return input_x_; }
朔-望's avatar
朔-望 已提交
346

N
nhzlx 已提交
347
  const RType *InputY() const { return input_y_; }
朔-望's avatar
朔-望 已提交
348

N
nhzlx 已提交
349
  RType *Out() const { return out_; }
朔-望's avatar
朔-望 已提交
350

351
  const int &XNumColDims() const { return x_num_col_dims_; }
朔-望's avatar
朔-望 已提交
352

353
  const int &YNumColDims() const { return y_num_col_dims_; }
朔-望's avatar
朔-望 已提交
354

朔-望's avatar
朔-望 已提交
355
 private:
N
nhzlx 已提交
356 357 358
  RType *input_x_;
  RType *input_y_;
  RType *out_;
359 360
  int x_num_col_dims_;
  int y_num_col_dims_;
朔-望's avatar
朔-望 已提交
361
};
L
liuruilong 已提交
362
#endif
朔-望's avatar
朔-望 已提交
363

L
liuruilong 已提交
364
#ifdef CONCAT_OP
N
nhzlx 已提交
365
template <typename Dtype>
朔-望's avatar
朔-望 已提交
366
class ConcatParam : public OpParam {
N
nhzlx 已提交
367 368 369
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

朔-望's avatar
朔-望 已提交
370
 public:
371
  ConcatParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
372
              const AttributeMap &attrs, const Scope &scope) {
N
nhzlx 已提交
373 374
    inputs_ = InputMultiFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
375 376
    axis_ = GetAttr<int>("axis", attrs);
  }
朔-望's avatar
朔-望 已提交
377

N
nhzlx 已提交
378
  vector<GType *> Inputs() const { return inputs_; }
朔-望's avatar
朔-望 已提交
379

N
nhzlx 已提交
380
  RType *Out() const { return out_; }
朔-望's avatar
朔-望 已提交
381

382
  const int &Axis() const { return axis_; }
朔-望's avatar
朔-望 已提交
383

朔-望's avatar
朔-望 已提交
384
 private:
N
nhzlx 已提交
385 386
  vector<GType *> inputs_;
  RType *out_;
387
  int axis_;
朔-望's avatar
朔-望 已提交
388
};
L
liuruilong 已提交
389
#endif
朔-望's avatar
朔-望 已提交
390

L
liuruilong 已提交
391
#ifdef LRN_OP
N
nhzlx 已提交
392
// Parameters for the lrn (local response normalization) operator: input X,
// outputs Out and MidOut, and the n / alpha / beta / k / data_format
// attributes.
template <typename Dtype>
class LrnParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  LrnParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
           const AttributeMap &attrs, const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
    mid_out_ = MidOutFrom<GType>(outputs, scope);
    n_ = GetAttr<int>("n", attrs);
    alpha_ = GetAttr<float>("alpha", attrs);
    beta_ = GetAttr<float>("beta", attrs);
    k_ = GetAttr<float>("k", attrs);
    data_format_ = GetAttr<string>("data_format", attrs);
  }

  const RType *InputX() const { return input_x_; }

  RType *Out() const { return out_; }

  // Intermediate output produced alongside Out.
  RType *MidOut() const { return mid_out_; }

  // Number of channels summed over by the normalization window.
  const int &N() const { return n_; }

  const float &Alpha() const { return alpha_; }

  const float &Beta() const { return beta_; }

  const float &K() const { return k_; }

  const string &DataFormat() const { return data_format_; }

 private:
  RType *input_x_;
  RType *out_;
  RType *mid_out_;
  int n_;
  float alpha_;
  float beta_;
  float k_;
  string data_format_;
};
L
liuruilong 已提交
436 437 438
#endif

#ifdef BATCHNORM_OP
N
nhzlx 已提交
439
template <typename Dtype>
E
eclipsess 已提交
440
class BatchNormParam : OpParam {
N
nhzlx 已提交
441 442 443
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

朔-望's avatar
朔-望 已提交
444
 public:
445
  BatchNormParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
446
                 const AttributeMap &attrs, const Scope &scope) {
N
nhzlx 已提交
447 448 449 450 451 452
    input_x_ = InputXFrom<GType>(inputs, scope);
    output_y_ = OutputYFrom<GType>(outputs, scope);
    input_bias_ = InputBiasFrom<GType>(inputs, scope);
    input_mean_ = InputMeanFrom<GType>(inputs, scope);
    input_scale_ = InputScaleFrom<GType>(inputs, scope);
    input_variance_ = InputVarianceFrom<GType>(inputs, scope);
453 454
    epsilon_ = GetAttr<float>("epsilon", attrs);
    momentum_ = GetAttr<float>("momentum", attrs);
L
liuruilong 已提交
455
    //    is_test_ = GetAttr<bool>("is_test", attrs);
456
  }
E
eclipsess 已提交
457

N
nhzlx 已提交
458
  const RType *InputX() const { return input_x_; }
E
eclipsess 已提交
459

N
nhzlx 已提交
460
  RType *OutputY() const { return output_y_; }
E
eclipsess 已提交
461

N
nhzlx 已提交
462
  const RType *InputBias() const { return input_bias_; }
E
eclipsess 已提交
463

N
nhzlx 已提交
464
  const RType *InputMean() const { return input_mean_; }
E
eclipsess 已提交
465

N
nhzlx 已提交
466
  const RType *InputScale() const { return input_scale_; }
E
eclipsess 已提交
467

N
nhzlx 已提交
468
  const RType *InputVariance() const { return input_variance_; }
E
eclipsess 已提交
469

470
  const float &Epsilon() const { return epsilon_; }
E
eclipsess 已提交
471

472
  const float &Momentum() const { return momentum_; }
E
eclipsess 已提交
473

474
  const bool &IsTest() const { return is_test_; }
E
eclipsess 已提交
475

W
wangliu 已提交
476
  const string &DataFormat() const { return data_format_; }
E
eclipsess 已提交
477

朔-望's avatar
朔-望 已提交
478
 private:
N
nhzlx 已提交
479 480 481 482 483 484
  RType *input_x_;
  RType *output_y_;
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
485 486 487
  float epsilon_;
  float momentum_;
  bool is_test_;
W
wangliu 已提交
488
  string data_format_;
E
eclipsess 已提交
489
};
L
liuruilong 已提交
490 491 492
#endif

#ifdef POOL_OP
N
nhzlx 已提交
493
// Parameters for the pool2d operator: input X, output Out, and the pooling
// attributes (pooling_type, ksize, strides, paddings, ceil_mode,
// global_pooling).
template <typename Dtype>
class PoolParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  PoolParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, const Scope &scope) {
    input_ = InputXFrom<GType>(inputs, scope);

    output_ = OutFrom<GType>(outputs, scope);
    pooling_type_ = GetAttr<string>("pooling_type", attrs);
    ksize_ = GetAttr<vector<int>>("ksize", attrs);
    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
    ceil_mode_ = GetAttr<bool>("ceil_mode", attrs);
    global_pooling_ = GetAttr<bool>("global_pooling", attrs);
  }

  const RType *Input() const { return input_; }

  RType *Output() const { return output_; }

  // Pooling kind as a string attribute (e.g. "max" / "avg" — TODO confirm
  // the exact values against the kernel implementations).
  const string &PoolingType() const { return pooling_type_; }

  const vector<int> &Ksize() const { return ksize_; }

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

  bool isCeilMode() const { return ceil_mode_; }

  bool isGlobalPooling() const { return global_pooling_; }

 private:
  RType *input_;
  RType *output_;
  string pooling_type_;
  vector<int> ksize_;
  vector<int> strides_;
  vector<int> paddings_;
  bool ceil_mode_;
  bool global_pooling_ = false;
#ifdef PADDLE_MOBILE_FPGA
  // FPGA builds additionally carry the hardware argument struct used to
  // dispatch the pooling to the FPGA.

 private:
  fpga::PoolingArgs fpga_pool_args;

 public:
  const fpga::PoolingArgs &FpgaArgs() const { return fpga_pool_args; }
  void SetFpgaArgs(const fpga::PoolingArgs &args) { fpga_pool_args = args; }
#endif
};
L
liuruilong 已提交
547 548 549
#endif

#ifdef PRIORBOX_OP
N
nhzlx 已提交
550
// Parameters for the prior_box operator: inputs Input and Image, outputs
// Boxes and Variances, plus the anchor-generation attributes.
template <typename Dtype>
class PriorBoxParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  PriorBoxParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, const Scope &scope) {
    input_ = InputFrom<GType>(inputs, scope);
    input_image_ = InputImageFrom<GType>(inputs, scope);
    output_boxes_ = OutputBoxesFrom<GType>(outputs, scope);
    output_variances_ = OutputVariancesFrom<GType>(outputs, scope);
    min_sizes_ = GetAttr<vector<float>>("min_sizes", attrs);
    max_sizes_ = GetAttr<vector<float>>("max_sizes", attrs);
    aspect_ratios_ = GetAttr<vector<float>>("aspect_ratios", attrs);
    variances_ = GetAttr<vector<float>>("variances", attrs);
    flip_ = GetAttr<bool>("flip", attrs);
    clip_ = GetAttr<bool>("clip", attrs);
    step_w_ = GetAttr<float>("step_w", attrs);
    step_h_ = GetAttr<float>("step_h", attrs);
    offset_ = GetAttr<float>("offset", attrs);
  }
  const RType *Input() const { return input_; }

  const RType *InputImage() const { return input_image_; }

  RType *OutputBoxes() const { return output_boxes_; }

  RType *OutputVariances() const { return output_variances_; }

  const vector<float> &MinSizes() const { return min_sizes_; }

  const vector<float> &MaxSizes() const { return max_sizes_; }

  const vector<float> &AspectRatios() const { return aspect_ratios_; }

  const vector<float> &Variances() const { return variances_; }

  const bool &Flip() const { return flip_; }

  const bool &Clip() const { return clip_; }

  const float &StepW() const { return step_w_; }

  const float &StepH() const { return step_h_; }

  const float &Offset() const { return offset_; }

 private:
  RType *input_;
  RType *input_image_;
  RType *output_boxes_;
  RType *output_variances_;
  vector<float> min_sizes_;
  vector<float> max_sizes_;
  vector<float> aspect_ratios_;
  vector<float> variances_;
  bool flip_;
  bool clip_;
  float step_w_;
  float step_h_;
  float offset_;
};
L
liuruilong 已提交
613
#endif
E
eclipsess 已提交
614

L
liuruilong 已提交
615
#ifdef BOXCODER_OP
N
nhzlx 已提交
616
// Parameters for the box_coder operator: inputs PriorBox, PriorBoxVar and
// TargetBox, output OutputBox, plus the code_type attribute.
template <typename Dtype>
class BoxCoderParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  BoxCoderParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, const Scope &scope) {
    input_priorbox_ = InputPriorBoxFrom<GType>(inputs, scope);
    input_priorboxvar_ = InputPriorBoxVarFrom<GType>(inputs, scope);
    input_targetbox_ = InputTargetBoxFrom<GType>(inputs, scope);
    output_box_ = OutputBoxFrom<GType>(outputs, scope);
    code_type_ = GetAttr<std::string>("code_type", attrs);
  }
  const RType *InputPriorBox() const { return input_priorbox_; }

  const RType *InputPriorBoxVar() const { return input_priorboxvar_; }

  const RType *InputTargetBox() const { return input_targetbox_; }

  RType *OutputBox() const { return output_box_; }

  // Encoding/decoding mode of the op, taken verbatim from the "code_type"
  // attribute.
  const std::string &CodeType() const { return code_type_; }

 private:
  RType *input_priorbox_;
  RType *input_priorboxvar_;
  RType *input_targetbox_;
  RType *output_box_;
  std::string code_type_;
};
L
liuruilong 已提交
647
#endif
W
wangliu 已提交
648

L
liuruilong 已提交
649
#ifdef SOFTMAX_OP
N
nhzlx 已提交
650
// Parameters for the softmax operator: input X and output Out.
template <typename Dtype>
class SoftmaxParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SoftmaxParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
  }
  const RType *InputX() const { return input_x_; }
  RType *Out() const { return out_; }

 private:
  RType *input_x_;
  RType *out_;

#ifdef PADDLE_MOBILE_FPGA
  // FPGA builds may substitute a separately-owned float tensor for the
  // original input, plus the bypass argument struct for the hardware path.

 private:
  std::shared_ptr<RType> float_input_x_;
  fpga::BypassArgs fpga_bypass_args;

 public:
  // Returns the substituted float input if one was set, otherwise falls
  // back to the original input tensor.
  RType *FloatInput() {
    return float_input_x_ == nullptr ? input_x_ : float_input_x_.get();
  }
  // Takes ownership of `input` (stored in a shared_ptr).
  void SetFloatInput(Tensor *input) { float_input_x_.reset(input); }
  const fpga::BypassArgs &FpgaArgs() const { return fpga_bypass_args; }
  void SetFpgaArgs(const fpga::BypassArgs &args) { fpga_bypass_args = args; }
#endif
};
L
liuruilong 已提交
683
#endif
W
wangliu 已提交
684

L
liuruilong 已提交
685
#ifdef SIGMOID_OP
N
nhzlx 已提交
686
template <typename Dtype>
W
wangliu 已提交
687
class SigmoidParam : public OpParam {
N
nhzlx 已提交
688 689 690
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

W
wangliu 已提交
691 692
 public:
  SigmoidParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
693
               const AttributeMap &attrs, const Scope &scope) {
N
nhzlx 已提交
694 695
    input_x_ = InputXFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
W
wangliu 已提交
696
  }
N
nhzlx 已提交
697 698
  const RType *InputX() const { return input_x_; }
  RType *Out() const { return out_; }
W
wangliu 已提交
699 700

 private:
N
nhzlx 已提交
701 702
  RType *input_x_;
  RType *out_;
W
wangliu 已提交
703
};
L
liuruilong 已提交
704 705 706
#endif

#ifdef MULTICLASSNMS_OP
N
nhzlx 已提交
707
// Parameters for the multiclass_nms operator: inputs BBoxes and Scores,
// output Out, and the NMS thresholds/limits.
template <typename Dtype>
class MultiClassNMSParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  MultiClassNMSParam(const VariableNameMap &inputs,
                     const VariableNameMap &outputs, const AttributeMap &attrs,
                     const Scope &scope) {
    input_bboxes_ = InputBBoxesFrom<GType>(inputs, scope);
    input_scores_ = InputScoresFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
    background_label_ = GetAttr<int>("background_label", attrs);
    nms_top_k_ = GetAttr<int>("nms_top_k", attrs);
    keep_top_k_ = GetAttr<int>("keep_top_k", attrs);
    nms_threshold_ = GetAttr<float>("nms_threshold", attrs);
    nms_eta_ = GetAttr<float>("nms_eta", attrs);
    score_threshold_ = GetAttr<float>("score_threshold", attrs);
  }

  const RType *InputBBoxes() const { return input_bboxes_; }

  const RType *InputScores() const { return input_scores_; }

  RType *Out() const { return out_; }

  // Class index treated as background.
  const int &BackGroundLabel() const { return background_label_; }

  const int &NMSTopK() const { return nms_top_k_; }

  const int &KeepTopK() const { return keep_top_k_; }

  const float &NMSThreshold() const { return nms_threshold_; }

  const float &NMSEta() const { return nms_eta_; }

  const float &ScoreThreshold() const { return score_threshold_; }

 private:
  RType *input_bboxes_;
  RType *input_scores_;
  RType *out_;
  int background_label_;
  int nms_top_k_;
  int keep_top_k_;
  float nms_threshold_;
  float nms_eta_;
  float score_threshold_;
};
L
liuruilong 已提交
756
#endif
W
wangliu 已提交
757

N
nhzlx 已提交
758
// Parameters for the feed operator (graph entry point).
// NOTE: unlike the other param classes, the constructor takes a mutable
// Scope* because it creates/reads the "batch_size" variable in the scope.
template <typename Dtype>
class FeedParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FeedParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, Scope *scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    auto var = scope->Var("batch_size");
    batch_size = var->GetValue<int>();
  }
  const RType *InputX() const { return input_x_; }
  RType *Out() const { return out_; }
  // Batch size read from the scope's "batch_size" variable at
  // construction time.
  const int BatchSize() const { return batch_size; }

 private:
  RType *input_x_;
  RType *out_;
  int batch_size;
};

N
nhzlx 已提交
781
template <typename Dtype>
L
liuruilong 已提交
782
class FetchParam : public OpParam {
N
nhzlx 已提交
783 784 785
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

L
liuruilong 已提交
786 787
 public:
  FetchParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
788
             const AttributeMap &attrs, const Scope &scope) {
N
nhzlx 已提交
789 790
    input_x_ = InputXFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
L
liuruilong 已提交
791
  }
N
nhzlx 已提交
792 793
  const RType *InputX() const { return input_x_; }
  RType *Out() const { return out_; }
L
liuruilong 已提交
794

L
liuruilong 已提交
795
 private:
N
nhzlx 已提交
796 797
  RType *input_x_;
  RType *out_;
L
liuruilong 已提交
798 799
};

L
liuruilong 已提交
800
#ifdef TRANSPOSE_OP
// Parameter pack for the transpose op: input tensor, output tensor, and the
// "axis" permutation attribute.
template <typename Dtype>
class TransposeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  TransposeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                 const AttributeMap &attrs, const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
    axis_ = GetAttr<vector<int>>("axis", attrs);
  }

  const RType *InputX() const { return input_x_; }

  RType *Out() const { return out_; }

  // Permutation of the input dimensions.
  const vector<int> &Axis() const { return axis_; }

 private:
  RType *input_x_;
  RType *out_;
  vector<int> axis_;
};
#endif
E
eclipsess 已提交
826

L
liuruilong 已提交
827
#ifdef RESHAPE_OP
// Parameter pack for the reshape op. The target shape can come either from
// the "Shape" input tensor or from the "shape" attribute.
template <typename Dtype>
class ReshapeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ReshapeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    input_shape_ = InputShapeFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
    shape_ = GetAttr<vector<int>>("shape", attrs);

    // Older fluid program descriptions may not carry "inplace"; fall back to
    // false instead of crashing on a missing attribute.
    if (HasAttr("inplace", attrs)) {
      inplace_ = GetAttr<bool>("inplace", attrs);
    } else {
      inplace_ = false;
      DLOG << "ReshapeParam lost inplace params. maybe fluid updated";
    }
  }

  const RType *InputX() const { return input_x_; }

  const RType *InputShape() const { return input_shape_; }

  RType *Out() const { return out_; }

  const vector<int> &Shape() const { return shape_; }

  const bool &Inplace() const { return inplace_; }

 private:
  RType *input_x_;
  RType *input_shape_;
  RType *out_;
  vector<int> shape_;
  bool inplace_;
};
#endif
E
eclipsess 已提交
867

T
Tian 已提交
868
#ifdef SCALE_OP
// Parameter pack for the scale op: per-element scaling with optional bias.
template <typename Dtype>
class ScaleParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ScaleParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    input_bias_ = InputBiasFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
    inplace_ = GetAttr<bool>("inplace", attrs);
    has_bias_ = GetAttr<bool>("has_bias", attrs);
    scales_ = GetAttr<vector<float>>("scales", attrs);
    biases_ = GetAttr<vector<float>>("biases", attrs);
  }

  const RType *InputX() const { return input_x_; }

  const RType *InputBias() const { return input_bias_; }

  RType *Out() const { return out_; }

  const bool &Inplace() const { return inplace_; }

  const bool &HasBias() const { return has_bias_; }

  const vector<float> &Scales() const { return scales_; }

  const vector<float> &Biases() const { return biases_; }

 private:
  RType *input_x_;
  RType *input_bias_;
  RType *out_;
  bool inplace_;
  bool has_bias_;
  vector<float> scales_;
  vector<float> biases_;
};
#endif

#ifdef SLICE_OP
// Parameter pack for the slice op: splits the input along one axis at the
// given slice points.
template <typename Dtype>
class SliceParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SliceParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    input_shape_ = InputShapeFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
    axis_ = GetAttr<int>("axis", attrs);
    slice_points_ = GetAttr<vector<int>>("slice_points", attrs);
    inplace_ = GetAttr<bool>("inplace", attrs);
  }

  const RType *InputX() const { return input_x_; }

  const RType *InputShape() const { return input_shape_; }

  RType *Out() const { return out_; }

  const int &Axis() const { return axis_; }

  const vector<int> &SlicePoints() const { return slice_points_; }

  const bool &Inplace() const { return inplace_; }

 private:
  RType *input_x_;
  RType *input_shape_;
  RType *out_;
  int axis_;
  vector<int> slice_points_;
  bool inplace_;
};
#endif

#ifdef RESIZE_OP
// Parameter pack for the resize op: target size either fixed (height/width)
// or scaled (out_height_scale/out_width_scale).
template <typename Dtype>
class ResizeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ResizeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
              const AttributeMap &attrs, const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    input_shape_ = InputShapeFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
    is_pyramid_test_ = GetAttr<bool>("is_pyramid_test", attrs);
    height_ = GetAttr<int>("height", attrs);
    width_ = GetAttr<int>("width", attrs);
    out_height_scale_ = GetAttr<float>("out_height_scale", attrs);
    out_width_scale_ = GetAttr<float>("out_width_scale", attrs);
  }

  const RType *InputX() const { return input_x_; }

  const RType *InputShape() const { return input_shape_; }

  RType *Out() const { return out_; }

  const bool &IsPyramidTest() const { return is_pyramid_test_; }

  const int &Height() const { return height_; }

  const int &Width() const { return width_; }

  const float &OutHeightScale() const { return out_height_scale_; }

  const float &OutWidthScale() const { return out_width_scale_; }

 private:
  RType *input_x_;
  RType *input_shape_;
  RType *out_;
  bool is_pyramid_test_;
  int height_;
  int width_;
  float out_height_scale_;
  float out_width_scale_;
};
#endif

L
liuruilong 已提交
997
#ifdef RELU_OP
/*
 * @b The op layer instantiates this param and hands it to the kernel layer.
 * */
template <typename Dtype>
class ReluParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ReluParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
  }

  const RType *InputX() const { return input_x_; }

  RType *Out() const { return out_; }

 private:
  RType *input_x_;
  RType *out_;
};
#endif
E
eclipsess 已提交
1022

T
Tian 已提交
1023
#ifdef PRELU_OP
// Parameter pack for the PReLU op: input, learnable alpha tensor, output,
// and the "mode" attribute (e.g. per-channel vs per-element).
// Fix: dropped the unused local `framework::DDim dims = alpha_->dims();`
// from the constructor — the value was never read.
template <typename Dtype>
class PReluParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  PReluParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, const Scope &scope) {
    DLOG << "PReluParam inputs before";
    input_x_ = InputXFrom<GType>(inputs, scope);
    alpha_ = InputAlphaFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
    mode_ = GetAttr<std::string>("mode", attrs);
    DLOG << "PReluParam mode after" << mode_;
  }

  const RType *InputX() const { return input_x_; }
  const RType *InputAlpha() const { return alpha_; }
  RType *Out() const { return out_; }
  const std::string &Mode() const { return mode_; }

 private:
  RType *input_x_;
  RType *out_;
  RType *alpha_;
  std::string mode_;
};
#endif

N
nhzlx 已提交
1053
template <typename Dtype>
L
liuruilong 已提交
1054
class FusionFcParam : public OpParam {
N
nhzlx 已提交
1055 1056 1057
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

E
eclipsess 已提交
1058
 public:
L
liuruilong 已提交
1059
  FusionFcParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
L
liuruilong 已提交
1060
                const AttributeMap &attrs, const Scope &scope) {
N
nhzlx 已提交
1061 1062 1063 1064
    input_x_ = InputXFrom<GType>(inputs, scope);
    input_y_ = InputYFrom<GType>(inputs, scope);
    input_z_ = InputZFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
E
eclipsess 已提交
1065 1066 1067 1068
    x_num_col_dims_ = GetAttr<int>("x_num_col_dims", attrs);
    y_num_col_dims_ = GetAttr<int>("y_num_col_dims", attrs);
    axis_ = GetAttr<int>("axis", attrs);
  }
N
nhzlx 已提交
1069
  const RType *InputX() const { return input_x_; }
E
eclipsess 已提交
1070

1071
#ifdef PADDLE_MOBILE_FPGA
N
nhzlx 已提交
1072
  RType *InputY() const { return input_y_; }
1073
#else
N
nhzlx 已提交
1074
  const RType *InputY() const { return input_y_; }
1075
#endif
E
eclipsess 已提交
1076

N
nhzlx 已提交
1077
  const RType *InputZ() const { return input_z_; }
E
eclipsess 已提交
1078

N
nhzlx 已提交
1079
  RType *Out() const { return out_; }
E
eclipsess 已提交
1080 1081 1082 1083 1084 1085 1086 1087

  const int &XNumColDims() const { return x_num_col_dims_; }

  const int &YNumColDims() const { return y_num_col_dims_; }

  const int &Axis() const { return axis_; }

 private:
N
nhzlx 已提交
1088 1089 1090 1091
  RType *input_x_;
  RType *input_y_;
  RType *input_z_;
  RType *out_;
E
eclipsess 已提交
1092 1093 1094
  int x_num_col_dims_;
  int y_num_col_dims_;
  int axis_;
Z
zhangyang 已提交
1095 1096 1097
#ifdef PADDLE_MOBILE_FPGA

 private:
H
hanbuhe 已提交
1098
  fpga::ConvArgs fpga_conv_args;
Z
zhangyang 已提交
1099 1100

 public:
H
hanbuhe 已提交
1101 1102
  const fpga::ConvArgs &FpgaArgs() const { return fpga_conv_args; }
  void SetFpgaArgs(const fpga::ConvArgs &args) { fpga_conv_args = args; }
Z
zhangyang 已提交
1103
#endif
E
eclipsess 已提交
1104
};
1105 1106

#ifdef FUSION_FCRELU_OP
// fc+relu reuses the plain fc parameters; the relu adds no attributes.
template <typename DeviceType>
using FusionFcReluParam = FusionFcParam<DeviceType>;
#endif
E
eclipsess 已提交
1110

N
nhzlx 已提交
1111
template <typename Dtype>
L
liuruilong 已提交
1112
class FusionConvAddParam : public OpParam {
N
nhzlx 已提交
1113 1114 1115
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

W
wangliu 已提交
1116
 public:
L
liuruilong 已提交
1117
  FusionConvAddParam(const VariableNameMap &inputs,
L
liuruilong 已提交
1118 1119
                     const VariableNameMap &outputs, const AttributeMap &attrs,
                     const Scope &scope) {
N
nhzlx 已提交
1120
    bias_ = InputYFrom<GType>(inputs, scope);
W
wangliu 已提交
1121
    axis_ = GetAttr<int>("axis", attrs);
N
nhzlx 已提交
1122 1123 1124
    filter_ = FilterFrom<GType>(inputs, scope);
    input_ = InputFrom<GType>(inputs, scope);
    output_ = OutFrom<GType>(outputs, scope);
W
wangliu 已提交
1125 1126 1127 1128 1129
    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
    dilations_ = GetAttr<vector<int>>("dilations", attrs);
    groups = GetAttr<int>("groups", attrs);
  }
N
nhzlx 已提交
1130
  RType *Bias() const { return bias_; }
W
wangliu 已提交
1131 1132 1133

  const int &Axis() const { return axis_; }

N
nhzlx 已提交
1134
  const RType *Input() const { return input_; }
W
wangliu 已提交
1135

1136
#ifdef PADDLE_MOBILE_FPGA
N
nhzlx 已提交
1137
  RType *Filter() const { return filter_; }
1138
#else
N
nhzlx 已提交
1139
  const RType *Filter() const { return filter_; }
1140
#endif
W
wangliu 已提交
1141

N
nhzlx 已提交
1142
  RType *Output() const { return output_; }
W
wangliu 已提交
1143 1144 1145 1146 1147 1148 1149 1150 1151

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

  const vector<int> &Dilations() const { return dilations_; }

  const int &Groups() const { return groups; }

L
liuruilong 已提交
1152
 protected:
N
nhzlx 已提交
1153
  RType *bias_;
W
wangliu 已提交
1154
  int axis_;
N
nhzlx 已提交
1155 1156 1157
  RType *input_;
  RType *output_;
  RType *filter_;
W
wangliu 已提交
1158 1159 1160 1161
  vector<int> strides_;
  vector<int> paddings_;
  vector<int> dilations_;
  int groups;
Z
zhangyang 已提交
1162 1163 1164
#ifdef PADDLE_MOBILE_FPGA

 private:
H
hanbuhe 已提交
1165
  fpga::ConvArgs fpga_conv_args;
Z
zhangyang 已提交
1166 1167

 public:
H
hanbuhe 已提交
1168 1169
  const fpga::ConvArgs &FpgaArgs() const { return fpga_conv_args; }
  void SetFpgaArgs(const fpga::ConvArgs &args) { fpga_conv_args = args; }
Z
zhangyang 已提交
1170
#endif
W
wangliu 已提交
1171 1172
};

N
nhzlx 已提交
1173 1174
template <typename Dtype>
Print &operator<<(Print &printer, const FusionConvAddParam<Dtype> &conv_param);
W
wangliu 已提交
1175

Z
zhangyang 已提交
1176
#ifdef FUSION_CONVADDRELU_OP
// conv+add+relu carries exactly the conv+add parameters; the relu stage
// introduces no extra inputs or attributes.
template <typename DeviceType>
class FusionConvAddReluParam : public FusionConvAddParam<DeviceType> {
 public:
  FusionConvAddReluParam(const VariableNameMap &inputs,
                         const VariableNameMap &outputs,
                         const AttributeMap &attrs, const Scope &scope)
      : FusionConvAddParam<DeviceType>(inputs, outputs, attrs, scope) {}
};
#endif

E
eclipsess 已提交
1187
#ifdef FUSION_CONVADDBNRELU_OP
// Parameter pack for the fused conv + elementwise-add + batch-norm + relu op.
// NewScale/NewBias hold the BN parameters folded for inference; they are
// injected later via the setters.
template <typename Dtype>
class FusionConvAddBNReluParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvAddBNReluParam(const VariableNameMap &inputs,
                           const VariableNameMap &outputs,
                           const AttributeMap &attrs, const Scope &scope) {
    bias_ = InputYFrom<GType>(inputs, scope);
    axis_ = GetAttr<int>("axis", attrs);
    filter_ = FilterFrom<GType>(inputs, scope);
    input_ = InputFrom<GType>(inputs, scope);
    output_ = OutFrom<GType>(outputs, scope);
    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
    dilations_ = GetAttr<vector<int>>("dilations", attrs);
    groups = GetAttr<int>("groups", attrs);
    input_bias_ = InputBiasFrom<GType>(inputs, scope);
    input_mean_ = InputMeanFrom<GType>(inputs, scope);
    input_scale_ = InputScaleFrom<GType>(inputs, scope);
    input_variance_ = InputVarianceFrom<GType>(inputs, scope);
    epsilon_ = GetAttr<float>("epsilon", attrs);
    momentum_ = GetAttr<float>("momentum", attrs);
    //    is_test_ = GetAttr<bool>("is_test", attrs);
  }
  RType *Bias() const { return bias_; }

  const int &Axis() const { return axis_; }

  const RType *Input() const { return input_; }

// The FPGA path needs a mutable filter for quantization/layout rewrites.
#ifdef PADDLE_MOBILE_FPGA
  RType *Filter() const { return filter_; }
#else
  const RType *Filter() const { return filter_; }
#endif

  RType *Output() const { return output_; }

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

  const vector<int> &Dilations() const { return dilations_; }

  const int &Groups() const { return groups; }

  const RType *InputBias() const { return input_bias_; }

  const RType *InputMean() const { return input_mean_; }

  const RType *InputScale() const { return input_scale_; }

  const RType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  const bool &IsTest() const { return is_test_; }

  void SetNewScale(RType *new_scale) { new_scale_ = new_scale; }

  void SetNewBias(RType *new_bias) { new_bias_ = new_bias; }

  const RType *NewScale() const { return new_scale_; }

  const RType *NewBias() const { return new_bias_; }

 protected:
  RType *bias_;
  int axis_;
  RType *input_;
  RType *output_;
  RType *filter_;
  vector<int> strides_;
  vector<int> paddings_;
  vector<int> dilations_;
  int groups;
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
  float epsilon_;
  float momentum_;
  // The "is_test" attribute read is commented out in the ctor, so without a
  // default IsTest() would return an indeterminate value. Default to false.
  bool is_test_ = false;
  RType *new_bias_;
  RType *new_scale_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::ConvArgs fpga_conv_args;

 public:
  const fpga::ConvArgs &FpgaArgs() const { return fpga_conv_args; }
  void SetFpgaArgs(const fpga::ConvArgs &args) { fpga_conv_args = args; }
#endif
};
#endif
E
eclipsess 已提交
1288

Z
zhangyang 已提交
1289
#ifdef FUSION_CONVBN_OP
// Parameter pack for the fused conv + batch-norm op. The op output is the
// BN "Y" output; NewScale/NewBias hold the folded BN parameters and are
// injected later via the setters.
template <typename Dtype>
class FusionConvBNParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvBNParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    const Scope &scope) {
    filter_ = FilterFrom<GType>(inputs, scope);
    input_ = InputFrom<GType>(inputs, scope);
    output_y_ = OutputYFrom<GType>(outputs, scope);
    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
    dilations_ = GetAttr<vector<int>>("dilations", attrs);
    groups = GetAttr<int>("groups", attrs);
    input_bias_ = InputBiasFrom<GType>(inputs, scope);
    input_mean_ = InputMeanFrom<GType>(inputs, scope);
    input_scale_ = InputScaleFrom<GType>(inputs, scope);
    input_variance_ = InputVarianceFrom<GType>(inputs, scope);
    epsilon_ = GetAttr<float>("epsilon", attrs);
    momentum_ = GetAttr<float>("momentum", attrs);
    //    is_test_ = GetAttr<bool>("is_test", attrs);
  }

  const RType *Input() const { return input_; }

// The FPGA path needs a mutable filter for quantization/layout rewrites.
#ifdef PADDLE_MOBILE_FPGA
  RType *Filter() const { return filter_; }
#else
  const RType *Filter() const { return filter_; }
#endif
  RType *Output() const { return output_y_; }

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

  const vector<int> &Dilations() const { return dilations_; }

  const int &Groups() const { return groups; }

  const RType *InputBias() const { return input_bias_; }

  const RType *InputMean() const { return input_mean_; }

  const RType *InputScale() const { return input_scale_; }

  const RType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  const bool &IsTest() const { return is_test_; }

  void SetNewScale(RType *new_scale) { new_scale_ = new_scale; }

  void SetNewBias(RType *new_bias) { new_bias_ = new_bias; }

  const RType *NewScale() const { return new_scale_; }

  const RType *NewBias() const { return new_bias_; }

 protected:
  RType *input_;
  RType *output_y_;
  RType *filter_;
  vector<int> strides_;
  vector<int> paddings_;
  vector<int> dilations_;
  int groups;
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
  float epsilon_;
  float momentum_;
  // The "is_test" attribute read is commented out in the ctor, so without a
  // default IsTest() would return an indeterminate value. Default to false.
  bool is_test_ = false;
  RType *new_bias_;
  RType *new_scale_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::ConvArgs fpga_conv_args;

 public:
  const fpga::ConvArgs &FpgaArgs() const { return fpga_conv_args; }
  void SetFpgaArgs(const fpga::ConvArgs &args) { fpga_conv_args = args; }
#endif
};
#endif

1383
#ifdef FUSION_CONVADDBN_OP
// Parameter pack for the fused conv + elementwise-add + batch-norm op.
// The op output is the BN "Y" output; NewScale/NewBias hold the folded BN
// parameters and are injected later via the setters.
template <typename Dtype>
class FusionConvAddBNParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvAddBNParam(const VariableNameMap &inputs,
                       const VariableNameMap &outputs,
                       const AttributeMap &attrs, const Scope &scope) {
    bias_ = InputYFrom<GType>(inputs, scope);
    axis_ = GetAttr<int>("axis", attrs);
    filter_ = FilterFrom<GType>(inputs, scope);
    input_ = InputFrom<GType>(inputs, scope);
    output_y_ = OutputYFrom<GType>(outputs, scope);
    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
    dilations_ = GetAttr<vector<int>>("dilations", attrs);
    groups = GetAttr<int>("groups", attrs);
    input_bias_ = InputBiasFrom<GType>(inputs, scope);
    input_mean_ = InputMeanFrom<GType>(inputs, scope);
    input_scale_ = InputScaleFrom<GType>(inputs, scope);
    input_variance_ = InputVarianceFrom<GType>(inputs, scope);
    epsilon_ = GetAttr<float>("epsilon", attrs);
    momentum_ = GetAttr<float>("momentum", attrs);
    //    is_test_ = GetAttr<bool>("is_test", attrs);
  }
  RType *Bias() const { return bias_; }

  const int &Axis() const { return axis_; }

  const RType *Input() const { return input_; }

// The FPGA path needs a mutable filter for quantization/layout rewrites.
#ifdef PADDLE_MOBILE_FPGA
  RType *Filter() const { return filter_; }
#else
  const RType *Filter() const { return filter_; }
#endif
  RType *Output() const { return output_y_; }

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

  const vector<int> &Dilations() const { return dilations_; }

  const int &Groups() const { return groups; }

  const RType *InputBias() const { return input_bias_; }

  const RType *InputMean() const { return input_mean_; }

  const RType *InputScale() const { return input_scale_; }

  const RType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  const bool &IsTest() const { return is_test_; }

  void SetNewScale(RType *new_scale) { new_scale_ = new_scale; }

  void SetNewBias(RType *new_bias) { new_bias_ = new_bias; }

  const RType *NewScale() const { return new_scale_; }

  const RType *NewBias() const { return new_bias_; }

 protected:
  RType *bias_;
  int axis_;
  RType *input_;
  RType *output_y_;
  RType *filter_;
  vector<int> strides_;
  vector<int> paddings_;
  vector<int> dilations_;
  int groups;
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
  float epsilon_;
  float momentum_;
  // The "is_test" attribute read is commented out in the ctor, so without a
  // default IsTest() would return an indeterminate value. Default to false.
  bool is_test_ = false;
  RType *new_bias_;
  RType *new_scale_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::ConvArgs fpga_conv_args;

 public:
  const fpga::ConvArgs &FpgaArgs() const { return fpga_conv_args; }
  void SetFpgaArgs(const fpga::ConvArgs &args) { fpga_conv_args = args; }
#endif
};
#endif
Y
Yao,kun 已提交
1483

E
eclipsess 已提交
1484
#ifdef FUSION_DWCONVBNRELU_OP
// Parameter pack for the fused depthwise-conv + batch-norm + relu op.
// NewScale/NewBias hold the folded BN parameters, injected via the setters.
template <typename Dtype>
class FusionDWConvBNReluParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDWConvBNReluParam(const VariableNameMap &inputs,
                          const VariableNameMap &outputs,
                          const AttributeMap &attrs, const Scope &scope) {
    filter_ = FilterFrom<GType>(inputs, scope);
    input_ = InputFrom<GType>(inputs, scope);
    output_ = OutFrom<GType>(outputs, scope);
    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
    dilations_ = GetAttr<vector<int>>("dilations", attrs);
    groups = GetAttr<int>("groups", attrs);
    input_bias_ = InputBiasFrom<GType>(inputs, scope);
    input_mean_ = InputMeanFrom<GType>(inputs, scope);
    input_scale_ = InputScaleFrom<GType>(inputs, scope);
    input_variance_ = InputVarianceFrom<GType>(inputs, scope);
    epsilon_ = GetAttr<float>("epsilon", attrs);
    momentum_ = GetAttr<float>("momentum", attrs);
    //    is_test_ = GetAttr<bool>("is_test", attrs);
  }

  const RType *Input() const { return input_; }

  const RType *Filter() const { return filter_; }

  RType *Output() const { return output_; }

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

  const vector<int> &Dilations() const { return dilations_; }

  const int &Groups() const { return groups; }

  const RType *InputBias() const { return input_bias_; }

  const RType *InputMean() const { return input_mean_; }

  const RType *InputScale() const { return input_scale_; }

  const RType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  const bool &IsTest() const { return is_test_; }

  void SetNewScale(RType *new_scale) { new_scale_ = new_scale; }

  void SetNewBias(RType *new_bias) { new_bias_ = new_bias; }

  const RType *NewScale() const { return new_scale_; }

  const RType *NewBias() const { return new_bias_; }

 protected:
  RType *input_;
  RType *output_;
  RType *filter_;
  vector<int> strides_;
  vector<int> paddings_;
  vector<int> dilations_;
  int groups;
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
  float epsilon_;
  float momentum_;
  // The "is_test" attribute read is commented out in the ctor, so without a
  // default IsTest() would return an indeterminate value. Default to false.
  bool is_test_ = false;
  RType *new_bias_;
  RType *new_scale_;
};

#endif

1567
#ifdef FUSION_CONVBNRELU_OP
N
nhzlx 已提交
1568
template <typename Dtype>
1569
class FusionConvBNReluParam : public OpParam {
N
nhzlx 已提交
1570 1571 1572
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

1573 1574 1575 1576
 public:
  FusionConvBNReluParam(const VariableNameMap &inputs,
                        const VariableNameMap &outputs,
                        const AttributeMap &attrs, const Scope &scope) {
N
nhzlx 已提交
1577 1578 1579
    filter_ = FilterFrom<GType>(inputs, scope);
    input_ = InputFrom<GType>(inputs, scope);
    output_ = OutFrom<GType>(outputs, scope);
1580 1581 1582 1583 1584

    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
    dilations_ = GetAttr<vector<int>>("dilations", attrs);
    groups = GetAttr<int>("groups", attrs);
N
nhzlx 已提交
1585 1586 1587 1588
    input_bias_ = InputBiasFrom<GType>(inputs, scope);
    input_mean_ = InputMeanFrom<GType>(inputs, scope);
    input_scale_ = InputScaleFrom<GType>(inputs, scope);
    input_variance_ = InputVarianceFrom<GType>(inputs, scope);
1589 1590 1591 1592 1593
    epsilon_ = GetAttr<float>("epsilon", attrs);
    momentum_ = GetAttr<float>("momentum", attrs);
    //    is_test_ = GetAttr<bool>("is_test", attrs);
  }

N
nhzlx 已提交
1594
  const RType *Input() const { return input_; }
1595

Z
zhangyang 已提交
1596
#ifdef PADDLE_MOBILE_FPGA
N
nhzlx 已提交
1597
  RType *Filter() const { return filter_; }
Z
zhangyang 已提交
1598
#else
N
nhzlx 已提交
1599
  const RType *Filter() const { return filter_; }
Z
zhangyang 已提交
1600
#endif
1601

N
nhzlx 已提交
1602
  RType *Output() const { return output_; }
1603 1604 1605 1606 1607 1608 1609 1610 1611

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

  const vector<int> &Dilations() const { return dilations_; }

  const int &Groups() const { return groups; }

N
nhzlx 已提交
1612
  const RType *InputBias() const { return input_bias_; }
1613

N
nhzlx 已提交
1614
  const RType *InputMean() const { return input_mean_; }
1615

N
nhzlx 已提交
1616
  const RType *InputScale() const { return input_scale_; }
1617

N
nhzlx 已提交
1618
  const RType *InputVariance() const { return input_variance_; }
1619 1620 1621 1622 1623 1624 1625

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  const bool &IsTest() const { return is_test_; }

N
nhzlx 已提交
1626
  void SetNewScale(RType *new_scale) { new_scale_ = new_scale; }
1627

N
nhzlx 已提交
1628
  void SetNewBias(RType *new_bias) { new_bias_ = new_bias; }
1629

N
nhzlx 已提交
1630
  const RType *NewScale() const { return new_scale_; }
1631

N
nhzlx 已提交
1632
  const RType *NewBias() const { return new_bias_; }
1633 1634

 protected:
N
nhzlx 已提交
1635 1636 1637
  RType *input_;
  RType *output_;
  RType *filter_;
1638 1639 1640 1641
  vector<int> strides_;
  vector<int> paddings_;
  vector<int> dilations_;
  int groups;
N
nhzlx 已提交
1642 1643 1644 1645
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
1646 1647 1648
  float epsilon_;
  float momentum_;
  bool is_test_;
N
nhzlx 已提交
1649 1650
  RType *new_bias_;
  RType *new_scale_;
Z
zhangyang 已提交
1651 1652 1653 1654 1655 1656 1657 1658 1659
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::ConvArgs fpga_conv_args;

 public:
  const fpga::ConvArgs &FpgaArgs() const { return fpga_conv_args; }
  void SetFpgaArgs(const fpga::ConvArgs &args) { fpga_conv_args = args; }
#endif
1660 1661 1662
};
#endif

Y
Yao,kun 已提交
1663
#ifdef IM2SEQUENCE_OP
N
nhzlx 已提交
1664
template <typename Dtype>
Y
Yao,kun 已提交
1665
class Im2SequenceParam : public OpParam {
N
nhzlx 已提交
1666 1667 1668
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

Y
Yao,kun 已提交
1669 1670 1671 1672
 public:
  Im2SequenceParam(const VariableNameMap &inputs,
                   const VariableNameMap &outputs, const AttributeMap &attrs,
                   const Scope &scope) {
N
nhzlx 已提交
1673 1674
    input_x_ = InputXFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
Y
Yao,kun 已提交
1675 1676 1677 1678 1679
    kernels_ = GetAttr<vector<int>>("kernels", attrs);
    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
  }

N
nhzlx 已提交
1680
  const RType *Input() const { return input_x_; }
Y
Yao,kun 已提交
1681

N
nhzlx 已提交
1682
  RType *Output() const { return out_; }
Y
Yao,kun 已提交
1683 1684 1685 1686 1687 1688 1689 1690

  const vector<int> &Kernels() const { return kernels_; }

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

 private:
N
nhzlx 已提交
1691 1692
  RType *input_x_;
  RType *out_;
Y
Yao,kun 已提交
1693 1694 1695 1696
  vector<int> kernels_;
  vector<int> strides_;
  vector<int> paddings_;
};
1697
#endif
Y
Yao,kun 已提交
1698

1699
#ifdef DROPOUT_OP
N
nhzlx 已提交
1700
template <typename Dtype>
Y
Yao,kun 已提交
1701
class DropoutParam : public OpParam {
N
nhzlx 已提交
1702 1703 1704
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

Y
Yao,kun 已提交
1705 1706 1707
 public:
  DropoutParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, const Scope &scope) {
N
nhzlx 已提交
1708 1709
    input_x_ = InputXFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
Y
Yao,kun 已提交
1710 1711
  }

N
nhzlx 已提交
1712
  const RType *InputX() const { return input_x_; }
Y
Yao,kun 已提交
1713

N
nhzlx 已提交
1714
  RType *Out() const { return out_; }
Y
Yao,kun 已提交
1715 1716

 private:
N
nhzlx 已提交
1717 1718
  RType *input_x_;
  RType *out_;
Y
Yao,kun 已提交
1719
};
1720
#endif
Y
Yao,kun 已提交
1721

L
liuruilong 已提交
1722
#ifdef CONV_TRANSPOSE
N
nhzlx 已提交
1723
template <typename Dtype>
L
liuruilong 已提交
1724
class ConvTransposeParam : public OpParam {
N
nhzlx 已提交
1725 1726 1727
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

L
liuruilong 已提交
1728 1729 1730 1731
 public:
  ConvTransposeParam(const VariableNameMap &inputs,
                     const VariableNameMap &outputs, const AttributeMap &attrs,
                     const Scope &scope) {
N
nhzlx 已提交
1732 1733 1734
    filter_ = FilterFrom<GType>(inputs, scope);
    input_ = InputFrom<GType>(inputs, scope);
    output_ = OutputFrom<GType>(outputs, scope);
L
liuruilong 已提交
1735 1736 1737 1738 1739 1740
    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
    dilations_ = GetAttr<vector<int>>("dilations", attrs);
    groups = GetAttr<int>("groups", attrs);
  }

N
nhzlx 已提交
1741
  const RType *Input() const { return input_; }
L
liuruilong 已提交
1742

N
nhzlx 已提交
1743
  const RType *Filter() const { return filter_; }
L
liuruilong 已提交
1744

N
nhzlx 已提交
1745
  RType *Output() const { return output_; }
L
liuruilong 已提交
1746 1747 1748 1749 1750 1751 1752 1753 1754 1755

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

  const vector<int> &Dilations() const { return dilations_; }

  const int &Groups() const { return groups; }

 private:
N
nhzlx 已提交
1756 1757 1758
  RType *input_;
  RType *output_;
  RType *filter_;
L
liuruilong 已提交
1759 1760 1761 1762 1763 1764 1765
  vector<int> strides_;
  vector<int> paddings_;
  vector<int> dilations_;
  int groups;
};
#endif

朔-望's avatar
朔-望 已提交
1766 1767
}  // namespace operators
}  // namespace paddle_mobile