/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <string>
#include <vector>
#include "common/log.h"
#include "common/type_define.h"
#include "common/types.h"
#include "framework/lod_tensor.h"
#include "framework/scope.h"
#include "framework/tensor.h"
#include "framework/variable.h"
#ifdef PADDLE_MOBILE_FPGA
#include "fpga/api.h"
#endif

namespace paddle_mobile {
namespace operators {

using framework::Attribute;
using framework::AttributeMap;
using framework::LoDTensor;
using framework::Scope;
using framework::Tensor;
using std::string;
using std::vector;

template <typename Dtype>
struct DtypeTensorTrait {
  // The type we read out of the Variable in the scope.
  typedef framework::LoDTensor gtype;
  // The type kernels compute on: the parent class of gtype, or the same type.
  typedef framework::Tensor rtype;
};
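
// Exposition-only note (not in the original source): every param class below
// instantiates this trait to pick its tensor types, e.g.
//
//   typedef typename DtypeTensorTrait<Dtype>::gtype GType;  // read from scope
//   typedef typename DtypeTensorTrait<Dtype>::rtype RType;  // used by kernels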

class OpParam {
 protected:
  template <typename T>
  static T *InputH0From(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("H0", inputs, scope);
  }
  template <typename T>
  static T *InputAlphaFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Alpha", inputs, scope);
  }

  template <typename T>
  static T *InputFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Input", inputs, scope);
  }

  template <typename T>
  static T *InputXFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("X", inputs, scope);
  }
  template <typename T>
  static T *InputOutSizeFrom(const VariableNameMap &inputs,
                             const Scope &scope) {
    return GetVarValue<T>("OutSize", inputs, scope);
  }

  template <typename T>
  static T *InputWFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("W", inputs, scope);
  }

  template <typename T>
  static T *InputIdsFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Ids", inputs, scope);
  }

  template <typename T>
  static T *InputEmissionFrom(const VariableNameMap &inputs,
                              const Scope &scope) {
    return GetVarValue<T>("Emission", inputs, scope);
  }

  template <typename T>
  static T *InputTransitionFrom(const VariableNameMap &inputs,
                                const Scope &scope) {
    return GetVarValue<T>("Transition", inputs, scope);
  }
  template <typename T>
  static T *InputLabelFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Label", inputs, scope);
  }

  template <typename T>
  static T *InputXFrom1(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue1<T>("addX", inputs, scope);
  }

  template <typename T>
  static T *InputYFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Y", inputs, scope);
  }

  template <typename T>
  static T *InputYFrom1(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue1<T>("Y", inputs, scope);
  }

  template <typename T>
  static T *InputZFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Z", inputs, scope);
  }

  template <typename T>
  static T *InputBiasFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Bias", inputs, scope);
  }
  template <typename T>
  static T *InputWeightFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Weight", inputs, scope);
  }
  template <typename T>
  static T *InputVarianceFrom(const VariableNameMap &inputs,
                              const Scope &scope) {
    return GetVarValue<T>("Variance", inputs, scope);
  }
  template <typename T>
  static T *InputMeanFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Mean", inputs, scope);
  }
  template <typename T>
  static T *InputScaleFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Scale", inputs, scope);
  }
  template <typename T>
  static T *InputImageFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Image", inputs, scope);
  }
  template <typename T>
  static T *InputPriorBoxFrom(const VariableNameMap &inputs,
                              const Scope &scope) {
    return GetVarValue<T>("PriorBox", inputs, scope);
  }
  template <typename T>
  static T *InputPriorBoxVarFrom(const VariableNameMap &inputs,
                                 const Scope &scope) {
    return GetVarValue<T>("PriorBoxVar", inputs, scope);
  }
  // Declared as LoDTensor, but a plain Tensor is used for now.
  template <typename T>
  static T *InputTargetBoxFrom(const VariableNameMap &inputs,
                               const Scope &scope) {
    return GetVarValue<T>("TargetBox", inputs, scope);
  }
  template <typename T>
  static T *InputBBoxesFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("BBoxes", inputs, scope);
  }

  template <typename T>
  static T *InputScoresFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Scores", inputs, scope);
  }

  template <typename T>
  static T *InputShapeFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Shape", inputs, scope);
  }

  template <typename T>
  static vector<T *> InputMultiFrom(const VariableNameMap &inputs,
                                    const Scope &scope) {
    return GetMultiVarValue<T>("X", inputs, scope);
  }

  template <typename T>
  static T *OutputBatchGateFrom(const VariableNameMap &outputs,
                                const Scope &scope) {
    return GetVarValue<T>("BatchGate", outputs, scope);
  }

  template <typename T>
  static T *OutputViterbiPathFrom(const VariableNameMap &outputs,
                                  const Scope &scope) {
    return GetVarValue<T>("ViterbiPath", outputs, scope);
  }
  template <typename T>
  static T *OutputBatchResetHiddenPrevFrom(const VariableNameMap &outputs,
                                           const Scope &scope) {
    return GetVarValue<T>("BatchResetHiddenPrev", outputs, scope);
  }

  template <typename T>
  static T *OutputBatchHiddenFrom(const VariableNameMap &outputs,
                                  const Scope &scope) {
    return GetVarValue<T>("BatchHidden", outputs, scope);
  }

  template <typename T>
  static T *OutputHiddenFrom(const VariableNameMap &outputs,
                             const Scope &scope) {
    return GetVarValue<T>("Hidden", outputs, scope);
  }

  template <typename T>
  static T *OutputFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Output", outputs, scope);
  }

  template <typename T>
  static T *OutFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Out", outputs, scope);
  }

  template <typename T>
  static vector<T *> OutMultiFrom(const VariableNameMap &outputs,
                                  const Scope &scope) {
    return GetMultiVarValue<T>("Out", outputs, scope);
  }

  template <typename T>
  static T *OutputYFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Y", outputs, scope);
  }

  template <typename T>
  static T *OutputBoxesFrom(const VariableNameMap &outputs,
                            const Scope &scope) {
    return GetVarValue<T>("Boxes", outputs, scope);
  }

  template <typename T>
  static T *OutputBoxFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("OutputBox", outputs, scope);
  }

  template <typename T>
  static T *OutputVariancesFrom(const VariableNameMap &outputs,
                                const Scope &scope) {
    return GetVarValue<T>("Variances", outputs, scope);
  }

  template <typename T>
  static T *MidOutFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("MidOut", outputs, scope);
  }

  template <typename T>
  static T *FilterFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Filter", inputs, scope);
  }

  template <typename T>
  static const T GetAttr(const string &key, const AttributeMap &map) {
    return ((Attribute)map.at(key)).Get<T>();
  }

  static const std::string GetStringAttr(const string &key,
                                         const AttributeMap &map) {
    return ((Attribute)map.at(key)).GetString();
  }

  static const bool HasAttr(const string &key, const AttributeMap &map) {
    return map.count(key) > 0;
  }

  // Looks up the first variable bound to `key` in the op's VariableNameMap
  // and returns it as a mutable T*, or nullptr if no variable is bound.
  template <typename T>
  static T *GetVarValue(const string &key, const VariableNameMap &var_map,
                        const Scope &scope) {
    PADDLE_MOBILE_ENFORCE(var_map.count(key) > 0,
                          "%s is not contained in var_map", key.c_str())
    auto var_vec = var_map.at(key);
    if (!var_vec.empty()) {
      auto var = scope.FindVar(var_vec[0]);
      return var->GetMutable<T>();
    } else {
      return nullptr;
    }
  }

  // Returns the name of the index-th variable bound to `key`.
  static std::string getkey(const string &key, const VariableNameMap &var_map,
                            int index) {
    auto var_vec = var_map.at(key);
    return var_vec[index];
  }

  // Like GetVarValue, but returns the second variable bound to `key`.
  template <typename T>
  static T *GetVarValue1(const string &key, const VariableNameMap &var_map,
                         const Scope &scope) {
    PADDLE_MOBILE_ENFORCE(var_map.count(key) > 0,
                          "%s is not contained in var_map", key.c_str())
    auto var_vec = var_map.at(key);
    if (!var_vec.empty()) {
      auto var = scope.FindVar(var_vec[1]);
      return var->GetMutable<T>();
    } else {
      return nullptr;
    }
  }

  // Collects every variable bound to `key` into a vector of mutable T*.
  template <typename T>
  static vector<T *> GetMultiVarValue(const string &key,
                                      const VariableNameMap &var_map,
                                      const Scope &scope) {
    auto var_vecs = var_map.at(key);
    assert(var_vecs.size() > 1);
    vector<T *> var_res;
    for (auto &var_vec : var_vecs) {
      auto var = scope.FindVar(var_vec);
      var_res.push_back(var->GetMutable<T>());
    }
    return var_res;
  }
};
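
// Exposition-only note: the protected helpers above only map the slot names
// recorded in a fluid OpDesc ("X", "Y", "Out", "Filter", ...) to variables in
// the scope. Every param class below repeats the same pattern in its
// constructor, for example:
//
//   input_x_ = OpParam::InputXFrom<GType>(inputs, scope);
//   axis_ = OpParam::GetAttr<int>("axis", attrs);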

template <typename Dtype>
class ConvParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ConvParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, const Scope &scope) {
    filter_ = OpParam::FilterFrom<GType>(inputs, scope);
    input_ = OpParam::InputFrom<GType>(inputs, scope);
    if (outputs.count("Output")) {
      output_ = OpParam::OutputFrom<GType>(outputs, scope);
    }
    strides_ = OpParam::GetAttr<vector<int>>("strides", attrs);
    paddings_ = OpParam::GetAttr<vector<int>>("paddings", attrs);
    dilations_ = OpParam::GetAttr<vector<int>>("dilations", attrs);
    groups = OpParam::GetAttr<int>("groups", attrs);
  }

  const RType *Input() const { return input_; }

  RType *Filter() const { return filter_; }

  RType *Output() const { return output_; }

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

  const vector<int> &Dilations() const { return dilations_; }

  const int &Groups() const { return groups; }

 private:
  RType *input_;
  RType *output_;
  RType *filter_;
  vector<int> strides_;
  vector<int> paddings_;
  vector<int> dilations_;
  int groups;
};
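
// Exposition-only sketch (hypothetical kernel code, not part of this header):
// a conv kernel consumes the param object roughly like this:
//
//   template <typename Dtype>
//   void ConvCompute(const ConvParam<Dtype> &param) {
//     auto strides = param.Strides();    // e.g. {1, 1}
//     auto paddings = param.Paddings();  // e.g. {0, 0}
//     // ... compute on param.Input() and param.Filter(),
//     //     writing the result to param.Output()
//   }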
template <typename Dtype>
Print &operator<<(Print &printer, const ConvParam<Dtype> &conv_param);

template <typename Dtype>
class ElementwiseAddParam : OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ElementwiseAddParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
                      const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    input_y_ = InputYFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
    axis_ = GetAttr<int>("axis", attrs);
  }

  const GType *InputX() const { return input_x_; }

  const GType *InputY() const { return input_y_; }

  GType *Out() const { return out_; }

  const int &Axis() const { return axis_; }

 private:
  GType *input_x_;
  GType *input_y_;
  GType *out_;
  int axis_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::EWAddArgs fpga_EW_add_args;

 public:
  const fpga::EWAddArgs &FpgaArgs() const { return fpga_EW_add_args; }
  void SetFpgaArgs(const fpga::EWAddArgs &args) { fpga_EW_add_args = args; }
#endif
};

#ifdef FUSION_ELEMENTWISEADDRELU_OP
template <typename Dtype>
using ElementwiseAddReluParam = ElementwiseAddParam<Dtype>;
#endif

#ifdef MUL_OP
N
nhzlx 已提交
413
template <typename Dtype>
朔-望's avatar
朔-望 已提交
414
class MulParam : OpParam {
N
nhzlx 已提交
415 416 417
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

朔-望's avatar
朔-望 已提交
418
 public:
419
  MulParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
420
           const AttributeMap &attrs, const Scope &scope) {
N
nhzlx 已提交
421 422 423
    input_x_ = InputXFrom<GType>(inputs, scope);
    input_y_ = InputYFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
424 425 426
    x_num_col_dims_ = GetAttr<int>("x_num_col_dims", attrs);
    y_num_col_dims_ = GetAttr<int>("y_num_col_dims", attrs);
  }
朔-望's avatar
朔-望 已提交
427

xiebaiyuan's avatar
xiebaiyuan 已提交
428
  const GType *InputX() const { return input_x_; }
朔-望's avatar
朔-望 已提交
429

xiebaiyuan's avatar
xiebaiyuan 已提交
430
  const GType *InputY() const { return input_y_; }
朔-望's avatar
朔-望 已提交
431

xiebaiyuan's avatar
xiebaiyuan 已提交
432
  GType *Out() const { return out_; }
朔-望's avatar
朔-望 已提交
433

434
  const int &XNumColDims() const { return x_num_col_dims_; }
朔-望's avatar
朔-望 已提交
435

436
  const int &YNumColDims() const { return y_num_col_dims_; }
朔-望's avatar
朔-望 已提交
437

朔-望's avatar
朔-望 已提交
438
 private:
xiebaiyuan's avatar
xiebaiyuan 已提交
439 440 441
  GType *input_x_;
  GType *input_y_;
  GType *out_;
442 443
  int x_num_col_dims_;
  int y_num_col_dims_;
朔-望's avatar
朔-望 已提交
444
};
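
// Exposition-only note: x_num_col_dims / y_num_col_dims follow fluid's mul
// op, which flattens a high-rank tensor into a matrix before multiplying;
// with x_num_col_dims = 1, an X of shape [2, 3, 4] is treated as 2 x 12.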
#endif

#ifdef CONCAT_OP
template <typename Dtype>
class ConcatParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ConcatParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
              const AttributeMap &attrs, const Scope &scope) {
    inputs_ = InputMultiFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
    axis_ = GetAttr<int>("axis", attrs);
  }

  vector<GType *> Inputs() const { return inputs_; }

  GType *Out() const { return out_; }

  const int &Axis() const { return axis_; }

 private:
  vector<GType *> inputs_;
  GType *out_;
  int axis_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::ConcatArgs fpga_concat_args;

 public:
  const fpga::ConcatArgs &FpgaArgs() const { return fpga_concat_args; }
  void SetFpgaArgs(const fpga::ConcatArgs &args) { fpga_concat_args = args; }
#endif
};
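
// Exposition-only note: concat joins all Inputs() along Axis(); the inputs
// must agree on every other dimension, e.g. two [1, 8, 4] tensors
// concatenated on axis 1 yield a [1, 16, 4] output.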
#endif

#ifdef LRN_OP
template <typename Dtype>
class LrnParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  LrnParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
           const AttributeMap &attrs, const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
    mid_out_ = MidOutFrom<GType>(outputs, scope);
    n_ = GetAttr<int>("n", attrs);
    alpha_ = GetAttr<float>("alpha", attrs);
    beta_ = GetAttr<float>("beta", attrs);
    k_ = GetAttr<float>("k", attrs);
    data_format_ = GetStringAttr("data_format", attrs);
  }

  const RType *InputX() const { return input_x_; }

  RType *Out() const { return out_; }

  RType *MidOut() const { return mid_out_; }

  const int &N() const { return n_; }

  const float &Alpha() const { return alpha_; }

  const float &Beta() const { return beta_; }

  const float &K() const { return k_; }

  const string &DataFormat() const { return data_format_; }

 private:
  RType *input_x_;
  RType *out_;
  RType *mid_out_;
  int n_;
  float alpha_;
  float beta_;
  float k_;
  string data_format_;
};
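
// Exposition-only note: under the usual LRN definition these fields
// parameterize, for each window of n_ neighboring channels,
//   out[i] = in[i] / (k_ + alpha_ * sum_j in[j]^2)^beta_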
#endif

#ifdef BATCHNORM_OP
template <typename Dtype>
class BatchNormParam : OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  BatchNormParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                 const AttributeMap &attrs, const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    output_y_ = OutputYFrom<GType>(outputs, scope);
    input_bias_ = InputBiasFrom<GType>(inputs, scope);
    input_mean_ = InputMeanFrom<GType>(inputs, scope);
    input_scale_ = InputScaleFrom<GType>(inputs, scope);
    input_variance_ = InputVarianceFrom<GType>(inputs, scope);
    epsilon_ = GetAttr<float>("epsilon", attrs);
    momentum_ = GetAttr<float>("momentum", attrs);
    //    is_test_ = GetAttr<bool>("is_test", attrs);
  }

  const RType *InputX() const { return input_x_; }

  RType *OutputY() const { return output_y_; }

  const RType *InputBias() const { return input_bias_; }

  const RType *InputMean() const { return input_mean_; }

  const RType *InputScale() const { return input_scale_; }

  const RType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  const bool &IsTest() const { return is_test_; }

  const string &DataFormat() const { return data_format_; }

 private:
  RType *input_x_;
  RType *output_y_;
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
  float epsilon_;
  float momentum_;
  bool is_test_;
  string data_format_;
};
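
// Exposition-only note: at inference time batch norm computes
//   y = scale * (x - mean) / sqrt(variance + epsilon) + bias
// from the saved running statistics loaded above.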
#endif

#ifdef POOL_OP
template <typename Dtype>
class PoolParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  PoolParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, const Scope &scope) {
    input_ = InputXFrom<GType>(inputs, scope);

    output_ = OutFrom<GType>(outputs, scope);
    pooling_type_ = GetStringAttr("pooling_type", attrs);
    ksize_ = GetAttr<vector<int>>("ksize", attrs);
    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
    ceil_mode_ = GetAttr<bool>("ceil_mode", attrs);
    global_pooling_ = GetAttr<bool>("global_pooling", attrs);
  }

  const RType *Input() const { return input_; }

  RType *Output() const { return output_; }

  const string &PoolingType() const { return pooling_type_; }

  const vector<int> &Ksize() const { return ksize_; }

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

  bool isCeilMode() const { return ceil_mode_; }

  bool isGlobalPooling() const { return global_pooling_; }

 private:
  RType *input_;
  RType *output_;
  string pooling_type_;
  vector<int> ksize_;
  vector<int> strides_;
  vector<int> paddings_;
  bool ceil_mode_;
  bool global_pooling_ = false;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::PoolingArgs fpga_pool_args;

 public:
  const fpga::PoolingArgs &FpgaArgs() const { return fpga_pool_args; }
  void SetFpgaArgs(const fpga::PoolingArgs &args) { fpga_pool_args = args; }
#endif
};
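
// Exposition-only note: when global_pooling_ is set, the whole spatial extent
// of the input is pooled and ksize_ is ignored; ceil_mode_ chooses ceil over
// floor when deriving the output height/width.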
#endif

#ifdef PRIORBOX_OP
template <typename Dtype>
class PriorBoxParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  PriorBoxParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, const Scope &scope) {
    input_ = InputFrom<GType>(inputs, scope);
    input_image_ = InputImageFrom<GType>(inputs, scope);
    output_boxes_ = OutputBoxesFrom<GType>(outputs, scope);
    output_variances_ = OutputVariancesFrom<GType>(outputs, scope);
    min_sizes_ = GetAttr<vector<float>>("min_sizes", attrs);
    max_sizes_ = GetAttr<vector<float>>("max_sizes", attrs);
    aspect_ratios_ = GetAttr<vector<float>>("aspect_ratios", attrs);
    variances_ = GetAttr<vector<float>>("variances", attrs);

    if (HasAttr("min_max_aspect_ratios_order", attrs)) {
      min_max_aspect_ratios_order_ =
          GetAttr<bool>("min_max_aspect_ratios_order", attrs);
    }
    flip_ = GetAttr<bool>("flip", attrs);
    clip_ = GetAttr<bool>("clip", attrs);
    step_w_ = GetAttr<float>("step_w", attrs);
    step_h_ = GetAttr<float>("step_h", attrs);
    offset_ = GetAttr<float>("offset", attrs);
  }
  const RType *Input() const { return input_; }

  const RType *InputImage() const { return input_image_; }

  RType *OutputBoxes() const { return output_boxes_; }

  RType *OutputVariances() const { return output_variances_; }

  const vector<float> &MinSizes() const { return min_sizes_; }

  const vector<float> &MaxSizes() const { return max_sizes_; }

  const vector<float> &AspectRatios() const { return aspect_ratios_; }

  const vector<float> &Variances() const { return variances_; }

  const bool &Flip() const { return flip_; }

  const bool &Clip() const { return clip_; }

  const float &StepW() const { return step_w_; }

  const float &StepH() const { return step_h_; }

  const float &Offset() const { return offset_; }

  const bool &MinMaxAspectRatiosOrder() const {
    return min_max_aspect_ratios_order_;
  }

 private:
  RType *input_;
  RType *input_image_;
  RType *output_boxes_;
  RType *output_variances_;
  vector<float> min_sizes_;
  vector<float> max_sizes_;
  vector<float> aspect_ratios_;
  vector<float> variances_;
  bool flip_;
  bool clip_;
  float step_w_;
  float step_h_;
  float offset_;
  bool min_max_aspect_ratios_order_;
};
#endif

#ifdef BOXCODER_OP
template <typename Dtype>
class BoxCoderParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  BoxCoderParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, const Scope &scope) {
    input_priorbox_ = InputPriorBoxFrom<GType>(inputs, scope);
    input_priorboxvar_ = InputPriorBoxVarFrom<GType>(inputs, scope);
    input_targetbox_ = InputTargetBoxFrom<GType>(inputs, scope);
    output_box_ = OutputBoxFrom<GType>(outputs, scope);
    code_type_ = GetStringAttr("code_type", attrs);
  }
  const RType *InputPriorBox() const { return input_priorbox_; }

  const RType *InputPriorBoxVar() const { return input_priorboxvar_; }

  const RType *InputTargetBox() const { return input_targetbox_; }

  RType *OutputBox() const { return output_box_; }

  const std::string &CodeType() const { return code_type_; }

 private:
  RType *input_priorbox_;
  RType *input_priorboxvar_;
  RType *input_targetbox_;
  RType *output_box_;
  std::string code_type_;
};
#endif

#ifdef SOFTMAX_OP
template <typename Dtype>
class SoftmaxParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SoftmaxParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
  }
  const RType *InputX() const { return input_x_; }
  RType *Out() const { return out_; }

 private:
  RType *input_x_;
  RType *out_;

#ifdef PADDLE_MOBILE_FPGA

 private:
  std::shared_ptr<RType> float_input_x_;
  fpga::BypassArgs fpga_bypass_args;

 public:
  RType *FloatInput() const {
    return float_input_x_ == nullptr ? input_x_ : float_input_x_.get();
  }
  void SetFloatInput(Tensor *input) { float_input_x_.reset(input); }
  const fpga::BypassArgs &FpgaArgs() const { return fpga_bypass_args; }
  void SetFpgaArgs(const fpga::BypassArgs &args) { fpga_bypass_args = args; }
#endif
};
#endif

#ifdef SIGMOID_OP
template <typename Dtype>
class SigmoidParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SigmoidParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
  }
  const RType *InputX() const { return input_x_; }
  RType *Out() const { return out_; }

 private:
  RType *input_x_;
  RType *out_;
};
#endif

#ifdef MULTICLASSNMS_OP
template <typename Dtype>
class MultiClassNMSParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  MultiClassNMSParam(const VariableNameMap &inputs,
                     const VariableNameMap &outputs, const AttributeMap &attrs,
                     const Scope &scope) {
    input_bboxes_ = InputBBoxesFrom<GType>(inputs, scope);
    input_scores_ = InputScoresFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
    background_label_ = GetAttr<int>("background_label", attrs);
    nms_top_k_ = GetAttr<int>("nms_top_k", attrs);
    keep_top_k_ = GetAttr<int>("keep_top_k", attrs);
    nms_threshold_ = GetAttr<float>("nms_threshold", attrs);
    nms_eta_ = GetAttr<float>("nms_eta", attrs);
    score_threshold_ = GetAttr<float>("score_threshold", attrs);
  }

  const RType *InputBBoxes() const { return input_bboxes_; }

  const RType *InputScores() const { return input_scores_; }

  RType *Out() const { return out_; }

  const int &BackGroundLabel() const { return background_label_; }

  const int &NMSTopK() const { return nms_top_k_; }

  const int &KeepTopK() const { return keep_top_k_; }

  const float &NMSThreshold() const { return nms_threshold_; }

  const float &NMSEta() const { return nms_eta_; }

  const float &ScoreThreshold() const { return score_threshold_; }

 private:
  RType *input_bboxes_;
  RType *input_scores_;
  RType *out_;
  int background_label_;
  int nms_top_k_;
  int keep_top_k_;
  float nms_threshold_;
  float nms_eta_;
  float score_threshold_;
};
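
// Exposition-only note: multiclass NMS keeps up to nms_top_k_ boxes per class
// scoring above score_threshold_, drops boxes that overlap a kept box by more
// than nms_threshold_, and finally returns at most keep_top_k_ detections.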
#endif

template <typename Dtype>
class FeedParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FeedParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, Scope *scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    auto var = scope->Var("batch_size");
    batch_size = var->GetValue<int>();
  }
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return out_; }
  const int BatchSize() const { return batch_size; }

 private:
  GType *input_x_;
  GType *out_;
  int batch_size;
};
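
// Exposition-only note: FeedParam is the one param in this header that takes
// a mutable Scope*, because it creates (or finds) the "batch_size" variable
// it reads back above.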

template <typename Dtype>
class FetchParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FetchParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
  }
  const RType *InputX() const { return input_x_; }
  RType *Out() const { return out_; }

 private:
  RType *input_x_;
  RType *out_;
};

#ifdef TRANSPOSE_OP
template <typename Dtype>
class TransposeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  TransposeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                 const AttributeMap &attrs, const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
    axis_ = GetAttr<vector<int>>("axis", attrs);
  }

  const RType *InputX() const { return input_x_; }

  RType *Out() const { return out_; }

  const vector<int> &Axis() const { return axis_; }

 private:
  RType *input_x_;
  RType *out_;
  vector<int> axis_;
};
#endif
#ifdef LOOKUP_OP
template <typename Dtype>
class LookupParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  LookupParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
              const AttributeMap &attrs, const Scope &scope) {
    input_w_ = InputWFrom<GType>(inputs, scope);
    input_ids_ = InputIdsFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
    padding_idx_ = GetAttr<int64_t>("padding_idx", attrs);
  }

  const GType *InputW() const { return input_w_; }
  const GType *InputIds() const { return input_ids_; }
  GType *Out() const { return out_; }
  int64_t PaddingIdx() const { return padding_idx_; }

 private:
  GType *input_w_;
  GType *input_ids_;
  GType *out_;
  int64_t padding_idx_;
};
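
// Exposition-only note: lookup gathers rows of the table W by integer id,
// i.e. Out[i] = W[Ids[i]]; padding_idx_ marks an id whose row is treated as
// zeros (conventionally -1 disables this).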
#endif

#ifdef CRF_OP
template <typename Dtype>
class CrfParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  //    {G_OP_TYPE_CRF, {{"Emission", "Transition", "Label"}, {"ViterbiPath"}}},

  CrfParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
           const AttributeMap &attrs, const Scope &scope) {
    // todo crf params
    input_emission_ = InputEmissionFrom<GType>(inputs, scope);
    input_transition_ = InputTransitionFrom<GType>(inputs, scope);
    input_label_ = InputLabelFrom<GType>(inputs, scope);
    output_viterbipath_ = OutputViterbiPathFrom<GType>(outputs, scope);
    //    padding_idx_ = GetAttr<int64_t>("padding_idx", attrs);
  }
  const GType *InputEmission() const { return input_emission_; }
  const GType *InputTransition() const { return input_transition_; }
  const GType *InputLabel() const { return input_label_; }
  GType *outputVBP() const { return output_viterbipath_; }
  //  const RType *InputIds() const { return input_ids_; }
  //  RType *Out() const { return out_; }
  //  int64_t PaddingIdx() const { return padding_idx_; }

 private:
  GType *input_emission_;
  GType *input_transition_;
  GType *input_label_;
  GType *output_viterbipath_;

  //  RType *input_ids_;
  //  RType *out_;
  //  int64_t padding_idx_;
};
#endif

#ifdef RESHAPE_OP
template <typename Dtype>
class ReshapeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ReshapeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    input_shape_ = InputShapeFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
    shape_ = GetAttr<vector<int>>("shape", attrs);

    if (HasAttr("inplace", attrs)) {
      inplace_ = GetAttr<bool>("inplace", attrs);
    } else {
      inplace_ = false;
      DLOG << "ReshapeParam lost inplace params. maybe fluid updated";
    }
  }

  const RType *InputX() const { return input_x_; }

  const RType *InputShape() const { return input_shape_; }

  RType *Out() const { return out_; }

  const vector<int> &Shape() const { return shape_; }

  const bool &Inplace() const { return inplace_; }

 private:
  RType *input_x_;
  RType *input_shape_;
  RType *out_;
  vector<int> shape_;
  bool inplace_;
};
#endif
#ifdef SCALE_OP
template <typename Dtype>
class ScaleParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ScaleParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    input_bias_ = InputBiasFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
    inplace_ = GetAttr<bool>("inplace", attrs);
    has_bias_ = GetAttr<bool>("has_bias", attrs);
    scales_ = GetAttr<vector<float>>("scales", attrs);
    biases_ = GetAttr<vector<float>>("biases", attrs);
  }

  const RType *InputX() const { return input_x_; }

  const RType *InputBias() const { return input_bias_; }

  RType *Out() const { return out_; }

  const bool &Inplace() const { return inplace_; }

  const bool &HasBias() const { return has_bias_; }

  const vector<float> &Scales() const { return scales_; }

  const vector<float> &Biases() const { return biases_; }

 private:
  RType *input_x_;
  RType *input_bias_;
  RType *out_;
  bool inplace_;
  bool has_bias_;
  vector<float> scales_;
  vector<float> biases_;
T

#ifdef SLICE_OP
N
nhzlx 已提交
1080
template <typename Dtype>
I
itminner 已提交
1081
class SliceParam : public OpParam {
N
nhzlx 已提交
1082 1083 1084
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

I
itminner 已提交
1085 1086 1087
 public:
  SliceParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, const Scope &scope) {
N
nhzlx 已提交
1088 1089 1090
    input_x_ = InputXFrom<GType>(inputs, scope);
    input_shape_ = InputShapeFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
I
itminner 已提交
1091 1092 1093 1094 1095
    axis_ = GetAttr<int>("axis", attrs);
    slice_points_ = GetAttr<vector<int>>("slice_points", attrs);
    inplace_ = GetAttr<bool>("inplace", attrs);
  }

N
nhzlx 已提交
1096
  const RType *InputX() const { return input_x_; }
I
itminner 已提交
1097

N
nhzlx 已提交
1098
  const RType *InputShape() const { return input_shape_; }
I
itminner 已提交
1099

N
nhzlx 已提交
1100
  RType *Out() const { return out_; }
I
itminner 已提交
1101 1102 1103 1104 1105 1106 1107 1108

  const int &Axis() const { return axis_; }

  const vector<int> &SlicePoints() const { return slice_points_; }

  const bool &Inplace() const { return inplace_; }

 private:
N
nhzlx 已提交
1109 1110 1111
  RType *input_x_;
  RType *input_shape_;
  RType *out_;
I
itminner 已提交
1112 1113 1114 1115
  int axis_;
  vector<int> slice_points_;
  bool inplace_;
};
#endif

#ifdef RESIZE_OP
template <typename Dtype>
class ResizeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ResizeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
              const AttributeMap &attrs, const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    input_shape_ = InputShapeFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
    is_pyramid_test_ = GetAttr<bool>("is_pyramid_test", attrs);
    height_ = GetAttr<int>("height", attrs);
    width_ = GetAttr<int>("width", attrs);
    out_height_scale_ = GetAttr<float>("out_height_scale", attrs);
    out_width_scale_ = GetAttr<float>("out_width_scale", attrs);
  }

  const RType *InputX() const { return input_x_; }

  const RType *InputShape() const { return input_shape_; }

  RType *Out() const { return out_; }

  const bool &IsPyramidTest() const { return is_pyramid_test_; }

  const int &Height() const { return height_; }

  const int &Width() const { return width_; }

  const float &OutHeightScale() const { return out_height_scale_; }

  const float &OutWidthScale() const { return out_width_scale_; }

 private:
  RType *input_x_;
  RType *input_shape_;
  RType *out_;
  bool is_pyramid_test_;
  int height_;
  int width_;
  float out_height_scale_;
  float out_width_scale_;
};
#endif

#ifdef RELU_OP
/*
 * @b The op layer instantiates this param and passes it to the kernel layer.
 */
template <typename Dtype>
class ReluParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ReluParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
  }

  const RType *InputX() const { return input_x_; }

  RType *Out() const { return out_; }

 private:
  RType *input_x_;
  RType *out_;
};
#endif
#ifdef PRELU_OP
template <typename Dtype>
class PReluParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  PReluParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, const Scope &scope) {
    DLOG << "PReluParam inputs before";
    input_x_ = InputXFrom<GType>(inputs, scope);
    alpha_ = InputAlphaFrom<GType>(inputs, scope);
    framework::DDim dims = alpha_->dims();
    out_ = OutFrom<GType>(outputs, scope);
    mode_ = GetStringAttr("mode", attrs);
    DLOG << "PReluParam mode after" << mode_;
  }
  const RType *InputX() const { return input_x_; }
  const RType *InputAlpha() const { return alpha_; }
  RType *Out() const { return out_; }
  const std::string &Mode() const { return mode_; }

 private:
  RType *input_x_;
  RType *out_;
  RType *alpha_;
  std::string mode_;
};
#endif

template <typename Dtype>
class FusionFcParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionFcParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    input_y_ = InputYFrom<GType>(inputs, scope);
    input_z_ = InputZFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
    x_num_col_dims_ = GetAttr<int>("x_num_col_dims", attrs);
    y_num_col_dims_ = GetAttr<int>("y_num_col_dims", attrs);
    axis_ = GetAttr<int>("axis", attrs);
  }
  const GType *InputX() const { return input_x_; }

  const RType *InputY() const { return input_y_; }

  const RType *InputZ() const { return input_z_; }

  GType *Out() const { return out_; }

  const int &XNumColDims() const { return x_num_col_dims_; }

  const int &YNumColDims() const { return y_num_col_dims_; }

  const int &Axis() const { return axis_; }

 private:
  GType *input_x_;
  RType *input_y_;
  RType *input_z_;
  GType *out_;
  int x_num_col_dims_;
  int y_num_col_dims_;
  int axis_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::WrapperConvArgs fpga_conv_args;

 public:
  const fpga::WrapperConvArgs &FpgaArgs() const { return fpga_conv_args; }
  void SetFpgaArgs(const fpga::WrapperConvArgs &args) { fpga_conv_args = args; }
#endif
};
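
// Exposition-only note: fusion_fc fuses mul with elementwise_add, computing
// roughly Out = X * Y + Z, using the same num_col_dims flattening as MulParam
// and broadcasting Z along axis_.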

#ifdef FUSION_FCRELU_OP
template <typename DeviceType>
using FusionFcReluParam = FusionFcParam<DeviceType>;
#endif
template <typename Dtype>
class FusionConvAddParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvAddParam(const VariableNameMap &inputs,
                     const VariableNameMap &outputs, const AttributeMap &attrs,
                     const Scope &scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    bias_ = OpParam::InputYFrom<GType>(inputs, scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    output_ = OpParam::OutFrom<GType>(outputs, scope);
  }
  RType *Bias() const { return bias_; }

  const int &Axis() const { return axis_; }

  RType *Output() const { return output_; }

 protected:
  RType *bias_;
  int axis_;
  RType *output_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::WrapperConvArgs fpga_conv_args;

 public:
  const fpga::WrapperConvArgs &FpgaArgs() const { return fpga_conv_args; }
  void SetFpgaArgs(const fpga::WrapperConvArgs &args) { fpga_conv_args = args; }
#endif
};
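
// Exposition-only note: this fusion runs the convolution configured by the
// inherited ConvParam and then adds Bias() along Axis() into Output().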

template <typename Dtype>
Print &operator<<(Print &printer, const FusionConvAddParam<Dtype> &conv_param);

#ifdef FUSION_CONVADDRELU_OP
template <typename DeviceType>
class FusionConvAddReluParam : public FusionConvAddParam<DeviceType> {
 public:
  FusionConvAddReluParam(const VariableNameMap &inputs,
                         const VariableNameMap &outputs,
                         const AttributeMap &attrs, const Scope &scope)
      : FusionConvAddParam<DeviceType>(inputs, outputs, attrs, scope) {}
};
#endif

#ifdef FUSION_CONVADDPRELU_OP
template <typename Dtype>
class FusionConvAddPReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvAddPReluParam(const VariableNameMap &inputs,
                          const VariableNameMap &outputs,
                          const AttributeMap &attrs, const Scope &scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    alpha_ = OpParam::InputAlphaFrom<GType>(inputs, scope);
    mode_ = OpParam::GetStringAttr("mode", attrs);
    framework::DDim dims = alpha_->dims();
    bias_ = OpParam::InputYFrom<GType>(inputs, scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    output_ = OpParam::OutFrom<GType>(outputs, scope);
  }
  const RType *InputAlpha() const { return alpha_; }
  const std::string &Mode() const { return mode_; }
  RType *Bias() const { return bias_; }
  const int &Axis() const { return axis_; }
  RType *Output() const { return output_; }

 protected:
  RType *bias_;
  int axis_;
  RType *output_;
  RType *alpha_;
  std::string mode_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::WrapperConvArgs fpga_conv_args;

 public:
  const fpga::WrapperConvArgs &FpgaArgs() const { return fpga_conv_args; }
  void SetFpgaArgs(const fpga::WrapperConvArgs &args) { fpga_conv_args = args; }
#endif
};
#endif

#ifdef FUSION_CONVADDADDPRELU_OP
1367 1368 1369 1370
template <typename Dtype>
class FusionConvAddAddPReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;
1371 1372 1373 1374

 public:
  FusionConvAddAddPReluParam(const VariableNameMap &inputs,
                             const VariableNameMap &outputs,
1375 1376 1377 1378
                             const AttributeMap &attrs, const Scope &scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    bias1_ = OpParam::InputYFrom1<GType>(inputs, scope);
    alpha_ = OpParam::InputAlphaFrom<GType>(inputs, scope);
1379
    mode_ = OpParam::GetStringAttr("mode", attrs);
1380
    framework::DDim dims = alpha_->dims();
1381 1382 1383 1384 1385 1386
    bias_ = OpParam::InputYFrom<GType>(inputs, scope);
    output_ = OpParam::OutFrom<GType>(outputs, scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    keyOutput_ = OpParam::getkey("addOut", inputs, 0);
    keyX1_ = OpParam::getkey("addX", inputs, 1);
    keyY1_ = OpParam::getkey("Y", inputs, 1);
1387
    if (keyX1_ == keyOutput_) {
1388
      bias1_ = OpParam::InputYFrom1<GType>(inputs, scope);
1389
    } else if (keyY1_ == keyOutput_) {
1390
      bias1_ = OpParam::InputXFrom1<GType>(inputs, scope);
1391 1392 1393 1394 1395 1396 1397 1398 1399 1400 1401 1402 1403 1404 1405 1406 1407 1408 1409 1410 1411 1412 1413 1414
    }
  }
  const RType *InputAlpha() const { return alpha_; }
  const std::string &Mode() const { return mode_; }
  const RType *Bias1() const { return bias1_; }

  RType *Bias() const { return bias_; }

  const int &Axis() const { return axis_; }
  RType *Output() const { return output_; }

 protected:
  RType *bias_;
  int axis_;
  RType *output_;
  RType *alpha_;
  std::string mode_;
  RType *bias1_;
  std::string keyOutput_;
  std::string keyX1_;
  std::string keyY1_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::WrapperConvArgs fpga_conv_args;

 public:
  const fpga::WrapperConvArgs &FpgaArgs() const { return fpga_conv_args; }
  void SetFpgaArgs(const fpga::WrapperConvArgs &args) { fpga_conv_args = args; }
#endif
};
#endif

#ifdef FUSION_CONVADDBNRELU_OP
template <typename Dtype>
class FusionConvAddBNReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvAddBNReluParam(const VariableNameMap &inputs,
                           const VariableNameMap &outputs,
                           const AttributeMap &attrs, const Scope &scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    bias_ = OpParam::InputYFrom<GType>(inputs, scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    output_ = OpParam::OutFrom<GType>(outputs, scope);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    //    is_test_ = OpParam::GetAttr<bool>("is_test", attrs);
  }
  RType *Bias() const { return bias_; }

  const int &Axis() const { return axis_; }

  RType *Output() const { return output_; }

  const RType *InputBias() const { return input_bias_; }

  const RType *InputMean() const { return input_mean_; }

  const RType *InputScale() const { return input_scale_; }

  const RType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  const bool &IsTest() const { return is_test_; }
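  // The fused kernel folds batch norm into a per-channel affine transform,
  // roughly new_scale = scale / sqrt(variance + epsilon) and
  // new_bias = bias - mean * new_scale, and caches the result here.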

  void SetNewScale(RType *new_scale) { new_scale_ = new_scale; }

  void SetNewBias(RType *new_bias) { new_bias_ = new_bias; }

  const RType *NewScale() const { return new_scale_; }

  const RType *NewBias() const { return new_bias_; }

 protected:
  RType *bias_;
  int axis_;
  RType *output_;
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
  float epsilon_;
  float momentum_;
  // "is_test" parsing is disabled in the constructor; assume inference.
  bool is_test_ = true;
  RType *new_bias_;
  RType *new_scale_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::WrapperConvArgs fpga_conv_args;

 public:
  const fpga::WrapperConvArgs &FpgaArgs() const { return fpga_conv_args; }
  void SetFpgaArgs(const fpga::WrapperConvArgs &args) { fpga_conv_args = args; }
#endif
};
#endif

#ifdef FUSION_CONVBNADDRELU_OP
template <typename Dtype>
class FusionConvBNAddReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvBNAddReluParam(const VariableNameMap &inputs,
                           const VariableNameMap &outputs,
                           const AttributeMap &attrs, const Scope &scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    bias_ = OpParam::InputYFrom<GType>(inputs, scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    output_ = OpParam::OutFrom<GType>(outputs, scope);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    keyBNY_ = OpParam::getkey("BNY", inputs, 0);
    keyX_ = OpParam::getkey("X", inputs, 0);
    keyY_ = OpParam::getkey("Y", inputs, 0);
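    // The elementwise add may carry the batch-norm output ("BNY") on either
    // operand; whichever input key does not match it is taken as the bias.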
    if (keyX_ == keyBNY_) {
      bias_ = OpParam::InputYFrom<GType>(inputs, scope);
    } else if (keyY_ == keyBNY_) {
      bias_ = OpParam::InputXFrom<GType>(inputs, scope);
    }
    //    is_test_ = OpParam::GetAttr<bool>("is_test", attrs);
  }
  RType *Bias() const { return bias_; }

  const int &Axis() const { return axis_; }

  RType *Output() const { return output_; }

  const RType *InputBias() const { return input_bias_; }

  const RType *InputMean() const { return input_mean_; }

  const RType *InputScale() const { return input_scale_; }

  const RType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  const bool &IsTest() const { return is_test_; }

  void SetNewScale(RType *new_scale) { new_scale_ = new_scale; }

  void SetNewBias(RType *new_bias) { new_bias_ = new_bias; }

  const RType *NewScale() const { return new_scale_; }

  const RType *NewBias() const { return new_bias_; }

 protected:
  RType *bias_;
  int axis_;
  RType *output_;
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
  float epsilon_;
  float momentum_;
  // "is_test" parsing is disabled in the constructor; assume inference.
  bool is_test_ = true;
  RType *new_bias_;
  RType *new_scale_;
  std::string keyBNY_;
  std::string keyX_;
  std::string keyY_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::WrapperConvArgs fpga_conv_args;

 public:
  const fpga::WrapperConvArgs &FpgaArgs() const { return fpga_conv_args; }
  void SetFpgaArgs(const fpga::WrapperConvArgs &args) { fpga_conv_args = args; }
#endif
};
#endif

#ifdef FUSION_CONVBN_OP
template <typename Dtype>
1587
class FusionConvBNParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvBNParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    const Scope &scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    output_y_ = OpParam::OutputYFrom<GType>(outputs, scope);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    //    is_test_ = OpParam::GetAttr<bool>("is_test", attrs);
  }
  RType *Output() const { return output_y_; }

  const RType *InputBias() const { return input_bias_; }

  const RType *InputMean() const { return input_mean_; }

  const RType *InputScale() const { return input_scale_; }

  const RType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  const bool &IsTest() const { return is_test_; }

  void SetNewScale(RType *new_scale) { new_scale_ = new_scale; }

  void SetNewBias(RType *new_bias) { new_bias_ = new_bias; }

  const RType *NewScale() const { return new_scale_; }

  const RType *NewBias() const { return new_bias_; }

 protected:
  RType *output_y_;
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
  float epsilon_;
  float momentum_;
  // "is_test" parsing is disabled in the constructor; assume inference.
  bool is_test_ = true;
  RType *new_bias_;
  RType *new_scale_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::WrapperConvArgs fpga_conv_args;

 public:
  const fpga::WrapperConvArgs &FpgaArgs() const { return fpga_conv_args; }
  void SetFpgaArgs(const fpga::WrapperConvArgs &args) { fpga_conv_args = args; }
#endif
};
#endif

#ifdef FUSION_CONVADDBN_OP
template <typename Dtype>
class FusionConvAddBNParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvAddBNParam(const VariableNameMap &inputs,
                       const VariableNameMap &outputs,
                       const AttributeMap &attrs, const Scope &scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    bias_ = OpParam::InputYFrom<GType>(inputs, scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    output_y_ = OpParam::OutputYFrom<GType>(outputs, scope);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    //    is_test_ = OpParam::GetAttr<bool>("is_test", attrs);
  }
  RType *Bias() const { return bias_; }

  const int &Axis() const { return axis_; }

  RType *Output() const { return output_y_; }

  const RType *InputBias() const { return input_bias_; }

  const RType *InputMean() const { return input_mean_; }

  const RType *InputScale() const { return input_scale_; }

  const RType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  const bool &IsTest() const { return is_test_; }

  void SetNewScale(RType *new_scale) { new_scale_ = new_scale; }

  void SetNewBias(RType *new_bias) { new_bias_ = new_bias; }

  const RType *NewScale() const { return new_scale_; }

  const RType *NewBias() const { return new_bias_; }

 protected:
  RType *bias_;
  int axis_;
  RType *output_y_;
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
  float epsilon_;
  float momentum_;
  // "is_test" parsing is disabled in the constructor; assume inference.
  bool is_test_ = true;
  RType *new_bias_;
  RType *new_scale_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::WrapperConvArgs fpga_conv_args;

 public:
  const fpga::WrapperConvArgs &FpgaArgs() const { return fpga_conv_args; }
  void SetFpgaArgs(const fpga::WrapperConvArgs &args) { fpga_conv_args = args; }
#endif
};
#endif

#ifdef FUSION_DWCONVBNRELU_OP
template <typename Dtype>
class FusionDWConvBNReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDWConvBNReluParam(const VariableNameMap &inputs,
                          const VariableNameMap &outputs,
                          const AttributeMap &attrs, const Scope &scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    output_ = OpParam::OutFrom<GType>(outputs, scope);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    //    is_test_ = OpParam::GetAttr<bool>("is_test", attrs);
  }
  RType *Output() const { return output_; }

  const RType *InputBias() const { return input_bias_; }

  const RType *InputMean() const { return input_mean_; }

  const RType *InputScale() const { return input_scale_; }

  const RType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  const bool &IsTest() const { return is_test_; }

  void SetNewScale(RType *new_scale) { new_scale_ = new_scale; }

  void SetNewBias(RType *new_bias) { new_bias_ = new_bias; }

  const RType *NewScale() const { return new_scale_; }

  const RType *NewBias() const { return new_bias_; }

 protected:
  RType *output_;
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
  float epsilon_;
  float momentum_;
  // "is_test" parsing is disabled in the constructor; assume inference.
  bool is_test_ = true;
  RType *new_bias_;
  RType *new_scale_;
};

#endif

#ifdef FUSION_CONVBNRELU_OP
template <typename Dtype>
class FusionConvBNReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvBNReluParam(const VariableNameMap &inputs,
                        const VariableNameMap &outputs,
                        const AttributeMap &attrs, const Scope &scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    output_ = OpParam::OutFrom<GType>(outputs, scope);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    //    is_test_ = OpParam::GetAttr<bool>("is_test", attrs);
  }
  RType *Output() const { return output_; }

  const RType *InputBias() const { return input_bias_; }

  const RType *InputMean() const { return input_mean_; }

  const RType *InputScale() const { return input_scale_; }

  const RType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  const bool &IsTest() const { return is_test_; }

  void SetNewScale(RType *new_scale) { new_scale_ = new_scale; }

  void SetNewBias(RType *new_bias) { new_bias_ = new_bias; }

  const RType *NewScale() const { return new_scale_; }

  const RType *NewBias() const { return new_bias_; }

 protected:
  RType *output_;
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
  float epsilon_;
  float momentum_;
  // "is_test" parsing is disabled in the constructor; assume inference.
  bool is_test_ = true;
  RType *new_bias_;
  RType *new_scale_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::WrapperConvArgs fpga_conv_args;

 public:
  const fpga::WrapperConvArgs &FpgaArgs() const { return fpga_conv_args; }
  void SetFpgaArgs(const fpga::WrapperConvArgs &args) { fpga_conv_args = args; }
#endif
};
#endif

#ifdef IM2SEQUENCE_OP
template <typename Dtype>
class Im2SequenceParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  Im2SequenceParam(const VariableNameMap &inputs,
                   const VariableNameMap &outputs, const AttributeMap &attrs,
                   const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
    kernels_ = GetAttr<vector<int>>("kernels", attrs);
    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
  }

  const RType *Input() const { return input_x_; }

  RType *Output() const { return out_; }

  const vector<int> &Kernels() const { return kernels_; }

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }
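  // For reference: im2sequence emits one output row per sliding window, i.e.
  // roughly ((H + pad_h - kernel_h) / stride_h + 1) *
  //         ((W + pad_w - kernel_w) / stride_w + 1) rows per image,
  // where pad_h/pad_w sum the paddings on both sides.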

 private:
  RType *input_x_;
  RType *out_;
  vector<int> kernels_;
  vector<int> strides_;
  vector<int> paddings_;
};
#endif

#ifdef DROPOUT_OP
template <typename Dtype>
class DropoutParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  DropoutParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);

    dropout_prob_ = GetAttr<float>("dropout_prob", attrs);
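    // At inference Paddle's dropout typically rescales the input by
    // (1 - dropout_prob) instead of randomly zeroing activations.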
  }

  const RType *InputX() const { return input_x_; }

  RType *Out() const { return out_; }

  float DropoutProb() const { return dropout_prob_; }

 private:
  RType *input_x_;
  RType *out_;
  float dropout_prob_;
};
#endif

#ifdef CONV_TRANSPOSE_OP
template <typename Dtype>
class ConvTransposeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ConvTransposeParam(const VariableNameMap &inputs,
                     const VariableNameMap &outputs, const AttributeMap &attrs,
                     const Scope &scope) {
    filter_ = FilterFrom<GType>(inputs, scope);
    input_ = InputFrom<GType>(inputs, scope);
    output_ = OutputFrom<GType>(outputs, scope);
    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
    dilations_ = GetAttr<vector<int>>("dilations", attrs);
    groups_ = GetAttr<int>("groups", attrs);
  }

  const RType *Input() const { return input_; }

  const RType *Filter() const { return filter_; }

  RType *Output() const { return output_; }

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

  const vector<int> &Dilations() const { return dilations_; }

  const int &Groups() const { return groups_; }
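  // For reference, the usual transposed-conv output size along one axis is
  //   out = (in - 1) * stride - 2 * pad + dilation * (kernel - 1) + 1.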

 private:
  RType *input_;
  RType *output_;
  RType *filter_;
  vector<int> strides_;
  vector<int> paddings_;
  vector<int> dilations_;
  int groups_;
};
#endif

#ifdef GRU_OP
template <typename Dtype>
class GruParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;

 public:
  /**
   * Collects the GRU op's inputs (Input, H0, Weight, Bias), outputs
   * (BatchGate, BatchResetHiddenPrev, BatchHidden, Hidden) and attributes
   * from the given maps.
   *
   * @param inputs  input variable name map
   * @param outputs output variable name map
   * @param attrs   op attributes (activation, gate_activation, is_reverse)
   * @param scope   scope that owns the variables
   */
  GruParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
           const AttributeMap &attrs, const Scope &scope) {
    input_input_ = InputFrom<GType>(inputs, scope);
    input_h0_ = InputH0From<GType>(inputs, scope);
    input_bias_ = InputBiasFrom<GType>(inputs, scope);
    input_weight_ = InputWeightFrom<GType>(inputs, scope);

    output_batch_gate_ = OutputBatchGateFrom<GType>(outputs, scope);
    output_batch_reset_hidden_prev_ =
        OutputBatchResetHiddenPrevFrom<GType>(outputs, scope);
    output_batch_hidden_ = OutputBatchHiddenFrom<GType>(outputs, scope);
    output_hidden_ = OutputHiddenFrom<GType>(outputs, scope);
    activation_ = GetStringAttr("activation", attrs);
    gate_activation_ = GetStringAttr("gate_activation", attrs);
    is_reverse_ = GetAttr<bool>("is_reverse", attrs);
  }
  const GType *InputInput() const { return input_input_; }
  const GType *InputWeight() const { return input_weight_; }
  const GType *InputH0() const { return input_h0_; }
  const GType *InputBias() const { return input_bias_; }
  const std::string &Activation() const { return activation_; }
  const std::string &GateActivation() const { return gate_activation_; }
  const bool &IsReverse() const { return is_reverse_; }

  GType *OutBatchGate() const { return output_batch_gate_; }
  GType *OutBatchResetHiddenPrev() const {
    return output_batch_reset_hidden_prev_;
  }
  GType *OutBatchHidden() const { return output_batch_hidden_; }
  GType *OutHidden() const { return output_hidden_; }
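  // For orientation, a standard GRU step (the exact gate conventions live in
  // the kernel implementation):
  //   u_t = act_gate(W_u x_t + U_u h_{t-1} + b_u)
  //   r_t = act_gate(W_r x_t + U_r h_{t-1} + b_r)
  //   c_t = act(W_c x_t + U_c (r_t * h_{t-1}) + b_c)
  //   h_t = u_t * h_{t-1} + (1 - u_t) * c_t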

 private:
  GType *input_input_;
  GType *input_h0_;
  GType *input_bias_;
  GType *input_weight_;

  GType *output_batch_gate_;
  GType *output_batch_reset_hidden_prev_;
  GType *output_batch_hidden_;
  GType *output_hidden_;
  std::string activation_;
  std::string gate_activation_;
  bool is_reverse_;
};
#endif

#ifdef FLATTEN_OP
template <typename Dtype>
class FlattenParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FlattenParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
    axis_ = GetAttr<int>("axis", attrs);
  }
  const RType *InputX() const { return input_x_; }
  RType *Out() const { return out_; }
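  // Flatten collapses dims [0, axis) into the first output dimension and
  // dims [axis, rank) into the second, as in the Paddle flatten op.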
  const int &Axis() const { return axis_; }

 private:
  RType *input_x_;
  RType *out_;
  int axis_;
};
#endif

#ifdef SPLIT_OP
template <typename Dtype>
class SplitParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SplitParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    outs_ = OutMultiFrom<GType>(outputs, scope);
    axis_ = GetAttr<int>("axis", attrs);
    num_ = GetAttr<int>("num", attrs);
    sections_ = GetAttr<std::vector<int>>("sections", attrs);
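    // Either "num" (equal split) or "sections" (explicit sizes along "axis")
    // is expected to be set, mirroring the Paddle split op.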

    //    for (int i = 0; i < outs_.size(); ++i) {
    //      out_ts_.push_back(*scope.FindVar(outs_[i])->GetMutable());
    //    }
  }
  const RType *InputX() const { return input_x_; }
  std::vector<GType *> Outs() const { return outs_; }
  int Axis() const { return axis_; }
  int Num() const { return num_; }
  std::vector<int> Sections() const { return sections_; }
  //  std::vector<GType> OutTs() const { return out_ts_; }

 private:
  RType *input_x_;
  std::vector<GType *> outs_;
  int axis_;
  int num_;
  std::vector<int> sections_;
  //  std::vector<GType> out_ts_;
};
#endif

#ifdef BILINEAR_INTERP_OP
template <typename Dtype>
class BilinearInterpParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  BilinearInterpParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
                      const Scope &scope) {
    input_x_ = InputXFrom<GType>(inputs, scope);
    input_outsize_ = InputOutSizeFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
    out_h_ = GetAttr<int>("out_h", attrs);
    out_w_ = GetAttr<int>("out_w", attrs);
  }
  const RType *InputX() const { return input_x_; }
  const RType *InputOutPutSize() const { return input_outsize_; }
  RType *Out() const { return out_; }
  int OutH() const { return out_h_; }
  int OutW() const { return out_w_; }
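  // Scale ratios typically follow the align-corners form, e.g.
  //   ratio_h = (H_in - 1) / static_cast<float>(out_h - 1)  when out_h > 1.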

 private:
  RType *input_x_;
  RType *input_outsize_;
  RType *out_;
  int out_h_;
  int out_w_;
};
#endif

#ifdef SHAPE_OP
template <typename Dtype>
class ShapeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ShapeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, const Scope &scope) {
    input_ = InputFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
  }
  const RType *Input() const { return input_; }
  RType *Out() const { return out_; }

 private:
  RType *input_;
  RType *out_;
};
#endif

template <typename Dtype>
class QuantizeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  QuantizeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, const Scope &scope) {
    input_ = InputXFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
    if (HasAttr("is_static", attrs)) {
      is_static_ = GetAttr<bool>("is_static", attrs);
    }
    // online quantization: the kernel computes scale = max(abs(x)) at run
    // time and writes it to "OutScale"
    online_scale_ = GetVarValue<GType>("OutScale", outputs, scope);
    // offline quantization: use a scale fixed at model-conversion time
    if (HasAttr("static_scale", attrs)) {
      static_scale_ = GetAttr<float>("static_scale", attrs);
    }
    // the kernel then applies x = round(scale * x), using round_type_
    if (HasAttr("round_type", attrs)) {
      round_type_ = GetAttr<RoundType>("round_type", attrs);
    }
  }

 public:
  // op input
  RType *input_;
  // op output
  RType *out_;
  //
  RType *online_scale_;
  // if static scale or not
  bool is_static_ = false;
  // quantize scale
  float static_scale_ = 1.0f;
  // round method type
  // nearest_zero and nearest_even is valid currently
  RoundType round_type_ = ROUND_NEAREST_TO_EVEN;
};
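// Usage sketch (illustrative only, not part of the API): with online
// quantization the kernel stores max_abs = max(abs(x)) in online_scale_, and
// an int8 kernel would typically map q = round(x * 127 / max_abs), honoring
// round_type_; when is_static_ is set, static_scale_ replaces the runtime
// max_abs.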

template <typename Dtype>
class DequantizeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  DequantizeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                  const AttributeMap &attrs, const Scope &scope) {
    input_ = InputXFrom<GType>(inputs, scope);
    out_ = OutFrom<GType>(outputs, scope);
    activation_scale_ = GetVarValue<GType>("Scale", inputs, scope);
    // dequantization: x = x / weight_scale_ / activation_scale_
    if (HasAttr("weight_scale", attrs)) {
      weight_scale_ = GetAttr<float>("weight_scale", attrs);
    } else {
      weight_scale_ = GetAttr<float>("max_range", attrs);
    }
  }

 public:
  // op input
  RType *input_;
  // op output
  RType *out_;
  RType *activation_scale_;
  float weight_scale_;
};
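// Usage sketch (illustrative only): a quantized tensor is mapped back to
// float roughly as x = q / (weight_scale_ * activation_scale), where the
// activation scale comes from the "Scale" input and weight_scale_ from the
// "weight_scale" (or legacy "max_range") attribute.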

}  // namespace operators
}  // namespace paddle_mobile