/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <cassert>  // assert() is used by GetMultiVarValue/GetMultiVar below
#include <memory>
#include <string>
#include <vector>
#include "common/log.h"
#include "common/type_define.h"
#include "common/types.h"
#include "framework/attribute.h"
#include "framework/lod_tensor.h"
#include "framework/scope.h"
#include "framework/tensor.h"
#include "framework/type_trait.h"
#include "framework/variable.h"

#ifdef PADDLE_MOBILE_FPGA_V1
#include "fpga/V1/api.h"
#endif

#ifdef PADDLE_MOBILE_FPGA_V2
#include "fpga/V2/api.h"
#endif

#ifdef PADDLE_MOBILE_FPGA_KD
#include "fpga/KD/context.hpp"
#endif

#ifdef PADDLE_MOBILE_CL
#include "framework/cl/cl_image.h"
#endif

namespace paddle_mobile {
namespace operators {

using framework::Attribute;
using framework::AttributeMap;
using framework::LoDTensor;
using framework::Scope;
using framework::Tensor;
using framework::Variable;
using std::string;
using std::vector;

using framework::DtypeTensorTrait;

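// Custom deleter for std::shared_ptr-managed tensors (see BatchNormParam
// below): in OpenCL builds the pointee may really be a framework::CLImage,
// which must be destroyed through its own type; only a successfully cast
// CLImage is deleted, and in non-CL builds the functor intentionally does
// nothing.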
template <typename Dtype>
class CLImageDeleter {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;

 public:
  void operator()(GType *ptr) {
#ifdef PADDLE_MOBILE_CL
    framework::CLImage *image = dynamic_cast<framework::CLImage *>(ptr);
    if (image) {
      delete image;
    }
#endif
  }
};

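// OpParam is the base class of every *Param wrapper below. It stores the
// Scope and provides the typed helpers that derived classes use to resolve
// named operator inputs/outputs and attributes.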
class OpParam {
 public:
  OpParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
          const AttributeMap &attrs, Scope *scope)
      : scope_(scope) {}

  Scope *GetScope() const { return scope_; }
  Scope *scope_ = nullptr;

#ifdef PADDLE_MOBILE_FPGA_KD
  zynqmp::Context &context() { return context_; }

  zynqmp::Context context_;
#endif

 protected:
  template <typename T>
  static T *InputH0From(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("H0", inputs, scope);
  }

  template <typename T>
  static T *InputHiddenPrevFrom(const VariableNameMap &inputs,
                                const Scope &scope) {
    return GetVarValue<T>("HiddenPrev", inputs, scope);
  }

  template <typename T>
  static T *InputAlphaFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Alpha", inputs, scope);
  }

  template <typename T>
  static T *InputFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Input", inputs, scope);
  }

  template <typename T>
  static T *InputXFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("X", inputs, scope);
  }
  template <typename T>
  static T *InputOutSizeFrom(const VariableNameMap &inputs,
                             const Scope &scope) {
    return GetVarValue<T>("OutSize", inputs, scope);
  }

  template <typename T>
  static T *InputWFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("W", inputs, scope);
  }

  template <typename T>
  static T *InputIdsFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Ids", inputs, scope);
  }

  template <typename T>
  static T *InputEmissionFrom(const VariableNameMap &inputs,
                              const Scope &scope) {
    return GetVarValue<T>("Emission", inputs, scope);
  }

  template <typename T>
  static T *InputTransitionFrom(const VariableNameMap &inputs,
                                const Scope &scope) {
    return GetVarValue<T>("Transition", inputs, scope);
  }
  template <typename T>
  static T *InputLabelFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Label", inputs, scope);
  }

  template <typename T>
  static T *InputXFrom1(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue1<T>("addX", inputs, scope);
  }

  template <typename T>
  static T *InputYFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Y", inputs, scope);
  }

  template <typename T>
  static T *InputYFrom1(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue1<T>("Y", inputs, scope);
  }

  template <typename T>
  static T *InputZFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Z", inputs, scope);
  }

  template <typename T>
  static T *InputBiasFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Bias", inputs, scope);
  }
  template <typename T>
  static T *InputWeightFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Weight", inputs, scope);
  }
  template <typename T>
  static T *InputVarianceFrom(const VariableNameMap &inputs,
                              const Scope &scope) {
    return GetVarValue<T>("Variance", inputs, scope);
  }
  template <typename T>
  static T *InputMeanFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Mean", inputs, scope);
  }
  template <typename T>
  static T *InputScaleFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Scale", inputs, scope);
  }
  template <typename T>
  static T *InputImageFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Image", inputs, scope);
  }
  template <typename T>
  static T *InputPriorBoxFrom(const VariableNameMap &inputs,
                              const Scope &scope) {
    return GetVarValue<T>("PriorBox", inputs, scope);
  }
  template <typename T>
  static T *InputPriorBoxVarFrom(const VariableNameMap &inputs,
                                 const Scope &scope) {
    return GetVarValue<T>("PriorBoxVar", inputs, scope);
  }
  // LoDTensor but now use Tensor
  template <typename T>
  static T *InputTargetBoxFrom(const VariableNameMap &inputs,
                               const Scope &scope) {
    return GetVarValue<T>("TargetBox", inputs, scope);
  }

  template <typename T>
  static T *InputBBoxesFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("BBoxes", inputs, scope);
  }

  template <typename T>
  static T *InputScoresFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Scores", inputs, scope);
  }

  template <typename T>
  static T *InputShapeFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Shape", inputs, scope);
  }

  template <typename T>
  static vector<T *> InputMultiFrom(const VariableNameMap &inputs,
                                    const Scope &scope) {
    return GetMultiVarValue<T>("X", inputs, scope);
  }

  static vector<Variable *> InputMultiVarsFrom(const VariableNameMap &inputs,
                                               const Scope &scope) {
    return GetMultiVar("X", inputs, scope);
  }

  template <typename T>
  static T *OutputBatchGateFrom(const VariableNameMap &outputs,
                                const Scope &scope) {
    return GetVarValue<T>("BatchGate", outputs, scope);
  }

  template <typename T>
  static T *OutputGateFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Gate", outputs, scope);
  }

  template <typename T>
  static T *OutputViterbiPathFrom(const VariableNameMap &outputs,
                                  const Scope &scope) {
    return GetVarValue<T>("ViterbiPath", outputs, scope);
  }
  template <typename T>
  static T *OutputBatchResetHiddenPrevFrom(const VariableNameMap &outputs,
                                           const Scope &scope) {
    return GetVarValue<T>("BatchResetHiddenPrev", outputs, scope);
  }

  template <typename T>
  static T *OutputResetHiddenPrevFrom(const VariableNameMap &outputs,
                                      const Scope &scope) {
    return GetVarValue<T>("ResetHiddenPrev", outputs, scope);
  }

  template <typename T>
  static T *OutputBatchHiddenFrom(const VariableNameMap &outputs,
                                  const Scope &scope) {
    return GetVarValue<T>("BatchHidden", outputs, scope);
  }

  template <typename T>
  static T *OutputHiddenFrom(const VariableNameMap &outputs,
                             const Scope &scope) {
    return GetVarValue<T>("Hidden", outputs, scope);
  }

  template <typename T>
  static T *OutputFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Output", outputs, scope);
  }

  static Variable *OutVarFrom(const VariableNameMap &outputs,
                              const Scope &scope) {
    return GetVar("Out", outputs, scope);
  }

  template <typename T>
  static T *OutFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Out", outputs, scope);
  }

  template <typename T>
  static vector<T *> OutMultiFrom(const VariableNameMap &outputs,
                                  const Scope &scope) {
    return GetMultiVarValue<T>("Out", outputs, scope);
  }

  template <typename T>
  static T *OutputYFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Y", outputs, scope);
  }

  template <typename T>
  static T *OutputXShapeFrom(const VariableNameMap &outputs,
                             const Scope &scope) {
    return GetVarValue<T>("XShape", outputs, scope);
  }

  template <typename T>
  static T *OutputBoxesFrom(const VariableNameMap &outputs,
                            const Scope &scope) {
    return GetVarValue<T>("Boxes", outputs, scope);
  }

  template <typename T>
  static T *OutputBoxFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("OutputBox", outputs, scope);
  }

  template <typename T>
  static T *OutputNormFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Norm", outputs, scope);
  }

  template <typename T>
  static T *OutputVariancesFrom(const VariableNameMap &outputs,
                                const Scope &scope) {
    return GetVarValue<T>("Variances", outputs, scope);
  }

  template <typename T>
  static T *MidOutFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("MidOut", outputs, scope);
  }

  template <typename T>
  static T *FilterFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Filter", inputs, scope);
  }

  template <typename T>
  static T *GridFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Grid", inputs, scope);
  }

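  // Attribute helpers. Example (this mirrors how ConvParam reads its
  // attributes): auto strides = GetAttr<vector<int>>("strides", attrs);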
  template <typename T>
  static const T GetAttr(const string &key, const AttributeMap &map) {
    PADDLE_MOBILE_ENFORCE(HasAttr(key, map), "%s is not contained in attr map",
                          key.c_str())
    return ((Attribute)map.at(key)).Get<T>();
  }
  static const std::string GetStringAttr(const string &key,
                                         const AttributeMap &map) {
    PADDLE_MOBILE_ENFORCE(HasAttr(key, map), "%s is not contained in attr map",
                          key.c_str())
    return ((Attribute)map.at(key)).GetString();
  }

  static const bool HasAttr(const string &key, const AttributeMap &map) {
    return map.count(key) > 0;
  }

  static const bool HasVar(const string &key, const VariableNameMap &var_map) {
    return var_map.count(key) > 0;
  }

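  // Core variable lookup: resolves the first name registered under `key` and
  // returns its payload from the scope as a mutable T (nullptr when the name
  // list is empty).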
  template <typename T>
  static T *GetVarValue(const string &key, const VariableNameMap &var_map,
                        const Scope &scope) {
    PADDLE_MOBILE_ENFORCE(var_map.count(key) > 0,
                          "%s is not contained in var_map", key.c_str())
    auto var_vec = var_map.at(key);
    if (!var_vec.empty()) {
      auto var = scope.FindVar(var_vec[0]);
      return var->GetMutable<T>();
    } else {
      return nullptr;
    }
  }

  static Variable *GetVar(const string &key, const VariableNameMap &var_map,
                          const Scope &scope) {
    PADDLE_MOBILE_ENFORCE(var_map.count(key) > 0,
                          "%s is not contained in var_map", key.c_str())
    auto var_vec = var_map.at(key);
    if (!var_vec.empty()) {
      auto var = scope.FindVar(var_vec[0]);
      return var;
    } else {
      return nullptr;
    }
  }

  static std::string Getkey(const string &key, const VariableNameMap &var_map,
                            int index) {
    PADDLE_MOBILE_ENFORCE(var_map.count(key) > index,
                          "%s is not contained in var_map", key.c_str())
    auto var_vec = var_map.at(key);
    return var_vec[index];
  }

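  // Like GetVarValue, but resolves the *second* name registered under `key`
  // (used by the InputXFrom1/InputYFrom1 helpers above); callers must ensure
  // the name list has at least two entries.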
  template <typename T>
  static T *GetVarValue1(const string &key, const VariableNameMap &var_map,
                         const Scope &scope) {
    PADDLE_MOBILE_ENFORCE(var_map.count(key) > 0,
                          "%s is not contained in var_map", key.c_str())
    auto var_vec = var_map.at(key);
    if (!var_vec.empty()) {
      auto var = scope.FindVar(var_vec[1]);
      return var->GetMutable<T>();
    } else {
      return nullptr;
    }
  }

  template <typename T>
  static vector<T *> GetMultiVarValue(const string &key,
                                      const VariableNameMap &var_map,
                                      const Scope &scope) {
    auto var_vecs = var_map.at(key);
    assert(var_vecs.size() > 1);
    vector<T *> var_res;
    for (auto &var_vec : var_vecs) {
      auto var = scope.FindVar(var_vec);
      var_res.push_back(var->GetMutable<T>());
    }
    return var_res;
  }

  static vector<Variable *> GetMultiVar(const string &key,
                                        const VariableNameMap &var_map,
                                        const Scope &scope) {
    auto var_vecs = var_map.at(key);
    assert(var_vecs.size() > 1);
    vector<Variable *> var_res;
    for (auto &var_vec : var_vecs) {
      auto var = scope.FindVar(var_vec);
      var_res.push_back(var);
    }
    return var_res;
  }
};

#define GET_VAR_AS_TENSOR(name, name_dict, scope) \
  OpParam::GetVarValue<framework::Tensor>(name, name_dict, scope)

#define GET_VAR_AS_LOD_TENSOR(name, name_dict, scope) \
  OpParam::GetVarValue<framework::LoDTensor>(name, name_dict, scope)
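// Convenience wrappers around GetVarValue; e.g. (illustrative):
//   auto *x = GET_VAR_AS_LOD_TENSOR("X", inputs, scope);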

template <typename Dtype>
class ConvParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ConvParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    filter_ = OpParam::FilterFrom<GType>(inputs, *scope);
    input_ = OpParam::InputFrom<GType>(inputs, *scope);
    if (outputs.count("Output")) {
      output_ = OpParam::OutputFrom<GType>(outputs, *scope);
    }
    strides_ = OpParam::GetAttr<vector<int>>("strides", attrs);
    paddings_ = OpParam::GetAttr<vector<int>>("paddings", attrs);
    dilations_ = OpParam::GetAttr<vector<int>>("dilations", attrs);
    groups = OpParam::GetAttr<int>("groups", attrs);
  }

  const GType *Input() const { return input_; }

  GType *Filter() const { return filter_; }

  GType *Output() const { return output_; }

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

  const vector<int> &Dilations() const { return dilations_; }

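  // Execution paths the convolution kernel can dispatch to; one is
  // typically selected at kernel initialization based on the configuration.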
  enum ExecMode {
    EXEC_INVALID = 0,
    EXEC_GEMM_FLOAT,
    EXEC_DEPTHWISE3x3S1_FLOAT,
    EXEC_DEPTHWISE3x3S2_FLOAT,
    EXEC_WINOGRAD3X3_FLOAT,
    EXEC_WINOGRAD5X5_FLOAT,
    EXEC_DEPTHWISE5x5_FLOAT,
    EXEC_GEMM_INT8,
    EXEC_DEPTHWISE3x3_INT8,
    EXEC_DEPTHWISE5x5_INT8,
    EXEC_SLIDINGWINDOW3x3S1_FLOAT,
    EXEC_SLIDINGWINDOW3x3S2_FLOAT,
    EXEC_DEPTHWISE3x3_FLOAT,
    EXEC_SLIDINGWINDOW1x1_FLOAT,
    EXEC_SLIDINGWINDOW3x3_FLOAT,
    EXEC_SLIDINGWINDOW3x3_WITH_GROUP_FLOAT,
    EXEC_SLIDINGWINDOW5x5_FLOAT,
    EXEC_SLIDINGWINDOW7x7_FLOAT,
    EXEC_GEMM1x1s1_FLOAT,
    EXEC_DEPTHWISEBASIC_FLOAT,
  };

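  // exec_mode_ is declared mutable below so a kernel can record its chosen
  // mode through this const accessor.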
  ExecMode &ExecMode() const { return exec_mode_; }

  const int &Groups() const { return groups; }

#ifdef PADDLE_MOBILE_CL
  int Offset() const { return offset_; }

  void SetOffset(int in_offset) { offset_ = in_offset; }

#endif

 public:
  GType *input_;
  GType *output_;
  GType *filter_;
  GType *transformed_filter_;
  vector<int> strides_;
  vector<int> paddings_;
  vector<int> dilations_;
  mutable enum ExecMode exec_mode_;
  int groups;

#ifdef PADDLE_MOBILE_CL
  int offset_;
#endif

#ifdef PADDLE_MOBILE_FPGA

 public:
  fpga::SplitConvArgs fpga_conv_args;

 public:
  const fpga::SplitConvArgs &FpgaArgs() const { return fpga_conv_args; }
  void SetFpgaArgs(const fpga::SplitConvArgs &args) { fpga_conv_args = args; }

 public:
  fpga::DWconvArgs fpga_dwconv_args;

 public:
  const fpga::DWconvArgs &FpgaDwconvArgs() const { return fpga_dwconv_args; }
  void SetFpgaArgs(const fpga::DWconvArgs &args) { fpga_dwconv_args = args; }
#endif
};
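
// Illustrative use (a sketch; assumes the name maps, attrs and scope were
// populated by the framework loader, and `CPU` as the Dtype tag):
//   ConvParam<CPU> conv(inputs, outputs, attrs, scope);
//   const vector<int> &strides = conv.Strides();
//   int groups = conv.Groups();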
template <typename Dtype>
Print &operator<<(Print &printer, const ConvParam<Dtype> &conv_param);

template <typename Dtype>
class ElementwiseAddParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ElementwiseAddParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
                      Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    axis_ = GetAttr<int>("axis", attrs);
  }

  const GType *InputX() const { return input_x_; }

  const GType *InputY() const { return input_y_; }

  GType *Out() const { return out_; }

  const int &Axis() const { return axis_; }

 private:
  GType *input_x_;
  GType *input_y_;
  GType *out_;
  int axis_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::EWAddArgs fpga_EW_add_args;

 public:
  const fpga::EWAddArgs &FpgaArgs() const { return fpga_EW_add_args; }
  void SetFpgaArgs(const fpga::EWAddArgs &args) { fpga_EW_add_args = args; }

 public:
  Tensor float_input_x, float_out;

#endif
};

#ifdef ELEMENTWISEMUL_OP
template <typename Dtype>
class ElementwiseMulParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ElementwiseMulParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
                      Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    axis_ = GetAttr<int>("axis", attrs);
  }

  const GType *InputX() const { return input_x_; }

  const GType *InputY() const { return input_y_; }

  GType *Out() const { return out_; }

  const int &Axis() const { return axis_; }

 private:
  GType *input_x_;
  GType *input_y_;
  GType *out_;
  int axis_;
#ifdef PADDLE_MOBILE_FPGA

 public:
  Tensor float_input_x, float_out;

#endif
};
#endif

#ifdef FUSION_ELEMENTWISEADDRELU_OP
template <typename Dtype>
using ElementwiseAddReluParam = ElementwiseAddParam<Dtype>;
#endif

#ifdef ELEMENTWISESUB_OP
template <typename Dtype>
class ElementwiseSubParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ElementwiseSubParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
                      Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    axis_ = GetAttr<int>("axis", attrs);
  }

  const GType *InputX() const { return input_x_; }

  const GType *InputY() const { return input_y_; }

  GType *Out() const { return out_; }

  const int &Axis() const { return axis_; }

 private:
  GType *input_x_;
  GType *input_y_;
  GType *out_;
  int axis_;
};
#endif

#ifdef MUL_OP
template <typename Dtype>
class MulParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  MulParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    x_num_col_dims_ = GetAttr<int>("x_num_col_dims", attrs);
    y_num_col_dims_ = GetAttr<int>("y_num_col_dims", attrs);
  }

  GType *InputX() const { return input_x_; }

  GType *InputY() const { return input_y_; }

  GType *Out() const { return out_; }

  const int &XNumColDims() const { return x_num_col_dims_; }

  const int &YNumColDims() const { return y_num_col_dims_; }

 private:
  GType *input_x_;
  GType *input_y_;
  GType *out_;
  int x_num_col_dims_;
  int y_num_col_dims_;
};
#endif

#ifdef CONCAT_OP
template <typename Dtype>
class ConcatParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ConcatParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
              const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    inputs_ = InputMultiFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    axis_ = GetAttr<int>("axis", attrs);
    original_output_dims_size_ = out_->dims().size();
  }

  vector<GType *> Inputs() const { return inputs_; }

  GType *Out() const { return out_; }

  const int &Axis() const { return axis_; }

 public:
  vector<GType *> inputs_;
  GType *out_;
  int axis_;
  int original_output_dims_size_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::ConcatArgs fpga_concat_args;

 public:
  const fpga::ConcatArgs &FpgaArgs() const { return fpga_concat_args; }
  void SetFpgaArgs(const fpga::ConcatArgs &args) { fpga_concat_args = args; }
#endif
};
#endif

#ifdef SUM_OP
template <typename Dtype>
class SumParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SumParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    inputs_vars_ = InputMultiVarsFrom(inputs, *scope);
    out_var_ = OutVarFrom(outputs, *scope);
    inputs_ = InputMultiFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
  }

  vector<Variable *> InputsVars() const { return inputs_vars_; }

  Variable *OutVar() const { return out_var_; }

  vector<GType *> Inputs() const { return inputs_; }

  GType *Out() const { return out_; }

 private:
  vector<Variable *> inputs_vars_;
  Variable *out_var_;
  vector<GType *> inputs_;
  GType *out_;
};
#endif

#ifdef LRN_OP
template <typename Dtype>
class LrnParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  LrnParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    mid_out_ = MidOutFrom<GType>(outputs, *scope);
    n_ = GetAttr<int>("n", attrs);
    alpha_ = GetAttr<float>("alpha", attrs);
    beta_ = GetAttr<float>("beta", attrs);
    k_ = GetAttr<float>("k", attrs);
    data_format_ = GetStringAttr("data_format", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  GType *MidOut() const { return mid_out_; }

  const int &N() const { return n_; }

  const float &Alpha() const { return alpha_; }

  const float &Beta() const { return beta_; }

  const float &K() const { return k_; }

  const string &DataFormat() const { return data_format_; }

 private:
  GType *input_x_;
  GType *out_;
  GType *mid_out_;
  int n_;
  float alpha_;
  float beta_;
  float k_;
  string data_format_;
};
#endif

#ifdef NORM_OP
template <typename Dtype>
class NormParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  NormParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    output_norm_ = OutputNormFrom<GType>(outputs, *scope);
    epsilon_ = GetAttr<float>("epsilon", attrs);
    axis_ = GetAttr<int>("axis", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  GType *OutputNorm() const { return output_norm_; }

  const float &Epsilon() const { return epsilon_; }

  const int &Axis() const { return axis_; }

 private:
  GType *input_x_;
  GType *out_;
  GType *output_norm_;
  float epsilon_;
  int axis_;
};
#endif

#ifdef BATCHNORM_OP
template <typename Dtype>
class BatchNormParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  BatchNormParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                 const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    output_y_ = OutputYFrom<GType>(outputs, *scope);
    input_bias_ = InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = GetAttr<float>("epsilon", attrs);
    momentum_ = GetAttr<float>("momentum", attrs);
    //    is_test_ = GetAttr<bool>("is_test", attrs);
  }

  ~BatchNormParam() {}

  const GType *InputX() const { return input_x_; }

  GType *OutputY() const { return output_y_; }

  const GType *InputBias() const { return input_bias_; }

  const GType *InputMean() const { return input_mean_; }

  const GType *InputScale() const { return input_scale_; }

  const GType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  const bool &IsTest() const { return is_test_; }

  const string &DataFormat() const { return data_format_; }

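  // The kernel stores recomputed scale/bias tensors here; ownership is
  // shared and released through CLImageDeleter so a CLImage allocated in an
  // OpenCL build is freed correctly.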
  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const GType *NewScale() const { return new_scale_.get(); }

  const GType *NewBias() const { return new_bias_.get(); }

 private:
  GType *input_x_;
  GType *output_y_;
  GType *input_bias_;
  GType *input_mean_;
  GType *input_scale_;
  GType *input_variance_;
  float epsilon_;
  float momentum_;
  bool is_test_;
  string data_format_;
  std::shared_ptr<GType> new_bias_;
  std::shared_ptr<GType> new_scale_;
};
#endif

#ifdef INSTANCENORM_OP
template <typename Dtype>
class InstanceNormParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  InstanceNormParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    output_y_ = OutputYFrom<GType>(outputs, *scope);
    epsilon_ = GetAttr<float>("epsilon", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *OutputY() const { return output_y_; }

  const float &Epsilon() const { return epsilon_; }

 private:
  GType *input_x_;
  GType *output_y_;
  float epsilon_;
};
#endif

#ifdef FUSION_INSTANCENORM_RELU_OP
template <typename Dtype>
class FusionInstanceNormReluParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionInstanceNormReluParam(const VariableNameMap &inputs,
                              const VariableNameMap &outputs,
                              const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
974 975 976 977 978 979 980 981 982 983 984 985 986 987 988 989 990
    out_ = OutFrom<GType>(outputs, *scope);
    epsilon_ = GetAttr<float>("epsilon", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  const float &Epsilon() const { return epsilon_; }

 private:
  GType *input_x_;
  GType *out_;
  float epsilon_;
};
#endif

#ifdef POOL_OP
template <typename Dtype>
class PoolParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  PoolParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputXFrom<GType>(inputs, *scope);

    output_ = OutFrom<GType>(outputs, *scope);
    pooling_type_ = GetStringAttr("pooling_type", attrs);
    ksize_ = GetAttr<vector<int>>("ksize", attrs);
    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
    ceil_mode_ = GetAttr<bool>("ceil_mode", attrs);
    global_pooling_ = GetAttr<bool>("global_pooling", attrs);

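    // "exclusive" (exclude padding from average pooling) may be absent in
    // older models; default to true when missing.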
    if (HasAttr("exclusive", attrs)) {
      exclusive_ = GetAttr<bool>("exclusive", attrs);
    } else {
      exclusive_ = true;
    }
  }

  const GType *Input() const { return input_; }

  GType *Output() const { return output_; }

  const string &PoolingType() const { return pooling_type_; }

  const vector<int> &Ksize() const { return ksize_; }

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

  bool isCeilMode() const { return ceil_mode_; }

  bool isGlobalPooling() const { return global_pooling_; }

  bool isExclusive() const { return exclusive_; }

 private:
  GType *input_;
  GType *output_;
  string pooling_type_;
  vector<int> ksize_;
  vector<int> strides_;
  vector<int> paddings_;
  bool ceil_mode_;
  bool global_pooling_ = false;
  bool exclusive_ = true;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::PoolingArgs fpga_pool_args;

 public:
  const fpga::PoolingArgs &FpgaArgs() const { return fpga_pool_args; }
  void SetFpgaArgs(const fpga::PoolingArgs &args) { fpga_pool_args = args; }
#endif
};
#endif

#ifdef PRIORBOX_OP
template <typename Dtype>
class PriorBoxParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  PriorBoxParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputFrom<GType>(inputs, *scope);
    input_image_ = InputImageFrom<GType>(inputs, *scope);
    output_boxes_ = OutputBoxesFrom<GType>(outputs, *scope);
    output_variances_ = OutputVariancesFrom<GType>(outputs, *scope);
    min_sizes_ = GetAttr<vector<float>>("min_sizes", attrs);
    max_sizes_ = GetAttr<vector<float>>("max_sizes", attrs);
    aspect_ratios_ = GetAttr<vector<float>>("aspect_ratios", attrs);
    variances_ = GetAttr<vector<float>>("variances", attrs);

    if (HasAttr("min_max_aspect_ratios_order", attrs)) {
      min_max_aspect_ratios_order_ =
          GetAttr<bool>("min_max_aspect_ratios_order", attrs);
    } else {
      min_max_aspect_ratios_order_ = false;
    }
    flip_ = GetAttr<bool>("flip", attrs);
    clip_ = GetAttr<bool>("clip", attrs);
    step_w_ = GetAttr<float>("step_w", attrs);
    step_h_ = GetAttr<float>("step_h", attrs);
    offset_ = GetAttr<float>("offset", attrs);
  }
  const GType *Input() const { return input_; }

  const GType *InputImage() const { return input_image_; }

  GType *OutputBoxes() const { return output_boxes_; }

  GType *OutputVariances() const { return output_variances_; }

  const vector<float> &MinSizes() const { return min_sizes_; }

  const vector<float> &MaxSizes() const { return max_sizes_; }

  const vector<float> &AspectRatios() const { return aspect_ratios_; }

  const vector<float> &Variances() const { return variances_; }

  const bool &Flip() const { return flip_; }

  const bool &Clip() const { return clip_; }

  const float &StepW() const { return step_w_; }

  const float &StepH() const { return step_h_; }

  const float &Offset() const { return offset_; }

  const bool &MinMaxAspectRatiosOrder() const {
    return min_max_aspect_ratios_order_;
  }

 private:
  GType *input_;
  GType *input_image_;
  GType *output_boxes_;
  GType *output_variances_;
  vector<float> min_sizes_;
  vector<float> max_sizes_;
  vector<float> aspect_ratios_;
  vector<float> variances_;
  bool flip_;
  bool clip_;
  float step_w_;
  float step_h_;
  float offset_;
  bool min_max_aspect_ratios_order_;
};
#endif

#ifdef BOXCODER_OP
template <typename Dtype>
class BoxCoderParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  BoxCoderParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_priorbox_ = InputPriorBoxFrom<GType>(inputs, *scope);
    input_priorboxvar_ = InputPriorBoxVarFrom<GType>(inputs, *scope);
    input_targetbox_ = InputTargetBoxFrom<GType>(inputs, *scope);
    output_box_ = OutputBoxFrom<GType>(outputs, *scope);
    code_type_ = GetStringAttr("code_type", attrs);
  }
  const GType *InputPriorBox() const { return input_priorbox_; }

  const GType *InputPriorBoxVar() const { return input_priorboxvar_; }

  const GType *InputTargetBox() const { return input_targetbox_; }

  GType *OutputBox() const { return output_box_; }

  const std::string &CodeType() const { return code_type_; }

 private:
  GType *input_priorbox_;
  GType *input_priorboxvar_;
  GType *input_targetbox_;
  GType *output_box_;
  std::string code_type_;
};
#endif

#ifdef SOFTMAX_OP
template <typename Dtype>
class SoftmaxParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SoftmaxParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
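    // "axis" is optional; axis_ keeps its default of -1 (the last dimension)
    // when the attribute is absent.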
    if (HasAttr("axis", attrs)) {
      axis_ = GetAttr<int>("axis", attrs);
      has_axis_ = true;
    }
  }
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return out_; }

  int axis_ = -1;
  bool has_axis_ = false;

 private:
  GType *input_x_;
  GType *out_;

#ifdef PADDLE_MOBILE_FPGA

#ifdef PADDLE_MOBILE_FPGA_V1

 private:
  std::shared_ptr<GType> float_input_x_;
  fpga::BypassArgs fpga_bypass_args;

 public:
  GType *FloatInput() const {
    return float_input_x_ == nullptr ? input_x_ : float_input_x_.get();
  }
  void SetFloatInput(LoDTensor *input) { float_input_x_.reset(input); }
  const fpga::BypassArgs &FpgaArgs() const { return fpga_bypass_args; }
  void SetFpgaArgs(const fpga::BypassArgs &args) { fpga_bypass_args = args; }
#else

 private:
  fpga::BypassArgs fpga_bypass_args;

 public:
  const fpga::BypassArgs &FpgaArgs() const { return fpga_bypass_args; }
  void SetFpgaArgs(const fpga::BypassArgs &args) { fpga_bypass_args = args; }

 public:
  std::shared_ptr<Tensor> float_input_x_, float_out;
#endif
#endif
};
#endif

#ifdef SIGMOID_OP
template <typename Dtype>
class SigmoidParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SigmoidParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
  }
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return out_; }

 private:
  GType *input_x_;
  GType *out_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::BypassArgs fpga_bypass_args;

 public:
  const fpga::BypassArgs &FpgaArgs() const { return fpga_bypass_args; }
  void SetFpgaArgs(const fpga::BypassArgs &args) { fpga_bypass_args = args; }
#endif
};
#endif

#ifdef MULTICLASSNMS_OP
template <typename Dtype>
class MultiClassNMSParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  MultiClassNMSParam(const VariableNameMap &inputs,
                     const VariableNameMap &outputs, const AttributeMap &attrs,
                     Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_bboxes_ = InputBBoxesFrom<GType>(inputs, *scope);
    input_scores_ = InputScoresFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    background_label_ = GetAttr<int>("background_label", attrs);
    nms_top_k_ = GetAttr<int>("nms_top_k", attrs);
    keep_top_k_ = GetAttr<int>("keep_top_k", attrs);
    nms_threshold_ = GetAttr<float>("nms_threshold", attrs);
    nms_eta_ = GetAttr<float>("nms_eta", attrs);
    score_threshold_ = GetAttr<float>("score_threshold", attrs);
  }

  GType *InputBBoxes() const { return input_bboxes_; }

  GType *InputScores() const { return input_scores_; }

  GType *Out() const { return out_; }

  const int &BackGroundLabel() const { return background_label_; }

  const int &NMSTopK() const { return nms_top_k_; }

  const int &KeepTopK() const { return keep_top_k_; }

  const float &NMSThreshold() const { return nms_threshold_; }

  const float &NMSEta() const { return nms_eta_; }

  const float &ScoreThreshold() const { return score_threshold_; }

 private:
  GType *input_bboxes_;
  GType *input_scores_;
  GType *out_;
  int background_label_;
  int nms_top_k_;
  int keep_top_k_;
  float nms_threshold_;
  float nms_eta_;
  float score_threshold_;
};
#endif

#ifdef POLYGONBOXTRANSFORM_OP
template <typename Dtype>
class PolygonBoxTransformParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  PolygonBoxTransformParam(const VariableNameMap &inputs,
                           const VariableNameMap &outputs,
1323 1324 1325 1326
                           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputFrom<GType>(inputs, *scope);
    output_ = OutputFrom<GType>(outputs, *scope);
L
lijiancheng0614 已提交
1327
  }
1328 1329
  const GType *Input() const { return input_; }
  GType *Output() const { return output_; }
L
lijiancheng0614 已提交
1330 1331

 private:
1332 1333
  GType *input_;
  GType *output_;
L
lijiancheng0614 已提交
1334 1335 1336
};
#endif

N
nhzlx 已提交
1337
template <typename Dtype>
L
liuruilong 已提交
1338
class FeedParam : public OpParam {
N
nhzlx 已提交
1339 1340 1341
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

L
liuruilong 已提交
1342 1343
 public:
  FeedParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
H
update  
hjchen2 已提交
1344
            const AttributeMap &attrs, Scope *scope)
1345
      : OpParam(inputs, outputs, attrs, scope) {
H
hjchen2 已提交
1346
    input_x_ = InputXFrom<std::vector<LoDTensor>>(inputs, *scope);
H
update  
hjchen2 已提交
1347
    out_ = OutFrom<GType>(outputs, *scope);
H
update  
hjchen2 已提交
1348
    col_ = GetAttr<int>("col", attrs);
H
update  
hjchen2 已提交
1349
    auto var = scope->FindVar("batch_size");
W
wangliu 已提交
1350
    batch_size = var->GetValue<int>();
L
liuruilong 已提交
1351
  }
H
hjchen2 已提交
1352
  const std::vector<LoDTensor> *InputX() const { return input_x_; }
xiebaiyuan's avatar
xiebaiyuan 已提交
1353
  GType *Out() const { return out_; }
H
update  
hjchen2 已提交
1354
  const int Col() const { return col_; }
W
wangliu 已提交
1355
  const int BatchSize() const { return batch_size; }
L
liuruilong 已提交
1356

L
liuruilong 已提交
1357
 private:
H
hjchen2 已提交
1358
  std::vector<LoDTensor> *input_x_;
xiebaiyuan's avatar
xiebaiyuan 已提交
1359
  GType *out_;
H
update  
hjchen2 已提交
1360
  int col_;
W
wangliu 已提交
1361
  int batch_size;
L
liuruilong 已提交
1362 1363
};

N
nhzlx 已提交
1364
template <typename Dtype>
L
liuruilong 已提交
1365
class FetchParam : public OpParam {
N
nhzlx 已提交
1366 1367 1368
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

L
liuruilong 已提交
1369 1370
 public:
  FetchParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
H
update  
hjchen2 已提交
1371
             const AttributeMap &attrs, Scope *scope)
1372
      : OpParam(inputs, outputs, attrs, scope) {
H
hjchen2 已提交
1373 1374
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<std::vector<LoDTensor>>(outputs, *scope);
1375
    col_ = GetAttr<int>("col", attrs);
L
liuruilong 已提交
1376
  }
L
liuruilong 已提交
1377

H
hjchen2 已提交
1378 1379
  const GType *InputX() const { return input_x_; }
  std::vector<LoDTensor> *Out() const { return out_; }
1380
  const int Col() const { return col_; }
L
liuruilong 已提交
1381

L
liuruilong 已提交
1382
 private:
H
hjchen2 已提交
1383 1384
  GType *input_x_;
  std::vector<LoDTensor> *out_;
1385
  int col_;
qnqinan's avatar
qnqinan 已提交
1386
#ifdef PADDLE_MOBILE_FPGA
1387

qnqinan's avatar
qnqinan 已提交
1388
 public:
1389
#ifdef PADDLE_MOBILE_FPGA_V1
qnqinan's avatar
qnqinan 已提交
1390
  fpga::BypassArgs fpga_bypass_args;
1391
  Tensor aligned_out;
1392 1393 1394
#else
  std::shared_ptr<Tensor> aligned_out;
#endif
qnqinan's avatar
qnqinan 已提交
1395
#endif
L
liuruilong 已提交
1396 1397
};

L
lijiancheng0614 已提交
1398 1399 1400 1401 1402 1403 1404 1405 1406
#ifdef FILL_CONSTANT_OP
template <typename Dtype>
class FillConstantParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FillConstantParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
1407 1408 1409 1410
                    Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    out_var_ = OutVarFrom(outputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
L
lijiancheng0614 已提交
1411 1412 1413 1414 1415 1416 1417
    dtype_ = GetAttr<int>("dtype", attrs);
    shape_ = GetAttr<vector<int>>("shape", attrs);
    value_ = GetAttr<float>("value", attrs);
  }

  Variable *OutVar() const { return out_var_; }

1418
  GType *Out() const { return out_; }
L
lijiancheng0614 已提交
1419 1420 1421 1422 1423 1424 1425 1426 1427

  const int &DataDtype() const { return dtype_; }

  const vector<int> &Shape() const { return shape_; }

  const float &Value() const { return value_; }

 private:
  Variable *out_var_;
1428
  GType *out_;
L
lijiancheng0614 已提交
1429 1430 1431 1432 1433 1434
  int dtype_;
  vector<int> shape_;
  float value_;
};
#endif

1435 1436 1437 1438 1439 1440 1441 1442 1443 1444 1445 1446 1447 1448 1449 1450 1451 1452 1453 1454 1455 1456 1457 1458 1459 1460 1461 1462 1463 1464 1465 1466 1467 1468 1469 1470 1471 1472 1473 1474 1475 1476 1477 1478 1479 1480 1481 1482 1483
#ifdef FILL_CONSTANT_BATCH_SIZE_LIKE_OP
template <typename Dtype>
class FillConstantBatchSizeLikeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FillConstantBatchSizeLikeParam(const VariableNameMap &inputs,
                                 const VariableNameMap &outputs,
                                 const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputFrom<GType>(inputs, *scope);
    out_var_ = OutVarFrom(outputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    dtype_ = GetAttr<int>("dtype", attrs);
    shape_ = GetAttr<vector<int>>("shape", attrs);
    value_ = GetAttr<float>("value", attrs);
    input_dim_idx_ = GetAttr<int>("input_dim_idx", attrs);
    output_dim_idx_ = GetAttr<int>("output_dim_idx", attrs);
  }

  Variable *OutVar() const { return out_var_; }

  const GType *Input() const { return input_; }

  GType *Out() const { return out_; }

  const int &DataDtype() const { return dtype_; }

  const vector<int> &Shape() const { return shape_; }

  const float &Value() const { return value_; }

  int InputDimIdx() const { return input_dim_idx_; }

  int OutputDimIdx() const { return output_dim_idx_; }

 private:
  GType *input_;
  Variable *out_var_;
  GType *out_;
  int dtype_;
  vector<int> shape_;
  float value_;
  int input_dim_idx_;
  int output_dim_idx_;
};
#endif

L
liuruilong 已提交
1484
#ifdef TRANSPOSE_OP
N
nhzlx 已提交
1485
template <typename Dtype>
E
eclipsess 已提交
1486
class TransposeParam : public OpParam {
N
nhzlx 已提交
1487 1488 1489
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

E
eclipsess 已提交
1490 1491
 public:
  TransposeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
1492 1493 1494 1495
                 const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
E
eclipsess 已提交
1496 1497 1498
    axis_ = GetAttr<vector<int>>("axis", attrs);
  }

1499
  const GType *InputX() const { return input_x_; }
E
eclipsess 已提交
1500

1501
  GType *Out() const { return out_; }
E
eclipsess 已提交
1502 1503 1504 1505

  const vector<int> &Axis() const { return axis_; }

 private:
1506 1507
  GType *input_x_;
  GType *out_;
E
eclipsess 已提交
1508 1509
  vector<int> axis_;
};
L
liuruilong 已提交
1510
#endif
E
eclipsess 已提交
1511
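// Axis semantics shared by TransposeParam above and Transpose2Param below:
// output dimension i is taken from input dimension axis[i], e.g.
// axis = {0, 2, 3, 1} permutes an NCHW tensor to NHWC.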

#ifdef TRANSPOSE2_OP
template <typename Dtype>
class Transpose2Param : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  Transpose2Param(const VariableNameMap &inputs, const VariableNameMap &outputs,
                  const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    output_xshape_ = OutputXShapeFrom<GType>(outputs, *scope);
    axis_ = GetAttr<vector<int>>("axis", attrs);
  }

  GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  GType *OutputXShape() const { return output_xshape_; }

  const vector<int> &Axis() const { return axis_; }

 private:
  GType *input_x_;
  GType *out_;
  GType *output_xshape_;
  vector<int> axis_;
};
#endif

#ifdef LOOKUP_OP
template <typename Dtype>
class LookupParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  LookupParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
              const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_w_ = InputWFrom<GType>(inputs, *scope);
    input_ids_ = InputIdsFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    padding_idx_ = GetAttr<int64_t>("padding_idx", attrs);
  }

  const GType *InputW() const { return input_w_; }
  const GType *InputIds() const { return input_ids_; }
  GType *Out() const { return out_; }
  int64_t PaddingIdx() const { return padding_idx_; }

 private:
  GType *input_w_;
  GType *input_ids_;
  GType *out_;
  int64_t padding_idx_;
};
#endif
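// padding_idx semantics (mirroring fluid's lookup_table): ids equal to
// padding_idx select an all-zero row instead of the corresponding row of W;
// the attribute is commonly -1 when unused.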

#ifdef CRF_OP
template <typename Dtype>
class CrfParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  //    {G_OP_TYPE_CRF, {{"Emission", "Transition", "Label"}, {"ViterbiPath"}}},

  CrfParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    // TODO: wire up the remaining CRF params
    input_emission_ = InputEmissionFrom<GType>(inputs, *scope);
    input_transition_ = InputTransitionFrom<GType>(inputs, *scope);
    input_label_ = InputLabelFrom<GType>(inputs, *scope);
    output_viterbipath_ = OutputViterbiPathFrom<GType>(outputs, *scope);
  }
  const GType *InputEmission() const { return input_emission_; }
  const GType *InputTransition() const { return input_transition_; }
  const GType *InputLabel() const { return input_label_; }
  GType *outputVBP() const { return output_viterbipath_; }

 private:
  GType *input_emission_;
  GType *input_transition_;
  GType *input_label_;
  GType *output_viterbipath_;
};
#endif

#ifdef RESHAPE_OP
template <typename Dtype>
class ReshapeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ReshapeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_shape_ = InputShapeFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    shape_ = GetAttr<vector<int>>("shape", attrs);

    if (HasAttr("inplace", attrs)) {
      inplace_ = GetAttr<bool>("inplace", attrs);
    } else {
      inplace_ = false;
      DLOG << "ReshapeParam: no inplace attribute; fluid may have updated";
    }
  }

  const GType *InputX() const { return input_x_; }

  const GType *InputShape() const { return input_shape_; }

  GType *Out() const { return out_; }

  const vector<int> &Shape() const { return shape_; }

  const bool &Inplace() const { return inplace_; }

 private:
  GType *input_x_;
  GType *input_shape_;
  GType *out_;
  vector<int> shape_;
  bool inplace_;
};
#endif
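// "inplace" note: when true the kernel may reuse the input buffer for the
// output instead of copying; the HasAttr guard above exists because newer
// fluid models no longer emit this attribute.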

#ifdef RESHAPE2_OP
template <typename Dtype>
class Reshape2Param : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  Reshape2Param(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_shape_ = InputShapeFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    output_xshape_ = OutputXShapeFrom<GType>(outputs, *scope);
    shape_ = GetAttr<vector<int>>("shape", attrs);
    if (HasAttr("inplace", attrs)) {
      inplace_ = GetAttr<bool>("inplace", attrs);
    } else {
      inplace_ = false;
    }
  }

  GType *InputX() const { return input_x_; }

  const GType *InputShape() const { return input_shape_; }

  GType *Out() const { return out_; }

  GType *OutputXShape() const { return output_xshape_; }

  const vector<int> &Shape() const { return shape_; }

  const bool &Inplace() const { return inplace_; }

 private:
  GType *input_x_;
  GType *input_shape_;
  GType *out_;
  GType *output_xshape_;
  vector<int> shape_;
  bool inplace_;
};
#endif

#ifdef SCALE_OP
template <typename Dtype>
class ScaleParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ScaleParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    scale_ = GetAttr<float>("scale", attrs);
    bias_ = GetAttr<float>("bias", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  const float Scale() const { return scale_; }

  const float Bias() const { return bias_; }

 private:
  GType *input_x_;
  GType *out_;
  float scale_;
  float bias_;
};
#endif
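// Scale computes out = scale * x + bias, element-wise.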

#ifdef SLICE_OP
template <typename Dtype>
class SliceParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SliceParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);

    axes_ = GetAttr<std::vector<int>>("axes", attrs);
    starts_ = GetAttr<std::vector<int>>("starts", attrs);
    ends_ = GetAttr<std::vector<int>>("ends", attrs);

    original_output_dims_size_ = output_->dims().size();
  }

 public:
  GType *input_;
  GType *output_;
  std::vector<int> axes_;
  std::vector<int> starts_;
  std::vector<int> ends_;
  int original_output_dims_size_;
};
#endif
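// Slice semantics: for each k, input dimension axes[k] is cropped to the
// half-open range [starts[k], ends[k]), e.g. axes = {1}, starts = {2},
// ends = {5} keeps channels 2..4; negative indices count from the end.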

#ifdef RESIZE_OP
template <typename Dtype>
class ResizeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ResizeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
              const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_shape_ = InputShapeFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    is_pyramid_test_ = GetAttr<bool>("is_pyramid_test", attrs);
    height_ = GetAttr<int>("height", attrs);
    width_ = GetAttr<int>("width", attrs);
    out_height_scale_ = GetAttr<float>("out_height_scale", attrs);
    out_width_scale_ = GetAttr<float>("out_width_scale", attrs);
  }

  const GType *InputX() const { return input_x_; }

  const GType *InputShape() const { return input_shape_; }

  GType *Out() const { return out_; }

  const bool &IsPyramidTest() const { return is_pyramid_test_; }

  const int &Height() const { return height_; }

  const int &Width() const { return width_; }

  const float &OutHeightScale() const { return out_height_scale_; }

  const float &OutWidthScale() const { return out_width_scale_; }

 private:
  GType *input_x_;
  GType *input_shape_;
  GType *out_;
  bool is_pyramid_test_;
  int height_;
  int width_;
  float out_height_scale_;
  float out_width_scale_;
};
#endif

#ifdef RELU_OP
/*
 * @b The op layer instantiates this param and passes it to the kernel layer.
 * */
template <typename Dtype>
class ReluParamBase : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ReluParamBase(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

 private:
  GType *input_x_;
  GType *out_;
};

template <typename Dtype>
class ReluParam : public ReluParamBase<Dtype> {
 public:
  using ReluParamBase<Dtype>::ReluParamBase;
};

template <typename Dtype>
class Relu6Param : public ReluParamBase<Dtype> {
 public:
  Relu6Param(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : ReluParamBase<Dtype>(inputs, outputs, attrs, scope) {
    threshold = OpParam::GetAttr<float>("threshold", attrs);
  }
  float getThreshold() const { return threshold; }

 private:
  float threshold;
};

#ifdef PADDLE_MOBILE_CL
template <>
class ReluParam<GPU_CL> : public ReluParamBase<GPU_CL> {
 public:
  using ReluParamBase<GPU_CL>::ReluParamBase;
  framework::CLImage &getMidImage() { return midImage; }

 private:
  framework::CLImage midImage;
};
#endif

#endif
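// Relu: out = max(x, 0).  Relu6 additionally clamps from above:
// out = min(max(x, 0), threshold), with threshold conventionally 6.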

#ifdef TANH_OP
template <typename Dtype>
class TanhParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  TanhParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
  }
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return out_; }

 private:
  GType *input_x_;
  GType *out_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  std::shared_ptr<GType> float_input_x_;
  fpga::BypassArgs fpga_bypass_args;

 public:
  GType *FloatInput() const {
    return float_input_x_ == nullptr ? input_x_ : float_input_x_.get();
  }
  void SetFloatInput(LoDTensor *input) { float_input_x_.reset(input); }
  const fpga::BypassArgs &FpgaArgs() const { return fpga_bypass_args; }
  void SetFpgaArgs(const fpga::BypassArgs &args) { fpga_bypass_args = args; }
#endif
};
#endif

#ifdef PRELU_OP
template <typename Dtype>
class PReluParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  PReluParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    DLOG << "PReluParam: parsing inputs";
    input_x_ = InputXFrom<GType>(inputs, *scope);
    alpha_ = InputAlphaFrom<GType>(inputs, *scope);
    framework::DDim dims = alpha_->dims();
    out_ = OutFrom<GType>(outputs, *scope);
    mode_ = GetStringAttr("mode", attrs);
    DLOG << "PReluParam mode: " << mode_;
  }
  const GType *InputX() const { return input_x_; }
  const GType *InputAlpha() const { return alpha_; }
  GType *Out() const { return out_; }
  const std::string &Mode() const { return mode_; }

 private:
  GType *input_x_;
  GType *out_;
  GType *alpha_;
  std::string mode_;
};
#endif
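// "mode" selects how alpha is broadcast (as in fluid's prelu): "all" shares
// one alpha for the whole tensor, "channel" uses one alpha per channel, and
// "element" one alpha per element.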

#ifdef LEAKY_RELU_OP
template <typename Dtype>
class LeakyReluParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  LeakyReluParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                 const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    alpha_ = GetAttr<float>("alpha", attrs);
  }
  const GType *InputX() const { return input_x_; }
  const float Alpha() const { return alpha_; }
  GType *Out() const { return out_; }

 private:
  GType *input_x_;
  GType *out_;
  float alpha_;
};
#endif
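// LeakyRelu: out = x > 0 ? x : alpha * x.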

template <typename Dtype>
class FusionFcParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionFcParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    input_z_ = InputZFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    x_num_col_dims_ = GetAttr<int>("x_num_col_dims", attrs);
    y_num_col_dims_ = GetAttr<int>("y_num_col_dims", attrs);
    axis_ = GetAttr<int>("axis", attrs);
  }
  GType *InputX() const { return input_x_; }

  GType *InputY() const { return input_y_; }

  GType *InputZ() const { return input_z_; }

  GType *Out() const { return out_; }

  const int &XNumColDims() const { return x_num_col_dims_; }

  const int &YNumColDims() const { return y_num_col_dims_; }

  const int &Axis() const { return axis_; }

 private:
  GType *input_x_;
  GType *input_y_;
  GType *input_z_;
  GType *out_;
  int x_num_col_dims_;
  int y_num_col_dims_;
  int axis_;

#ifdef PADDLE_MOBILE_FPGA

 private:  // NOLINT
  fpga::SplitConvArgs fpga_conv_args;

 public:
  const fpga::SplitConvArgs &FpgaArgs() const { return fpga_conv_args; }
  void SetFpgaArgs(const fpga::SplitConvArgs &args) { fpga_conv_args = args; }
#endif
};
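// Fused fully-connected: out = X' * Y' + Z, where X' and Y' are X and Y
// flattened to matrices by x_num_col_dims / y_num_col_dims (as in fluid's
// mul op) and axis controls how the bias Z broadcasts (as in
// elementwise_add).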

#ifdef FUSION_FCRELU_OP
template <typename DeviceType>
using FusionFcReluParam = FusionFcParam<DeviceType>;
#endif

template <typename Dtype>
class FusionConvAddParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvAddParam(const VariableNameMap &inputs,
                     const VariableNameMap &outputs, const AttributeMap &attrs,
                     Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }
  GType *Bias() const { return bias_; }

  const int &Axis() const { return axis_; }

 protected:
  GType *bias_;
  int axis_;
};

template <typename Dtype>
Print &operator<<(Print &printer, const FusionConvAddParam<Dtype> &conv_param);

#ifdef FUSION_CONVADDRELU_OP
template <typename DeviceType>
class FusionConvAddReluParam : public FusionConvAddParam<DeviceType> {
 public:
  FusionConvAddReluParam(const VariableNameMap &inputs,
                         const VariableNameMap &outputs,
                         const AttributeMap &attrs, Scope *scope)
      : FusionConvAddParam<DeviceType>(inputs, outputs, attrs, scope) {}
};
#endif

#ifdef FUSION_CONVADDPRELU_OP
template <typename Dtype>
class FusionConvAddPReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvAddPReluParam(const VariableNameMap &inputs,
                          const VariableNameMap &outputs,
                          const AttributeMap &attrs, Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    alpha_ = OpParam::InputAlphaFrom<GType>(inputs, *scope);
    mode_ = OpParam::GetStringAttr("mode", attrs);
    framework::DDim dims = alpha_->dims();
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }
  const GType *InputAlpha() const { return alpha_; }
  const std::string &Mode() const { return mode_; }
  GType *Bias() const { return bias_; }
  const int &Axis() const { return axis_; }

 protected:
  GType *bias_;
  int axis_;
  GType *alpha_;
  std::string mode_;
};
#endif

#ifdef FUSION_CONVADDADDPRELU_OP
template <typename Dtype>
class FusionConvAddAddPReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvAddAddPReluParam(const VariableNameMap &inputs,
                             const VariableNameMap &outputs,
                             const AttributeMap &attrs, Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    bias1_ = OpParam::InputYFrom1<GType>(inputs, *scope);
    alpha_ = OpParam::InputAlphaFrom<GType>(inputs, *scope);
    mode_ = OpParam::GetStringAttr("mode", attrs);
    framework::DDim dims = alpha_->dims();
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    keyOutput_ = OpParam::Getkey("addOut", inputs, 0);
    keyX1_ = OpParam::Getkey("addX", inputs, 1);
    keyY1_ = OpParam::Getkey("Y", inputs, 1);
    if (keyX1_ == keyOutput_) {
      bias1_ = OpParam::InputYFrom1<GType>(inputs, *scope);
    } else if (keyY1_ == keyOutput_) {
      bias1_ = OpParam::InputXFrom1<GType>(inputs, *scope);
    }
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }
  const GType *InputAlpha() const { return alpha_; }
  const std::string &Mode() const { return mode_; }
  const GType *Bias1() const { return bias1_; }

  GType *Bias() const { return bias_; }

  const int &Axis() const { return axis_; }

 protected:
  GType *bias_;
  int axis_;
  GType *alpha_;
  std::string mode_;
  GType *bias1_;
  std::string keyOutput_;
  std::string keyX1_;
  std::string keyY1_;
};
#endif

#ifdef FUSION_CONVADDBNRELU_OP
template <typename Dtype>
class FusionConvAddBNReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvAddBNReluParam(const VariableNameMap &inputs,
                           const VariableNameMap &outputs,
                           const AttributeMap &attrs, Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }

  ~FusionConvAddBNReluParam() {}

  GType *Bias() const { return bias_; }

  const int &Axis() const { return axis_; }

  const GType *InputBias() const { return input_bias_; }

  const GType *InputMean() const { return input_mean_; }

  const GType *InputScale() const { return input_scale_; }

  const GType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const GType *NewScale() const { return new_scale_.get(); }

  const GType *NewBias() const { return new_bias_.get(); }

 protected:
  GType *bias_;
  int axis_;
  GType *input_bias_;
  GType *input_mean_;
  GType *input_scale_;
  GType *input_variance_;
  float epsilon_;
  float momentum_;
  std::shared_ptr<GType> new_bias_;
  std::shared_ptr<GType> new_scale_;
#endif

#ifdef FUSION_CONVBNADDRELU_OP
template <typename Dtype>
2198
class FusionConvBNAddReluParam : public ConvParam<Dtype> {
2199 2200 2201 2202 2203 2204
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvBNAddReluParam(const VariableNameMap &inputs,
                           const VariableNameMap &outputs,
2205
                           const AttributeMap &attrs, Scope *scope)
2206
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
2207
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
2208
    axis_ = OpParam::GetAttr<int>("axis", attrs);
H
update  
hjchen2 已提交
2209 2210 2211 2212
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
2213 2214
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
2215 2216 2217
    keyBNY_ = OpParam::Getkey("BNY", inputs, 0);
    keyX_ = OpParam::Getkey("X", inputs, 0);
    keyY_ = OpParam::Getkey("Y", inputs, 0);
2218
    if (keyX_ == keyBNY_) {
2219
      bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
2220
    } else if (keyY_ == keyBNY_) {
2221
      bias_ = OpParam::InputXFrom<GType>(inputs, *scope);
2222
    }
H
update  
hjchen2 已提交
2223
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
2224
  }
2225

2226
  ~FusionConvBNAddReluParam() {}
2227
  GType *Bias() const { return bias_; }
2228 2229 2230

  const int &Axis() const { return axis_; }

2231
  const GType *InputBias() const { return input_bias_; }
2232

2233
  const GType *InputMean() const { return input_mean_; }
2234

2235
  const GType *InputScale() const { return input_scale_; }
2236

2237
  const GType *InputVariance() const { return input_variance_; }
2238 2239 2240 2241 2242

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

2243 2244 2245
  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }
2246

2247 2248 2249
  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }
2250

2251
  const GType *NewScale() const { return new_scale_.get(); }
2252

2253
  const GType *NewBias() const { return new_bias_.get(); }
2254 2255

 protected:
2256
  GType *bias_;
2257
  int axis_;
2258 2259 2260 2261
  GType *input_bias_;
  GType *input_mean_;
  GType *input_scale_;
  GType *input_variance_;
2262 2263
  float epsilon_;
  float momentum_;
2264 2265
  std::shared_ptr<GType> new_bias_;
  std::shared_ptr<GType> new_scale_;
2266 2267 2268
  std::string keyBNY_;
  std::string keyX_;
  std::string keyY_;
E
eclipsess 已提交
2269
};
2270
#endif
E
eclipsess 已提交
2271

#ifdef FUSION_CONVBN_OP
template <typename Dtype>
class FusionConvBNParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvBNParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    this->output_ = OpParam::OutputYFrom<GType>(outputs, *scope);
  }

  const GType *InputBias() const { return input_bias_; }

  const GType *InputMean() const { return input_mean_; }

  const GType *InputScale() const { return input_scale_; }

  const GType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const GType *NewScale() const { return new_scale_.get(); }

  const GType *NewBias() const { return new_bias_.get(); }

 protected:
  GType *input_bias_;
  GType *input_mean_;
  GType *input_scale_;
  GType *input_variance_;
  float epsilon_;
  float momentum_;
  std::shared_ptr<GType> new_bias_;
  std::shared_ptr<GType> new_scale_;
};
#endif

#ifdef FUSION_CONVADDBN_OP
template <typename Dtype>
class FusionConvAddBNParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvAddBNParam(const VariableNameMap &inputs,
                       const VariableNameMap &outputs,
                       const AttributeMap &attrs, Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    this->output_ = OpParam::OutputYFrom<GType>(outputs, *scope);
  }
  GType *Bias() const { return bias_; }

  const int &Axis() const { return axis_; }

  const GType *InputBias() const { return input_bias_; }

  const GType *InputMean() const { return input_mean_; }

  const GType *InputScale() const { return input_scale_; }

  const GType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const GType *NewScale() const { return new_scale_.get(); }

  const GType *NewBias() const { return new_bias_.get(); }

 protected:
  GType *bias_;
  int axis_;
  GType *input_bias_;
  GType *input_mean_;
  GType *input_scale_;
  GType *input_variance_;
  float epsilon_;
  float momentum_;
  std::shared_ptr<GType> new_bias_;
  std::shared_ptr<GType> new_scale_;
};
#endif

#ifdef FUSION_DWCONVBNRELU_OP
template <typename Dtype>
class FusionDWConvBNReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDWConvBNReluParam(const VariableNameMap &inputs,
                          const VariableNameMap &outputs,
                          const AttributeMap &attrs, Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }

  ~FusionDWConvBNReluParam() {}

  const GType *InputBias() const { return input_bias_; }

  const GType *InputMean() const { return input_mean_; }

  const GType *InputScale() const { return input_scale_; }

  const GType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const GType *NewScale() const { return new_scale_.get(); }

  const GType *NewBias() const { return new_bias_.get(); }

 protected:
  GType *input_bias_;
  GType *input_mean_;
  GType *input_scale_;
  GType *input_variance_;
  float epsilon_;
  float momentum_;
  std::shared_ptr<GType> new_bias_;
  std::shared_ptr<GType> new_scale_;
};

#endif

#ifdef FUSION_CONVRELU_OP
template <typename Dtype>
class FusionConvReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvReluParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
                      Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }
};
#endif

#ifdef FUSION_CONVBNRELU_OP
template <typename Dtype>
class FusionConvBNReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvBNReluParam(const VariableNameMap &inputs,
                        const VariableNameMap &outputs,
                        const AttributeMap &attrs, Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }

  ~FusionConvBNReluParam() {}

  const GType *InputBias() const { return input_bias_; }

  const GType *InputMean() const { return input_mean_; }

  const GType *InputScale() const { return input_scale_; }

  const GType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const GType *NewScale() const { return new_scale_.get(); }

  const GType *NewBias() const { return new_bias_.get(); }

 protected:
  GType *input_bias_;
  GType *input_mean_;
  GType *input_scale_;
  GType *input_variance_;
  float epsilon_;
  float momentum_;
  std::shared_ptr<GType> new_bias_;
  std::shared_ptr<GType> new_scale_;
};
#endif

#ifdef IM2SEQUENCE_OP
template <typename Dtype>
class Im2SequenceParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  Im2SequenceParam(const VariableNameMap &inputs,
                   const VariableNameMap &outputs, const AttributeMap &attrs,
                   Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    kernels_ = GetAttr<vector<int>>("kernels", attrs);
    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
  }

  const GType *Input() const { return input_x_; }

  GType *Output() const { return out_; }

  const vector<int> &Kernels() const { return kernels_; }

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

 private:
  GType *input_x_;
  GType *out_;
  vector<int> kernels_;
  vector<int> strides_;
  vector<int> paddings_;
};
#endif
2561
#ifdef DROPOUT_OP
N
nhzlx 已提交
2562
template <typename Dtype>
Y
Yao,kun 已提交
2563
class DropoutParam : public OpParam {
N
nhzlx 已提交
2564 2565 2566
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

Y
Yao,kun 已提交
2567 2568
 public:
  DropoutParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
2569 2570 2571 2572
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
Y
yangfei 已提交
2573 2574

    dropout_prob_ = GetAttr<float>("dropout_prob", attrs);
Y
Yao,kun 已提交
2575 2576
  }

2577
  const GType *InputX() const { return input_x_; }
Y
Yao,kun 已提交
2578

2579
  GType *Out() const { return out_; }
Y
Yao,kun 已提交
2580

Y
yangfei 已提交
2581 2582
  float DropoutProb() const { return dropout_prob_; }

Y
Yao,kun 已提交
2583
 private:
2584 2585
  GType *input_x_;
  GType *out_;
Y
yangfei 已提交
2586
  float dropout_prob_;
Y
Yao,kun 已提交
2587
};
2588
#endif
Y
Yao,kun 已提交
2589

template <typename Dtype>
class ConvTransposeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ConvTransposeParam(const VariableNameMap &inputs,
                     const VariableNameMap &outputs, const AttributeMap &attrs,
                     Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    filter_ = OpParam::FilterFrom<GType>(inputs, *scope);
    input_ = OpParam::InputFrom<GType>(inputs, *scope);
    // output_ = OutputFrom<GType>(outputs, scope);
    if (outputs.count("Output")) {
      output_ = OpParam::OutputFrom<GType>(outputs, *scope);
    }
    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
    dilations_ = GetAttr<vector<int>>("dilations", attrs);
    if (HasAttr("output_size", attrs)) {
      output_size_ = GetAttr<vector<int>>("output_size", attrs);
      DLOG << "conv transpose output size: " << output_size_;
    }
    groups = GetAttr<int>("groups", attrs);
  }

  const GType *Input() const { return input_; }

  GType *Filter() const { return filter_; }

  GType *Output() const { return output_; }

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

  // filter_ and transformed_filter_ are tensors, so these accessors return
  // GType* (the original vector<int>& return types could never compile once
  // instantiated).
  GType *Filters() const { return filter_; }

  GType *TransFilters() const { return transformed_filter_; }

  const vector<int> &Dilations() const { return dilations_; }

  const vector<int> &OutputSize() const { return output_size_; }

  const int &Groups() const { return groups; }

  enum ExecMode {
    EXEC_INVALID = 0,
    EXEC_GEMM_FLOAT,
    EXEC_DECONV3X3_FLOAT,
    EXEC_DECONV4X4_FLOAT,
    EXEC_DEPTHWISETRANS_FLOAT,
    EXEC_CONVTRANS3x3s2_FLOAT,
    EXEC_CONVTRANS_FLOAT,
  };

  ExecMode &ExecMode() const { return exec_mode_; }

 private:
  GType *input_;
  GType *output_;
  GType *filter_;
  GType *transformed_filter_;
  vector<int> strides_;
  vector<int> paddings_;
  vector<int> dilations_;
  vector<int> output_size_;
  int groups;
  mutable enum ExecMode exec_mode_;

#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::DeconvArgs fpga_conv_args;
  fpga::DWDeconvArgs fpga_DWDeconv_args;

 public:
  const fpga::DeconvArgs &FpgaArgs() const { return fpga_conv_args; }
  const fpga::DWDeconvArgs &FpgaDWDconvArgs() const {
    return fpga_DWDeconv_args;
  }
  void SetFpgaArgs(const fpga::DeconvArgs &args) { fpga_conv_args = args; }
  void SetFpgaArgs(const fpga::DWDeconvArgs &args) {
    fpga_DWDeconv_args = args;
  }
#endif
};
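// Output-shape rule the deconv kernels follow when "output_size" is absent
// (standard transposed-convolution arithmetic, per spatial dimension):
//   out = (in - 1) * stride - 2 * pad + dilation * (k - 1) + 1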

#ifdef FUSION_DECONVADD_OP
template <typename Dtype>
class FusionDeconvAddParam : public ConvTransposeParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDeconvAddParam(const VariableNameMap &inputs,
                       const VariableNameMap &outputs,
                       const AttributeMap &attrs, Scope *scope)
      : ConvTransposeParam<Dtype>(inputs, outputs, attrs, scope) {
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }
  GType *Bias() const { return bias_; }

  const int &Axis() const { return axis_; }

  GType *Output() const { return output_; }

 protected:
  GType *bias_;
  int axis_;
  GType *output_;
};
#endif

#ifdef FUSION_DECONVADDRELU_OP
template <typename Dtype>
using FusionDeconvAddReluParam = FusionDeconvAddParam<Dtype>;
#endif
#ifdef FUSION_DECONVADDBN_OP
template <typename Dtype>
class FusionDeconvAddBNParam : public ConvTransposeParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDeconvAddBNParam(const VariableNameMap &inputs,
                         const VariableNameMap &outputs,
                         const AttributeMap &attrs, Scope *scope)
      : ConvTransposeParam<Dtype>(inputs, outputs, attrs, scope) {
    output_ = OpParam::OutFrom<GType>(outputs, *scope);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    //    is_test_ = OpParam::GetAttr<bool>("is_test", attrs);
  }
  RType *Output() const { return output_; }

  const RType *InputBias() const { return input_bias_; }

  const RType *InputMean() const { return input_mean_; }

  const RType *InputScale() const { return input_scale_; }

  const RType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  const bool &IsTest() const { return is_test_; }

  void SetNewScale(RType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(RType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const RType *NewScale() const { return new_scale_.get(); }

  const RType *NewBias() const { return new_bias_.get(); }

 protected:
  RType *output_;
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
  float epsilon_;
  float momentum_;
  bool is_test_;
  std::shared_ptr<RType> new_bias_;
  std::shared_ptr<RType> new_scale_;
};
#endif
#ifdef FUSION_DECONVBNRELU_OP
template <typename Dtype>
class FusionDeconvBNReluParam : public ConvTransposeParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDeconvBNReluParam(const VariableNameMap &inputs,
                          const VariableNameMap &outputs,
                          const AttributeMap &attrs, Scope *scope)
      : ConvTransposeParam<Dtype>(inputs, outputs, attrs, scope) {
    output_ = OpParam::OutFrom<GType>(outputs, *scope);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
  }
  RType *Output() const { return output_; }

  const RType *InputBias() const { return input_bias_; }

  const RType *InputMean() const { return input_mean_; }

  const RType *InputScale() const { return input_scale_; }

  const RType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  const bool &IsTest() const { return is_test_; }

  void SetNewScale(RType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(RType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const RType *NewScale() const { return new_scale_.get(); }

  const RType *NewBias() const { return new_bias_.get(); }

 protected:
  RType *output_;
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
  float epsilon_;
  float momentum_;
  bool is_test_;
  std::shared_ptr<RType> new_bias_;
  std::shared_ptr<RType> new_scale_;
};
#endif
#ifdef FUSION_DECONVADDBNRELU_OP
template <typename Dtype>
class FusionDeconvAddBNReluParam : public ConvTransposeParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDeconvAddBNReluParam(const VariableNameMap &inputs,
                             const VariableNameMap &outputs,
                             const AttributeMap &attrs, Scope *scope)
      : ConvTransposeParam<Dtype>(inputs, outputs, attrs, scope) {
    output_ = OpParam::OutFrom<GType>(outputs, *scope);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    //    is_test_ = OpParam::GetAttr<bool>("is_test", attrs);
  }
  RType *Output() const { return output_; }

  const RType *InputBias() const { return input_bias_; }

  const RType *InputMean() const { return input_mean_; }

  const RType *InputScale() const { return input_scale_; }

  const RType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  const bool &IsTest() const { return is_test_; }

  void SetNewScale(RType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(RType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const RType *NewScale() const { return new_scale_.get(); }

  const RType *NewBias() const { return new_bias_.get(); }

 protected:
  RType *output_;
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
  float epsilon_;
  float momentum_;
  bool is_test_;
  std::shared_ptr<RType> new_bias_;
  std::shared_ptr<RType> new_scale_;
};
#endif

#ifdef FUSION_DECONVRELU_OP
template <typename Dtype>
using FusionDeconvReluParam = ConvTransposeParam<Dtype>;
#endif

#ifdef GRU_OP
template <typename Dtype>
class GruParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;

 public:
  /**
   *
   * @param inputs
   * @param outputs
   * @param attrs
   * @param scope
   * */
  GruParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_input_ = InputFrom<GType>(inputs, *scope);
    input_h0_ = InputH0From<GType>(inputs, *scope);
    input_bias_ = InputBiasFrom<GType>(inputs, *scope);
    input_weight_ = InputWeightFrom<GType>(inputs, *scope);

    output_batch_gate_ = OutputBatchGateFrom<GType>(outputs, *scope);
    output_batch_reset_hidden_prev_ =
        OutputBatchResetHiddenPrevFrom<GType>(outputs, *scope);
    output_batch_hidden_ = OutputBatchHiddenFrom<GType>(outputs, *scope);
    output_hidden_ = OutputHiddenFrom<GType>(outputs, *scope);
    activation_ = GetStringAttr("activation", attrs);
    gate_activation_ = GetStringAttr("gate_activation", attrs);
xiebaiyuan's avatar
xiebaiyuan 已提交
2926 2927 2928 2929 2930 2931 2932 2933 2934 2935 2936 2937 2938 2939 2940 2941 2942 2943 2944 2945 2946 2947 2948 2949 2950 2951 2952 2953 2954 2955 2956 2957 2958
    is_reverse_ = GetAttr<bool>("is_reverse", attrs);
  }
  const GType *InputInput() const { return input_input_; }
  const GType *InputWeight() const { return input_weight_; }
  const GType *InputH0() const { return input_h0_; }
  const GType *InputBias() const { return input_bias_; }
  const std::string &Activation() const { return activation_; }
  const std::string &GateActivation() const { return gate_activation_; }
  const bool &IsReverse() const { return is_reverse_; }

  GType *OutBatchGate() const { return output_batch_gate_; }
  GType *OutBatchResetHiddenPrev() const {
    return output_batch_reset_hidden_prev_;
  }
  GType *OutBatchHidden() const { return output_batch_hidden_; }
  GType *OutHidden() const { return output_hidden_; }

 private:
  GType *input_input_;
  GType *input_h0_;
  GType *input_bias_;
  GType *input_weight_;

  GType *output_batch_gate_;
  GType *output_batch_reset_hidden_prev_;
  GType *output_batch_hidden_;
  GType *output_hidden_;
  std::string activation_;
  std::string gate_activation_;
  bool is_reverse_;
};
#endif
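
// For reference (mirroring the PaddlePaddle GRU operator): BatchGate,
// BatchResetHiddenPrev and BatchHidden are intermediates of the batched
// sequence computation, while Hidden carries the final recurrent output.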

#ifdef GRU_UNIT_OP
template <typename Dtype>
class GruUnitParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;

 public:
  GruUnitParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_input_ = InputFrom<GType>(inputs, *scope);
    input_hidden_prev_ = InputHiddenPrevFrom<GType>(inputs, *scope);
    input_bias_ = InputBiasFrom<GType>(inputs, *scope);
    input_weight_ = InputWeightFrom<GType>(inputs, *scope);

    output_gate_ = OutputGateFrom<GType>(outputs, *scope);
    output_reset_hidden_prev_ =
        OutputResetHiddenPrevFrom<GType>(outputs, *scope);
    output_hidden_ = OutputHiddenFrom<GType>(outputs, *scope);
    activation_ = GetAttr<int>("activation", attrs);
    gate_activation_ = GetAttr<int>("gate_activation", attrs);
  }
  const GType *InputInput() const { return input_input_; }
  const GType *InputWeight() const { return input_weight_; }
  const GType *InputHiddenPrev() const { return input_hidden_prev_; }
  const GType *InputBias() const { return input_bias_; }
  const int &Activation() const { return activation_; }
  const int &GateActivation() const { return gate_activation_; }

  GType *OutGate() const { return output_gate_; }
  GType *OutResetHiddenPrev() const { return output_reset_hidden_prev_; }
  GType *OutHidden() const { return output_hidden_; }

 private:
  GType *input_input_;
  GType *input_hidden_prev_;
  GType *input_bias_;
  GType *input_weight_;

  GType *output_gate_;
  GType *output_reset_hidden_prev_;
  GType *output_hidden_;
  int activation_;
  int gate_activation_;
};
#endif

#ifdef FLATTEN_OP
template <typename Dtype>
class FlattenParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FlattenParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    axis = GetAttr<int>("axis", attrs);
  }
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return out_; }
  const int &Axis() const { return axis; }

 private:
  GType *input_x_;
  GType *out_;
  int axis;
};
#endif
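
// Example: with axis = 1, flatten collapses an input of shape [N, C, H, W]
// into the 2-D matrix [N, C * H * W]; axis marks where the dimensions are
// split between rows and columns.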

#ifdef SPLIT_OP
template <typename Dtype>
class SplitParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SplitParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    outs_ = OutMultiFrom<GType>(outputs, *scope);
    axis = GetAttr<int>("axis", attrs);
    num = GetAttr<int>("num", attrs);
    sections = GetAttr<std::vector<int>>("sections", attrs);

    //    for (int i = 0; i < outs_.size(); ++i) {
    //      out_ts_.push_back(*scope.FindVar(outs_[i])->GetMutable());
    //    }
  }
  GType *InputX() const { return input_x_; }
  std::vector<GType *> Outs() const { return outs_; }
  int Axis() const { return axis; }
  int Num() const { return num; }
  std::vector<int> Sections() const { return sections; }
  //  std::vector<GType> OutTs() const { return out_ts_; }

 private:
  GType *input_x_;
  std::vector<GType *> outs_;
  int axis;
  int num;
  std::vector<int> sections;
  //  std::vector<GType> out_ts_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::SplitArgs fpga_split_args;

 public:
  const fpga::SplitArgs &FpgaArgs() const { return fpga_split_args; }
  void SetFpgaArgs(const fpga::SplitArgs &args) { fpga_split_args = args; }
#endif
};
#endif
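
// Example: splitting a [3, 9, 5] tensor along axis = 1 with num = 3 yields
// three [3, 3, 5] outputs, while sections = {2, 3, 4} instead produces
// slices of widths 2, 3 and 4 along that axis.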

#ifdef BILINEAR_INTERP_OP
template <typename Dtype>
class BilinearInterpParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  BilinearInterpParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
                      Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_outsize_ = InputOutSizeFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    out_h_ = GetAttr<int>("out_h", attrs);
    out_w_ = GetAttr<int>("out_w", attrs);
    align_corners = GetAttr<bool>("align_corners", attrs);
    align_mode = GetAttr<int>("align_mode", attrs);
    if (HasAttr("scale", attrs)) {
      has_scale_ = true;
      scale_ = GetAttr<float>("scale", attrs);
    }
    LOG(kLOG_DEBUG1) << "has_scale_: " << has_scale_;
    LOG(kLOG_DEBUG1) << "scale_: " << scale_;
  }
  const GType *InputX() const { return input_x_; }
  const GType *InputOutPutSize() const { return input_outsize_; }
  GType *Out() const { return out_; }
  int OutH() const { return out_h_; }
  int OutW() const { return out_w_; }
  bool AlignCorners() const { return align_corners; }
  int AlignMode() const { return align_mode; }
  float Scale() const { return scale_; }
  bool HasScale() const { return has_scale_; }

 private:
  GType *input_x_;
  GType *input_outsize_;
  GType *out_;
  int out_h_;
  int out_w_;
  bool align_corners;
  int align_mode;
  float scale_ = 0.f;
  bool has_scale_ = false;  // default when no "scale" attribute is given
};
#endif
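
// Output-size resolution for the interpolation ops (following the Paddle
// interpolate operators): an explicit OutSize tensor, when present,
// overrides the out_h/out_w attributes, and a positive scale implies
// out_h = in_h * scale and out_w = in_w * scale.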

#ifdef NEAREST_INTERP_OP
template <typename Dtype>
class NearestInterpolationParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  NearestInterpolationParam(const VariableNameMap &inputs,
                            const VariableNameMap &outputs,
                            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    const bool has_out_size = HasVar("OutSize", inputs);

    if (has_out_size) {
      input_outsize_ = InputOutSizeFrom<GType>(inputs, *scope);
    }

    out_ = OutFrom<GType>(outputs, *scope);

    if (HasAttr("out_h", attrs)) {
      out_h_ = GetAttr<int>("out_h", attrs);
    } else if (HasAttr("out_h ", attrs)) {
      // some exported models carry a trailing space in the attribute name
      out_h_ = GetAttr<int>("out_h ", attrs);
    }

    if (HasAttr("out_w", attrs)) {
      out_w_ = GetAttr<int>("out_w", attrs);
    } else if (HasAttr("out_w ", attrs)) {
      // some exported models carry a trailing space in the attribute name
      out_w_ = GetAttr<int>("out_w ", attrs);
    }

    LOG(kLOG_DEBUG1) << "out_h_: " << out_h_;
    LOG(kLOG_DEBUG1) << "out_w_: " << out_w_;

    if (HasAttr("scale", attrs)) {
      has_scale_ = true;
      scale_ = GetAttr<float>("scale", attrs);
    }
    LOG(kLOG_DEBUG1) << "has_scale_: " << has_scale_;
    LOG(kLOG_DEBUG1) << "scale_: " << scale_;
  }
  const GType *InputX() const { return input_x_; }
  const GType *InputOutPutSize() const { return input_outsize_; }
  GType *Out() const { return out_; }
  int OutH() const { return out_h_; }
  int OutW() const { return out_w_; }
  float Scale() const { return scale_; }
  bool HasScale() const { return has_scale_; }

 private:
  GType *input_x_;
  GType *input_outsize_ = nullptr;  // only set when an OutSize input exists
  GType *out_;
  int out_h_ = -1;  // -1 marks "attribute absent"
  int out_w_ = -1;
  float scale_ = 0.f;
  bool has_scale_ = false;
};
#endif

#ifdef SHAPE_OP
template <typename Dtype>
class ShapeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ShapeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
  }
  const GType *Input() const { return input_; }
  GType *Out() const { return out_; }

 private:
  GType *input_;
  GType *out_;
};
#endif

#ifdef TOP_K_OP
template <typename Dtype>
class TopKParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  TopKParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = OpParam::GetVarValue<GType>("X", inputs, *scope);
    output_ = OpParam::GetVarValue<GType>("Out", outputs, *scope);
    indices_ = OpParam::GetVarValue<GType>("Indices", outputs, *scope);
    k_ = OpParam::GetAttr<int>("k", attrs);
  }

 public:
  GType *input_;
  GType *output_;
  GType *indices_;
  int k_;
};
#endif  // TOP_K_OP

#ifdef CAST_OP
template <typename Dtype>
class CastParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  CastParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = OpParam::GetVarValue<GType>("X", inputs, *scope);
    output_ = OpParam::GetVarValue<GType>("Out", outputs, *scope);
    input_type_ = OpParam::GetAttr<int>("in_dtype", attrs);
    output_type_ = OpParam::GetAttr<int>("out_dtype", attrs);
  }

 public:
  GType *input_;
  GType *output_;
  int input_type_;
  int output_type_;
};
#endif  // CAST_OP

#ifdef QUANT_OP
template <typename Dtype>
class QuantizeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  QuantizeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    // online quantization: scale = max(abs(x))
    online_scale_ = OpParam::GetVarValue<GType>("OutScale", outputs, *scope);
    // offline quantization: the scale is precomputed and fed in as "InScale"
    if (inputs.count("InScale")) {
      offline_ = true;
      offline_scale_ = OpParam::GetVarValue<GType>("InScale", inputs, *scope);
    }
    // x = round(scale * x)
    if (OpParam::HasAttr("round_type", attrs)) {
      round_type_ = OpParam::GetAttr<RoundType>("round_type", attrs);
    }
  }

 public:
  // op input
  GType *input_;
  // op output
  GType *output_;
  GType *online_scale_;
  // quantize offline scale
  GType *offline_scale_ = nullptr;
  // whether an offline scale is provided
  bool offline_ = false;
  // round method type
  // RoundType round_type_ = ROUND_NEAREST_AWAY_ZERO;
  RoundType round_type_ = ROUND_NEAREST_TOWARDS_ZERO;
};
#endif
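
// Worked example of the online path (assuming an int8 target): for
// x = {-0.5, 0.25, 1.0} the recorded online scale is max|x| = 1.0, and an
// int8 kernel would map each element to round(x / scale * 127),
// e.g. 0.25 -> 32; round_type_ selects the tie-breaking rule.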

#ifdef DEQUANT_OP
template <typename Dtype>
class DequantizeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  DequantizeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                  const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    activation_scale_ = OpParam::GetVarValue<GType>("Scale", inputs, *scope);
    // dequantization is performed as x = x / static_scale / online_scale
    if (OpParam::HasAttr("weight_scale", attrs)) {
      weight_scale_ = OpParam::GetAttr<float>("weight_scale", attrs);
    } else {
      weight_scale_ = OpParam::GetAttr<float>("max_range", attrs);
    }
  }

 public:
  // op input
  GType *input_;
  // op output
  GType *output_;
  GType *activation_scale_;
  float weight_scale_;
};
#endif
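
// Example of the formula above: with a static weight_scale_ of 127 and an
// activation scale of 0.5, a raw accumulator value of 64 dequantizes to
// 64 / 127 / 0.5, i.e. about 1.008.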

#if defined(FUSION_DEQUANT_BN_OP) || defined(FUSION_DEQUANT_ADD_BN_OP) || \
    defined(FUSION_DEQUANT_ADD_BN_RELU_OP) ||                             \
    defined(FUSION_DEQUANT_BN_RELU_OP) ||                                 \
    defined(FUSION_DEQUANT_ADD_BN_QUANT_OP) ||                            \
    defined(FUSION_DEQUANT_ADD_BN_RELU_QUANT_OP)
template <typename Dtype>
class FusionDequantBNParam : public DequantizeParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDequantBNParam(const VariableNameMap &inputs,
                       const VariableNameMap &outputs,
                       const AttributeMap &attrs, Scope *scope)
      : DequantizeParam<Dtype>(inputs, outputs, attrs, scope) {
    // batch norm params
    bn_mean_ = OpParam::GetVarValue<GType>("BNMean", inputs, *scope);
    bn_variance_ = OpParam::GetVarValue<GType>("BNVariance", inputs, *scope);
    bn_scale_ = OpParam::GetVarValue<GType>("BNScale", inputs, *scope);
    bn_bias_ = OpParam::GetVarValue<GType>("BNBias", inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
  }

 public:
  // batch norm
  GType *bn_mean_;
  GType *bn_variance_;
  GType *bn_scale_;
  GType *bn_bias_;
  float epsilon_;
};
#endif

#if defined(FUSION_DEQUANT_ADD_BN_RELU_OP) ||  \
    defined(FUSION_DEQUANT_ADD_BN_OP) ||       \
    defined(FUSION_DEQUANT_ADD_BN_QUANT_OP) || \
    defined(FUSION_DEQUANT_ADD_BN_RELU_QUANT_OP)
template <typename Dtype>
class FusionDequantAddBNParam : public FusionDequantBNParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDequantAddBNParam(const VariableNameMap &inputs,
                          const VariableNameMap &outputs,
                          const AttributeMap &attrs, Scope *scope)
      : FusionDequantBNParam<Dtype>(inputs, outputs, attrs, scope) {
    // element wise add params
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
  }

 public:
  // elementwise add
  int axis_;
  GType *bias_;
};
#endif

#ifdef FUSION_DEQUANT_ADD_BN_QUANT_OP
template <typename Dtype>
class FusionDequantAddBNQuantParam : public FusionDequantAddBNParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDequantAddBNQuantParam(const VariableNameMap &inputs,
                               const VariableNameMap &outputs,
                               const AttributeMap &attrs, Scope *scope)
      : FusionDequantAddBNParam<Dtype>(inputs, outputs, attrs, scope) {
    // scale output
    online_scale_ = OpParam::GetVarValue<GType>("OutScale", outputs, *scope);
    // offline quantization: the scale is precomputed and fed in as "InScale"
    if (inputs.count("InScale")) {
      offline_ = true;
      offline_scale_ = OpParam::GetVarValue<GType>("InScale", inputs, *scope);
    }
    // x = round(scale * x)
    if (OpParam::HasAttr("round_type", attrs)) {
      round_type_ = OpParam::GetAttr<RoundType>("round_type", attrs);
    }
  }

 public:
  GType *online_scale_;
  // quantize offline scale
  GType *offline_scale_ = nullptr;
  // whether an offline scale is provided
  bool offline_ = false;
  // round method type
  // RoundType round_type_ = ROUND_NEAREST_AWAY_ZERO;
  RoundType round_type_ = ROUND_NEAREST_TOWARDS_ZERO;
};
#endif

#ifdef SEQUENCE_EXPAND_OP
template <typename Dtype>
class SequenceExpandParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SequenceExpandParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
                      Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    ref_level_ = -1;
    if (OpParam::HasAttr("ref_level", attrs)) {
      ref_level_ = OpParam::GetAttr<int>("ref_level", attrs);
    }
  }

 public:
  GType *input_x_;
  GType *input_y_;
  GType *output_;
  int ref_level_;
};
#endif  // SEQUENCE_EXPAND_OP

#ifdef SEQUENCE_POOL_OP
template <typename Dtype>
class SequencePoolParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SequencePoolParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    pool_type_ = "MAX";
    if (OpParam::HasAttr("pooltype", attrs)) {
      pool_type_ = OpParam::GetStringAttr("pooltype", attrs);
    }
  }

 public:
  GType *input_;
  GType *output_;
  std::string pool_type_;
};
#endif  // SEQUENCE_POOL_OP

#ifdef LOD_RESET_OP
template <typename Dtype>
class LodResetParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  LodResetParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    input_y_ = nullptr;
    if (inputs.count("Y")) {
      input_y_ = InputYFrom<GType>(inputs, *scope);
    } else {
      target_lod_ = OpParam::GetAttr<vector<int>>("target_lod", attrs);
    }
    if (HasAttr("append", attrs)) {
      append = OpParam::GetAttr<bool>("append", attrs);
    }
  }

 public:
  GType *input_x_;
  GType *input_y_;
  GType *output_;
  std::vector<int> target_lod_;
  bool append = false;
};
#endif  // LOD_RESET_OP

#ifdef LESS_THAN_OP
template <typename Dtype>
class CompareParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  CompareParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
  }

 public:
  GType *input_x_;
  GType *input_y_;
  GType *output_;
  int axis_;
};
#endif  // LESS_THAN_OP

#if defined(LOGICAL_AND_OP) || defined(LOGICAL_OR_OP) || defined(LOGICAL_XOR_OP)
template <typename Dtype>
class LogicalBinaryParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  LogicalBinaryParam(const VariableNameMap &inputs,
                     const VariableNameMap &outputs, const AttributeMap &attrs,
                     Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
  }

  const GType *InputX() const { return input_x_; }
  const GType *InputY() const { return input_y_; }
  GType *Out() const { return output_; }

 public:
  GType *input_x_;
  GType *input_y_;
  GType *output_;
};
#endif  // LOGICAL_AND_OP LOGICAL_OR_OP LOGICAL_XOR_OP

#ifdef LOGICAL_NOT_OP
template <typename Dtype>
class LogicalUnaryParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  LogicalUnaryParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
  }

  const GType *InputX() const { return input_x_; }
  GType *Out() const { return output_; }

 public:
  GType *input_x_;
  GType *output_;
};
#endif  // LOGICAL_NOT_OP

#ifdef WRITE_TO_ARRAY_OP
template <typename Dtype>
class WriteToArrayParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  WriteToArrayParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = OpParam::GetVarValue<GType>("X", inputs, *scope);
    index_ = OpParam::GetVarValue<GType>("I", inputs, *scope);
    output_ = OpParam::GetVarValue<std::vector<GType>>("Out", outputs, *scope);
  }

 public:
  GType *input_;
  GType *index_;
  std::vector<GType> *output_;
};
#endif

#ifdef READ_FROM_ARRAY_OP
template <typename Dtype>
class ReadFromArrayParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ReadFromArrayParam(const VariableNameMap &inputs,
                     const VariableNameMap &outputs, const AttributeMap &attrs,
                     Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = OpParam::GetVarValue<std::vector<GType>>("X", inputs, *scope);
    index_ = OpParam::GetVarValue<GType>("I", inputs, *scope);
    output_ = OpParam::GetVarValue<GType>("Out", outputs, *scope);
  }

 public:
  std::vector<GType> *input_;
  GType *index_;
  GType *output_;
};
#endif
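
// WriteToArray/ReadFromArray follow the LoDTensorArray convention: I is a
// scalar index tensor, and element i of the array variable is written or
// read, mirroring the Paddle while-loop lowering.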

#ifdef IS_EMPTY_OP
template <typename Dtype>
class IsEmptyParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  IsEmptyParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
  }

  const GType *InputX() const { return input_x_; }
  GType *Out() const { return output_; }

 public:
  GType *input_x_;
  GType *output_;
};
#endif  // IS_EMPTY_OP

#ifdef INCREMENT_OP
template <typename Dtype>
class IncrementParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  IncrementParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                 const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    step_ = OpParam::GetAttr<float>("step", attrs);
  }

  const GType *InputX() const { return input_x_; }
  GType *Out() const { return output_; }
  float Step() const { return step_; }

 public:
  GType *input_x_;
  GType *output_;
  float step_;
};
#endif  // INCREMENT_OP

#ifdef PAD2D_OP
template <typename Dtype>
class Pad2DParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  Pad2DParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    paddings_ = OpParam::GetAttr<std::vector<int>>("paddings", attrs);
    pad_value_ = OpParam::GetAttr<float>("pad_value", attrs);
    mode_ = OpParam::GetStringAttr("mode", attrs);
    DLOG << "mode: " << mode_;
  }
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return out_; }

  std::vector<int> paddings_;
  float pad_value_;
  std::string mode_;

 private:
  GType *input_x_;
  GType *out_;
};
#endif
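
// paddings_ is assumed to follow the pad2d layout {top, bottom, left,
// right}; mode_ selects "constant", "reflect" or "edge" padding, and
// pad_value_ is used only in constant mode.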
#ifdef EXP_OP
template <typename Dtype>
class EXPParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  EXPParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
  }
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return out_; }

 private:
  GType *input_x_;
  GType *out_;
};
#endif

#ifdef PIXEL_SHUFFLE_OP
template <typename Dtype>
class PixelShuffleParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  PixelShuffleParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    upscale_factor_ = GetAttr<int>("upscale_factor", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  const int &upscale_factor() const { return upscale_factor_; }

 private:
  GType *input_x_;
  GType *out_;
  int upscale_factor_;
};
#endif
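
// Example: with upscale_factor = r, pixel shuffle rearranges an input of
// shape [N, C * r * r, H, W] into [N, C, H * r, W * r], moving channel
// blocks into spatial positions.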

#ifdef GRID_SAMPLER_OP
template <typename Dtype>
class GridSamplerParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  GridSamplerParam(const VariableNameMap &inputs,
                   const VariableNameMap &outputs, const AttributeMap &attrs,
                   Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    grid_ = GridFrom<GType>(inputs, *scope);
    output_ = OutputFrom<GType>(outputs, *scope);
  }

  const GType *InputX() const { return input_x_; }
  const GType *Grid() const { return grid_; }

  GType *Output() const { return output_; }

 private:
  GType *input_x_;
  GType *grid_;
  GType *output_;
};
#endif
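
// Grid is expected to hold normalized sampling coordinates in [-1, 1] with
// shape [N, H_out, W_out, 2]; each output pixel is sampled from InputX at
// the corresponding location, as in the standard grid_sample operator.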

#ifdef EXPAND_OP
template <typename Dtype>
class ExpandParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ExpandParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
              const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    expand_times = OpParam::GetAttr<std::vector<int>>("expand_times", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  std::vector<int> expand_times;

 private:
  GType *input_x_;
  GType *out_;
};

#endif
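
// Example: expand_times = {1, 2, 2} tiles a [3, 4, 5] input into [3, 8, 10];
// dimension i of the input is repeated expand_times[i] times.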
}  // namespace operators
}  // namespace paddle_mobile