op_param.h 113.4 KB
Newer Older
W
wangliu 已提交
1 2 3 4 5 6 7 8 9 10 11 12 13
/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
朔-望's avatar
朔-望 已提交
14

15
#pragma once
朔-望's avatar
朔-望 已提交
16

17
#include <memory>
E
eclipsess 已提交
18
#include <string>
W
wangliu 已提交
19
#include <vector>
L
liuruilong 已提交
20
#include "common/log.h"
朔-望's avatar
朔-望 已提交
21
#include "common/type_define.h"
N
nhzlx 已提交
22
#include "common/types.h"
23
#include "framework/attribute.h"
朔-望's avatar
朔-望 已提交
24 25 26
#include "framework/lod_tensor.h"
#include "framework/scope.h"
#include "framework/tensor.h"
27
#include "framework/type_trait.h"
朔-望's avatar
朔-望 已提交
28
#include "framework/variable.h"
Z
zhangyang 已提交
29 30 31 32 33 34 35

#ifdef PADDLE_MOBILE_FPGA_V1
#include "fpga/V1/api.h"
#endif

#ifdef PADDLE_MOBILE_FPGA_V2
#include "fpga/V2/api.h"
Z
zhangyang 已提交
36
#endif
朔-望's avatar
朔-望 已提交
37

C
Chon 已提交
38 39 40 41
#ifdef PADDLE_MOBILE_FPGA_KD
#include "fpga/KD/context.hpp"
#endif

L
liuruilong 已提交
42 43
#ifdef PADDLE_MOBILE_CL
#include "framework/cl/cl_image.h"
Z
zhangyang 已提交
44
#endif
朔-望's avatar
朔-望 已提交
45 46

namespace paddle_mobile {
朔-望's avatar
朔-望 已提交
47 48
namespace operators {

W
wangliu 已提交
49 50 51 52 53
using framework::Attribute;
using framework::AttributeMap;
using framework::LoDTensor;
using framework::Scope;
using framework::Tensor;
E
eclipsess 已提交
54
using framework::Variable;
W
wangliu 已提交
55 56
using std::string;
using std::vector;
朔-望's avatar
朔-望 已提交
57

58
using framework::DtypeTensorTrait;
L
liuruilong 已提交
59

60 61 62 63 64 65 66 67 68 69 70 71 72 73 74
// Custom deleter for smart pointers that may hold an OpenCL image tensor.
// Under PADDLE_MOBILE_CL the pointee is deleted when it is actually a
// framework::CLImage; in non-CL builds the call is a no-op.
// NOTE(review): in non-CL builds nothing is ever freed here — presumably the
// pointee is owned elsewhere in that configuration; confirm against callers.
template <typename Dtype>
class CLImageDeleter {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;

 public:
  void operator()(GType *ptr) {
#ifdef PADDLE_MOBILE_CL
    auto *cl_image = dynamic_cast<framework::CLImage *>(ptr);
    if (cl_image != nullptr) {
      delete cl_image;
    }
#endif
  }
};

L
liuruilong 已提交
75
class OpParam {
76 77
 public:
  OpParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
78 79
          const AttributeMap &attrs, Scope *scope)
      : scope_(scope) {}
80

81 82
  Scope *GetScope() const { return scope_; }
  Scope *scope_ = nullptr;
83

C
Chon 已提交
84 85 86 87 88 89
#ifdef PADDLE_MOBILE_FPGA_KD
  zynqmp::Context &context() { return context_; }

  zynqmp::Context context_;
#endif

朔-望's avatar
朔-望 已提交
90
 protected:
xiebaiyuan's avatar
xiebaiyuan 已提交
91 92 93 94
  template <typename T>
  static T *InputH0From(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("H0", inputs, scope);
  }
Z
zhaojiaying01 已提交
95 96 97 98 99 100 101

  template <typename T>
  static T *InputHiddenPrevFrom(const VariableNameMap &inputs,
                                const Scope &scope) {
    return GetVarValue<T>("HiddenPrev", inputs, scope);
  }

102 103 104 105 106
  template <typename T>
  static T *InputAlphaFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Alpha", inputs, scope);
  }

107 108 109 110 111 112 113 114 115
  template <typename T>
  static T *InputFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Input", inputs, scope);
  }

  template <typename T>
  static T *InputXFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("X", inputs, scope);
  }
116 117 118 119 120
  template <typename T>
  static T *InputOutSizeFrom(const VariableNameMap &inputs,
                             const Scope &scope) {
    return GetVarValue<T>("OutSize", inputs, scope);
  }
xiebaiyuan's avatar
xiebaiyuan 已提交
121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147

  template <typename T>
  static T *InputWFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("W", inputs, scope);
  }

  template <typename T>
  static T *InputIdsFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Ids", inputs, scope);
  }

  template <typename T>
  static T *InputEmissionFrom(const VariableNameMap &inputs,
                              const Scope &scope) {
    return GetVarValue<T>("Emission", inputs, scope);
  }

  template <typename T>
  static T *InputTransitionFrom(const VariableNameMap &inputs,
                                const Scope &scope) {
    return GetVarValue<T>("Transition", inputs, scope);
  }
  template <typename T>
  static T *InputLabelFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Label", inputs, scope);
  }

148 149 150 151
  template <typename T>
  static T *InputXFrom1(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue1<T>("addX", inputs, scope);
  }
152 153 154 155 156 157

  template <typename T>
  static T *InputYFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Y", inputs, scope);
  }

158 159 160 161 162
  template <typename T>
  static T *InputYFrom1(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue1<T>("Y", inputs, scope);
  }

E
eclipsess 已提交
163 164 165 166 167
  template <typename T>
  static T *InputZFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Z", inputs, scope);
  }

168 169 170 171 172
  template <typename T>
  static T *InputBiasFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Bias", inputs, scope);
  }
  template <typename T>
xiebaiyuan's avatar
xiebaiyuan 已提交
173 174 175 176
  static T *InputWeightFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Weight", inputs, scope);
  }
  template <typename T>
177 178 179 180 181 182 183 184 185 186 187 188
  static T *InputVarianceFrom(const VariableNameMap &inputs,
                              const Scope &scope) {
    return GetVarValue<T>("Variance", inputs, scope);
  }
  template <typename T>
  static T *InputMeanFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Mean", inputs, scope);
  }
  template <typename T>
  static T *InputScaleFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Scale", inputs, scope);
  }
E
eclipsess 已提交
189 190 191 192
  template <typename T>
  static T *InputImageFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Image", inputs, scope);
  }
E
eclipsess 已提交
193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208
  template <typename T>
  static T *InputPriorBoxFrom(const VariableNameMap &inputs,
                              const Scope &scope) {
    return GetVarValue<T>("PriorBox", inputs, scope);
  }
  template <typename T>
  static T *InputPriorBoxVarFrom(const VariableNameMap &inputs,
                                 const Scope &scope) {
    return GetVarValue<T>("PriorBoxVar", inputs, scope);
  }
  // LoDTensor but now use Tensor
  template <typename T>
  static T *InputTargetBoxFrom(const VariableNameMap &inputs,
                               const Scope &scope) {
    return GetVarValue<T>("TargetBox", inputs, scope);
  }
209

E
eclipsess 已提交
210 211 212 213 214 215 216 217 218 219
  template <typename T>
  static T *InputBBoxesFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("BBoxes", inputs, scope);
  }

  template <typename T>
  static T *InputScoresFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Scores", inputs, scope);
  }

E
eclipsess 已提交
220 221 222 223
  template <typename T>
  static T *InputShapeFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Shape", inputs, scope);
  }
E
eclipsess 已提交
224

225
  template <typename T>
W
wangliu 已提交
226 227
  static vector<T *> InputMultiFrom(const VariableNameMap &inputs,
                                    const Scope &scope) {
228 229 230
    return GetMultiVarValue<T>("X", inputs, scope);
  }

E
eclipsess 已提交
231 232 233 234 235
  static vector<Variable *> InputMultiVarsFrom(const VariableNameMap &inputs,
                                               const Scope &scope) {
    return GetMultiVar("X", inputs, scope);
  }

xiebaiyuan's avatar
xiebaiyuan 已提交
236 237 238 239 240 241
  template <typename T>
  static T *OutputBatchGateFrom(const VariableNameMap &outputs,
                                const Scope &scope) {
    return GetVarValue<T>("BatchGate", outputs, scope);
  }

Z
zhaojiaying01 已提交
242 243 244 245 246
  template <typename T>
  static T *OutputGateFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Gate", outputs, scope);
  }

xiebaiyuan's avatar
xiebaiyuan 已提交
247 248 249 250 251 252 253 254 255 256 257
  template <typename T>
  static T *OutputViterbiPathFrom(const VariableNameMap &outputs,
                                  const Scope &scope) {
    return GetVarValue<T>("ViterbiPath", outputs, scope);
  }
  template <typename T>
  static T *OutputBatchResetHiddenPrevFrom(const VariableNameMap &outputs,
                                           const Scope &scope) {
    return GetVarValue<T>("BatchResetHiddenPrev", outputs, scope);
  }

Z
zhaojiaying01 已提交
258 259 260 261 262 263
  template <typename T>
  static T *OutputResetHiddenPrevFrom(const VariableNameMap &outputs,
                                      const Scope &scope) {
    return GetVarValue<T>("ResetHiddenPrev", outputs, scope);
  }

xiebaiyuan's avatar
xiebaiyuan 已提交
264 265 266 267 268 269 270 271 272 273 274 275
  template <typename T>
  static T *OutputBatchHiddenFrom(const VariableNameMap &outputs,
                                  const Scope &scope) {
    return GetVarValue<T>("BatchHidden", outputs, scope);
  }

  template <typename T>
  static T *OutputHiddenFrom(const VariableNameMap &outputs,
                             const Scope &scope) {
    return GetVarValue<T>("Hidden", outputs, scope);
  }

276 277 278 279 280
  template <typename T>
  static T *OutputFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Output", outputs, scope);
  }

E
eclipsess 已提交
281 282 283 284 285
  static Variable *OutVarFrom(const VariableNameMap &outputs,
                              const Scope &scope) {
    return GetVar("Out", outputs, scope);
  }

286 287 288 289 290
  template <typename T>
  static T *OutFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Out", outputs, scope);
  }

xiebaiyuan's avatar
xiebaiyuan 已提交
291 292 293 294 295 296
  template <typename T>
  static vector<T *> OutMultiFrom(const VariableNameMap &outputs,
                                  const Scope &scope) {
    return GetMultiVarValue<T>("Out", outputs, scope);
  }

297 298 299 300 301
  template <typename T>
  static T *OutputYFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Y", outputs, scope);
  }

L
lijiancheng0614 已提交
302 303 304 305 306 307
  template <typename T>
  static T *OutputXShapeFrom(const VariableNameMap &outputs,
                             const Scope &scope) {
    return GetVarValue<T>("XShape", outputs, scope);
  }

E
eclipsess 已提交
308 309 310 311 312 313
  template <typename T>
  static T *OutputBoxesFrom(const VariableNameMap &outputs,
                            const Scope &scope) {
    return GetVarValue<T>("Boxes", outputs, scope);
  }

E
eclipsess 已提交
314 315 316 317 318
  template <typename T>
  static T *OutputBoxFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("OutputBox", outputs, scope);
  }

Z
zhaojiaying01 已提交
319 320 321 322 323
  template <typename T>
  static T *OutputNormFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Norm", outputs, scope);
  }

E
eclipsess 已提交
324 325 326 327 328 329
  template <typename T>
  static T *OutputVariancesFrom(const VariableNameMap &outputs,
                                const Scope &scope) {
    return GetVarValue<T>("Variances", outputs, scope);
  }

330 331 332 333 334 335 336 337 338 339
  template <typename T>
  static T *MidOutFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("MidOut", outputs, scope);
  }

  template <typename T>
  static T *FilterFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Filter", inputs, scope);
  }

340 341 342 343 344
  template <typename T>
  static T *GridFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Grid", inputs, scope);
  }

345
  template <typename T>
W
wangliu 已提交
346
  static const T GetAttr(const string &key, const AttributeMap &map) {
347 348
    PADDLE_MOBILE_ENFORCE(HasAttr(key, map), "%s is not contained in attr map",
                          key.c_str())
349 350
    return ((Attribute)map.at(key)).Get<T>();
  }
xiebaiyuan's avatar
xiebaiyuan 已提交
351 352
  static const std::string GetStringAttr(const string &key,
                                         const AttributeMap &map) {
353 354
    PADDLE_MOBILE_ENFORCE(HasAttr(key, map), "%s is not contained in attr map",
                          key.c_str())
355 356
    return ((Attribute)map.at(key)).GetString();
  }
357

358 359 360 361
  static const bool HasAttr(const string &key, const AttributeMap &map) {
    return map.count(key) > 0;
  }

362 363 364 365
  static const bool HasVar(const string &key, const VariableNameMap &var_map) {
    return var_map.count(key) > 0;
  }

366
  template <typename T>
W
wangliu 已提交
367
  static T *GetVarValue(const string &key, const VariableNameMap &var_map,
368
                        const Scope &scope) {
W
wangliu 已提交
369 370
    PADDLE_MOBILE_ENFORCE(var_map.count(key) > 0,
                          "%s is not contained in var_map", key.c_str())
371 372 373 374 375 376
    auto var_vec = var_map.at(key);
    if (!var_vec.empty()) {
      auto var = scope.FindVar(var_vec[0]);
      return var->GetMutable<T>();
    } else {
      return nullptr;
朔-望's avatar
朔-望 已提交
377
    }
378
  }
朔-望's avatar
朔-望 已提交
379

E
eclipsess 已提交
380 381 382 383 384 385 386 387 388 389 390 391 392
  static Variable *GetVar(const string &key, const VariableNameMap &var_map,
                          const Scope &scope) {
    PADDLE_MOBILE_ENFORCE(var_map.count(key) > 0,
                          "%s is not contained in var_map", key.c_str())
    auto var_vec = var_map.at(key);
    if (!var_vec.empty()) {
      auto var = scope.FindVar(var_vec[0]);
      return var;
    } else {
      return nullptr;
    }
  }

393
  static std::string Getkey(const string &key, const VariableNameMap &var_map,
394
                            int index) {
395 396
    PADDLE_MOBILE_ENFORCE(var_map.count(key) > index,
                          "%s is not contained in var_map", key.c_str())
397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414
    auto var_vec = var_map.at(key);
    return var_vec[index];
  }

  template <typename T>
  static T *GetVarValue1(const string &key, const VariableNameMap &var_map,
                         const Scope &scope) {
    PADDLE_MOBILE_ENFORCE(var_map.count(key) > 0,
                          "%s is not contained in var_map", key.c_str())
    auto var_vec = var_map.at(key);
    if (!var_vec.empty()) {
      auto var = scope.FindVar(var_vec[1]);
      return var->GetMutable<T>();
    } else {
      return nullptr;
    }
  }

415
  template <typename T>
W
wangliu 已提交
416 417 418
  static vector<T *> GetMultiVarValue(const string &key,
                                      const VariableNameMap &var_map,
                                      const Scope &scope) {
419 420
    auto var_vecs = var_map.at(key);
    assert(var_vecs.size() > 1);
W
wangliu 已提交
421
    vector<T *> var_res;
422 423 424
    for (auto &var_vec : var_vecs) {
      auto var = scope.FindVar(var_vec);
      var_res.push_back(var->GetMutable<T>());
朔-望's avatar
朔-望 已提交
425
    }
426 427
    return var_res;
  }
E
eclipsess 已提交
428 429 430 431 432 433 434 435 436 437 438 439 440

  static vector<Variable *> GetMultiVar(const string &key,
                                        const VariableNameMap &var_map,
                                        const Scope &scope) {
    auto var_vecs = var_map.at(key);
    assert(var_vecs.size() > 1);
    vector<Variable *> var_res;
    for (auto &var_vec : var_vecs) {
      auto var = scope.FindVar(var_vec);
      var_res.push_back(var);
    }
    return var_res;
  }
朔-望's avatar
朔-望 已提交
441 442
};

443 444 445 446 447 448
#define GET_VAR_AS_TENSOR(name, name_dict, scope) \
  OpParam::GetVarValue<framework::Tensor>(name, name_dict, scope)

#define GET_VAR_AS_LOD_TENSOR(name, name_dict, scope) \
  OpParam::GetVarValue<framework::LoDTensor>(name, name_dict, scope)

N
nhzlx 已提交
449
template <typename Dtype>
450
class ConvParam : public OpParam {
N
nhzlx 已提交
451 452 453
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

朔-望's avatar
朔-望 已提交
454
 public:
455
  ConvParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
456 457 458 459
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    filter_ = OpParam::FilterFrom<GType>(inputs, *scope);
    input_ = OpParam::InputFrom<GType>(inputs, *scope);
460
    if (outputs.count("Output")) {
461
      output_ = OpParam::OutputFrom<GType>(outputs, *scope);
462 463 464 465 466
    }
    strides_ = OpParam::GetAttr<vector<int>>("strides", attrs);
    paddings_ = OpParam::GetAttr<vector<int>>("paddings", attrs);
    dilations_ = OpParam::GetAttr<vector<int>>("dilations", attrs);
    groups = OpParam::GetAttr<int>("groups", attrs);
467
  }
朔-望's avatar
朔-望 已提交
468

469
  const GType *Input() const { return input_; }
朔-望's avatar
朔-望 已提交
470

471
  GType *Filter() const { return filter_; }
朔-望's avatar
朔-望 已提交
472

473
  GType *Output() const { return output_; }
朔-望's avatar
朔-望 已提交
474

W
wangliu 已提交
475
  const vector<int> &Strides() const { return strides_; }
朔-望's avatar
朔-望 已提交
476

W
wangliu 已提交
477
  const vector<int> &Paddings() const { return paddings_; }
朔-望's avatar
朔-望 已提交
478

W
wangliu 已提交
479
  const vector<int> &Dilations() const { return dilations_; }
朔-望's avatar
朔-望 已提交
480

H
hjchen2 已提交
481 482 483
  enum ExecMode {
    EXEC_INVALID = 0,
    EXEC_GEMM_FLOAT,
484 485
    EXEC_DEPTHWISE3x3S1_FLOAT,
    EXEC_DEPTHWISE3x3S2_FLOAT,
H
hjchen2 已提交
486 487
    EXEC_WINOGRAD3X3_FLOAT,
    EXEC_WINOGRAD5X5_FLOAT,
488
    EXEC_DEPTHWISE5x5_FLOAT,
H
hjchen2 已提交
489
    EXEC_GEMM_INT8,
H
hjchen2 已提交
490
    EXEC_DEPTHWISE3x3_INT8,
491
    EXEC_DEPTHWISE5x5_INT8,
S
StarryRain 已提交
492 493
    EXEC_SLIDINGWINDOW3x3S1_FLOAT,
    EXEC_SLIDINGWINDOW3x3S2_FLOAT,
494 495 496 497 498
    EXEC_DEPTHWISE3x3_FLOAT,
    EXEC_SLIDINGWINDOW1x1_FLOAT,
    EXEC_SLIDINGWINDOW3x3_FLOAT,
    EXEC_SLIDINGWINDOW5x5_FLOAT,
    EXEC_SLIDINGWINDOW7x7_FLOAT,
499
    EXEC_GEMM1x1s1_FLOAT,
500
    EXEC_DEPTHWISEBASIC_FLOAT,
H
hjchen2 已提交
501 502 503 504
  };

  ExecMode &ExecMode() const { return exec_mode_; }

505
  const int &Groups() const { return groups; }
朔-望's avatar
朔-望 已提交
506

507 508 509 510 511 512 513
#ifdef PADDLE_MOBILE_CL
  int Offset() const { return offset_; }

  int SetOffset(int in_offset) { offset_ = in_offset; }

#endif

H
hjchen2 已提交
514
 public:
515 516 517 518
  GType *input_;
  GType *output_;
  GType *filter_;
  GType *transformed_filter_;
W
wangliu 已提交
519 520 521
  vector<int> strides_;
  vector<int> paddings_;
  vector<int> dilations_;
H
hjchen2 已提交
522
  mutable enum ExecMode exec_mode_;
523
  int groups;
524 525 526 527

#ifdef PADDLE_MOBILE_CL
  int offset_;
#endif
Z
zhangyang 已提交
528 529 530

#ifdef PADDLE_MOBILE_FPGA

H
hjchen2 已提交
531
 public:
Z
zhangyang 已提交
532 533 534 535 536
  fpga::SplitConvArgs fpga_conv_args;

 public:
  const fpga::SplitConvArgs &FpgaArgs() const { return fpga_conv_args; }
  void SetFpgaArgs(const fpga::SplitConvArgs &args) { fpga_conv_args = args; }
537 538 539 540 541 542 543

 public:
  fpga::DWconvArgs fpga_dwconv_args;

 public:
  const fpga::DWconvArgs &FpgaDwconvArgs() const { return fpga_dwconv_args; }
  void SetFpgaArgs(const fpga::DWconvArgs &args) { fpga_dwconv_args = args; }
Z
zhangyang 已提交
544
#endif
朔-望's avatar
朔-望 已提交
545
};
N
nhzlx 已提交
546 547
template <typename Dtype>
Print &operator<<(Print &printer, const ConvParam<Dtype> &conv_param);
朔-望's avatar
朔-望 已提交
548

N
nhzlx 已提交
549
template <typename Dtype>
550
class ElementwiseAddParam : public OpParam {
N
nhzlx 已提交
551 552 553
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

朔-望's avatar
朔-望 已提交
554
 public:
555
  ElementwiseAddParam(const VariableNameMap &inputs,
556
                      const VariableNameMap &outputs, const AttributeMap &attrs,
557 558 559 560 561
                      Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
562 563 564
    axis_ = GetAttr<int>("axis", attrs);
  }

xiebaiyuan's avatar
xiebaiyuan 已提交
565
  const GType *InputX() const { return input_x_; }
566

xiebaiyuan's avatar
xiebaiyuan 已提交
567
  const GType *InputY() const { return input_y_; }
568

xiebaiyuan's avatar
xiebaiyuan 已提交
569
  GType *Out() const { return out_; }
570 571 572

  const int &Axis() const { return axis_; }

朔-望's avatar
朔-望 已提交
573
 private:
xiebaiyuan's avatar
xiebaiyuan 已提交
574 575 576
  GType *input_x_;
  GType *input_y_;
  GType *out_;
577
  int axis_;
Z
zhangyang 已提交
578 579 580
#ifdef PADDLE_MOBILE_FPGA

 private:
H
hanbuhe 已提交
581
  fpga::EWAddArgs fpga_EW_add_args;
Z
zhangyang 已提交
582 583

 public:
H
hanbuhe 已提交
584 585
  const fpga::EWAddArgs &FpgaArgs() const { return fpga_EW_add_args; }
  void SetFpgaArgs(const fpga::EWAddArgs &args) { fpga_EW_add_args = args; }
qnqinan's avatar
qnqinan 已提交
586 587 588 589

 public:
  Tensor float_input_x, float_out;

Z
zhangyang 已提交
590
#endif
朔-望's avatar
朔-望 已提交
591 592
};

E
eclipsess 已提交
593
#ifdef ELEMENTWISEMUL_OP
// Parameters of elementwise_mul: Out = X * Y, with Y broadcast along `axis`.
template <typename Dtype>
class ElementwiseMulParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ElementwiseMulParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
                      Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    axis_ = GetAttr<int>("axis", attrs);
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
  }

  const GType *InputX() const { return input_x_; }

  const GType *InputY() const { return input_y_; }

  GType *Out() const { return out_; }

  const int &Axis() const { return axis_; }

 private:
  GType *input_x_;
  GType *input_y_;
  GType *out_;
  int axis_;
#ifdef PADDLE_MOBILE_FPGA

 public:
  // Float staging buffers used by the FPGA path.
  Tensor float_input_x, float_out;

#endif
};
#endif  // ELEMENTWISEMUL_OP
E
eclipsess 已提交
631

632
#ifdef FUSION_ELEMENTWISEADDRELU_OP
N
nhzlx 已提交
633 634
// fusion_elementwise_add_relu consumes the same inputs/attributes as plain
// elementwise_add, so its param type is just an alias.
template <typename Dtype>
using ElementwiseAddReluParam = ElementwiseAddParam<Dtype>;
L
liuruilong 已提交
635 636
#endif

637
#ifdef ELEMENTWISESUB_OP
638
template <typename Dtype>
639
class ElementwiseSubParam : public OpParam {
640 641 642 643 644 645
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ElementwiseSubParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
646 647 648 649 650
                      Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
651 652 653 654 655 656 657 658 659 660 661 662 663 664 665 666 667
    axis_ = GetAttr<int>("axis", attrs);
  }

  const GType *InputX() const { return input_x_; }

  const GType *InputY() const { return input_y_; }

  GType *Out() const { return out_; }

  const int &Axis() const { return axis_; }

 private:
  GType *input_x_;
  GType *input_y_;
  GType *out_;
  int axis_;
};
668
#endif
669

L
liuruilong 已提交
670
#ifdef MUL_OP
N
nhzlx 已提交
671
template <typename Dtype>
672
class MulParam : public OpParam {
N
nhzlx 已提交
673 674 675
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

朔-望's avatar
朔-望 已提交
676
 public:
677
  MulParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
678 679 680 681 682
           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
683 684 685
    x_num_col_dims_ = GetAttr<int>("x_num_col_dims", attrs);
    y_num_col_dims_ = GetAttr<int>("y_num_col_dims", attrs);
  }
朔-望's avatar
朔-望 已提交
686

687
  GType *InputX() const { return input_x_; }
朔-望's avatar
朔-望 已提交
688

689
  GType *InputY() const { return input_y_; }
朔-望's avatar
朔-望 已提交
690

xiebaiyuan's avatar
xiebaiyuan 已提交
691
  GType *Out() const { return out_; }
朔-望's avatar
朔-望 已提交
692

693
  const int &XNumColDims() const { return x_num_col_dims_; }
朔-望's avatar
朔-望 已提交
694

695
  const int &YNumColDims() const { return y_num_col_dims_; }
朔-望's avatar
朔-望 已提交
696

朔-望's avatar
朔-望 已提交
697
 private:
xiebaiyuan's avatar
xiebaiyuan 已提交
698 699 700
  GType *input_x_;
  GType *input_y_;
  GType *out_;
701 702
  int x_num_col_dims_;
  int y_num_col_dims_;
朔-望's avatar
朔-望 已提交
703
};
L
liuruilong 已提交
704
#endif
朔-望's avatar
朔-望 已提交
705

L
liuruilong 已提交
706
#ifdef CONCAT_OP
N
nhzlx 已提交
707
template <typename Dtype>
朔-望's avatar
朔-望 已提交
708
class ConcatParam : public OpParam {
N
nhzlx 已提交
709 710 711
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

朔-望's avatar
朔-望 已提交
712
 public:
713
  ConcatParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
714 715 716 717
              const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    inputs_ = InputMultiFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
718
    axis_ = GetAttr<int>("axis", attrs);
719
    original_output_dims_size_ = out_->dims().size();
720
  }
朔-望's avatar
朔-望 已提交
721

N
nhzlx 已提交
722
  vector<GType *> Inputs() const { return inputs_; }
朔-望's avatar
朔-望 已提交
723

xiebaiyuan's avatar
xiebaiyuan 已提交
724
  GType *Out() const { return out_; }
朔-望's avatar
朔-望 已提交
725

726
  const int &Axis() const { return axis_; }
朔-望's avatar
朔-望 已提交
727

728
 public:
N
nhzlx 已提交
729
  vector<GType *> inputs_;
xiebaiyuan's avatar
xiebaiyuan 已提交
730
  GType *out_;
731
  int axis_;
732
  int original_output_dims_size_;
Z
zhangyang 已提交
733 734 735 736 737 738 739 740 741
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::ConcatArgs fpga_concat_args;

 public:
  const fpga::ConcatArgs &FpgaArgs() const { return fpga_concat_args; }
  void SetFpgaArgs(const fpga::ConcatArgs &args) { fpga_concat_args = args; }
#endif
朔-望's avatar
朔-望 已提交
742
};
L
liuruilong 已提交
743
#endif
朔-望's avatar
朔-望 已提交
744

E
eclipsess 已提交
745 746 747 748 749 750 751 752
#ifdef SUM_OP
// Parameters of sum: Out = sum of all variables registered under slot "X".
// Both the raw Variable* handles and the unwrapped tensors are kept.
template <typename Dtype>
class SumParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SumParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    inputs_vars_ = InputMultiVarsFrom(inputs, *scope);
    out_var_ = OutVarFrom(outputs, *scope);
    inputs_ = InputMultiFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
  }

  vector<Variable *> InputsVars() const { return inputs_vars_; }

  Variable *OutVar() const { return out_var_; }

  vector<GType *> Inputs() const { return inputs_; }

  GType *Out() const { return out_; }

 private:
  vector<Variable *> inputs_vars_;
  Variable *out_var_;
  vector<GType *> inputs_;
  GType *out_;
};
#endif  // SUM_OP

L
liuruilong 已提交
777
#ifdef LRN_OP
// Parameters of lrn (local response normalization): normalizes X over `n`
// neighboring values with coefficients alpha/beta/k, writing Out and the
// intermediate MidOut.
template <typename Dtype>
class LrnParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  LrnParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    mid_out_ = MidOutFrom<GType>(outputs, *scope);
    n_ = GetAttr<int>("n", attrs);
    alpha_ = GetAttr<float>("alpha", attrs);
    beta_ = GetAttr<float>("beta", attrs);
    k_ = GetAttr<float>("k", attrs);
    data_format_ = GetStringAttr("data_format", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  GType *MidOut() const { return mid_out_; }

  const int &N() const { return n_; }

  const float &Alpha() const { return alpha_; }

  const float &Beta() const { return beta_; }

  const float &K() const { return k_; }

  const string &DataFormat() const { return data_format_; }

 private:
  GType *input_x_;
  GType *out_;
  GType *mid_out_;
  int n_;
  float alpha_;
  float beta_;
  float k_;
  string data_format_;
};
#endif  // LRN_OP

Z
zhaojiaying01 已提交
825 826
#ifdef NORM_OP
// Parameters of the norm operator: normalizes X along `axis` (epsilon added
// for numerical stability) and also emits the computed Norm tensor.
template <typename Dtype>
class NormParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  NormParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    epsilon_ = GetAttr<float>("epsilon", attrs);
    axis_ = GetAttr<int>("axis", attrs);
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    output_norm_ = OutputNormFrom<GType>(outputs, *scope);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  GType *OutputNorm() const { return output_norm_; }

  const float &Epsilon() const { return epsilon_; }

  const int &Axis() const { return axis_; }

 private:
  GType *input_x_;
  GType *out_;
  GType *output_norm_;
  float epsilon_;
  int axis_;
};
#endif  // NORM_OP

L
liuruilong 已提交
861
#ifdef BATCHNORM_OP
// Parameter pack for the batch_norm operator: resolves the input/output
// tensors plus the learned Scale/Bias and running Mean/Variance tensors, and
// reads the `epsilon`/`momentum` attributes.
template <typename Dtype>
class BatchNormParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  BatchNormParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                 const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    output_y_ = OutputYFrom<GType>(outputs, *scope);
    input_bias_ = InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = GetAttr<float>("epsilon", attrs);
    momentum_ = GetAttr<float>("momentum", attrs);
    //    is_test_ = GetAttr<bool>("is_test", attrs);
  }

  ~BatchNormParam() {}

  const GType *InputX() const { return input_x_; }

  GType *OutputY() const { return output_y_; }

  const GType *InputBias() const { return input_bias_; }

  const GType *InputMean() const { return input_mean_; }

  const GType *InputScale() const { return input_scale_; }

  const GType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  // The "is_test" attribute read is disabled in the constructor above;
  // is_test_ now has a default member initializer so this accessor no longer
  // returns an indeterminate (uninitialized) value.
  const bool &IsTest() const { return is_test_; }

  // NOTE(review): data_format_ is never assigned in this class, so this
  // returns an empty string — confirm whether a "data_format" attribute read
  // was intended.
  const string &DataFormat() const { return data_format_; }

  // Takes ownership of `new_scale`; released through CLImageDeleter.
  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  // Takes ownership of `new_bias`; released through CLImageDeleter.
  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const GType *NewScale() const { return new_scale_.get(); }

  const GType *NewBias() const { return new_bias_.get(); }

 private:
  GType *input_x_;
  GType *output_y_;
  GType *input_bias_;
  GType *input_mean_;
  GType *input_scale_;
  GType *input_variance_;
  float epsilon_;
  float momentum_;
  bool is_test_ = false;  // fix: was read via IsTest() while uninitialized
  string data_format_;
  std::shared_ptr<GType> new_bias_;
  std::shared_ptr<GType> new_scale_;
};
#endif

932 933 934 935 936 937 938 939 940 941 942 943
#ifdef INSTANCENORM_OP
// Parameter pack for the instance_norm operator: input "X", output "Y" and
// the `epsilon` attribute.
template <typename Dtype>
class InstanceNormParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  InstanceNormParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    output_y_ = OutputYFrom<GType>(outputs, *scope);
    input_x_ = InputXFrom<GType>(inputs, *scope);
    epsilon_ = GetAttr<float>("epsilon", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *OutputY() const { return output_y_; }

  const float &Epsilon() const { return epsilon_; }

 private:
  GType *input_x_;
  GType *output_y_;
  float epsilon_;
};
#endif

#ifdef FUSION_INSTANCENORM_RELU_OP
// Parameter pack for the fused instance_norm + relu operator: input "X",
// output "Out" and the `epsilon` attribute.
template <typename Dtype>
class FusionInstanceNormReluParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionInstanceNormReluParam(const VariableNameMap &inputs,
                              const VariableNameMap &outputs,
                              const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    out_ = OutFrom<GType>(outputs, *scope);
    input_x_ = InputXFrom<GType>(inputs, *scope);
    epsilon_ = GetAttr<float>("epsilon", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  const float &Epsilon() const { return epsilon_; }

 private:
  GType *input_x_;
  GType *out_;
  float epsilon_;
};
#endif

L
liuruilong 已提交
990
#ifdef POOL_OP
// Parameter pack for the pool2d operator: input/output tensors plus the
// pooling type, window size, strides, paddings and mode flags.
template <typename Dtype>
class PoolParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  PoolParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    pooling_type_ = GetStringAttr("pooling_type", attrs);
    ksize_ = GetAttr<vector<int>>("ksize", attrs);
    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
    ceil_mode_ = GetAttr<bool>("ceil_mode", attrs);
    global_pooling_ = GetAttr<bool>("global_pooling", attrs);
    // Older program descs may lack "exclusive"; default it to true.
    exclusive_ =
        HasAttr("exclusive", attrs) ? GetAttr<bool>("exclusive", attrs) : true;
  }

  const GType *Input() const { return input_; }

  GType *Output() const { return output_; }

  const string &PoolingType() const { return pooling_type_; }

  const vector<int> &Ksize() const { return ksize_; }

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

  bool isCeilMode() const { return ceil_mode_; }

  bool isGlobalPooling() const { return global_pooling_; }

  bool isExclusive() const { return exclusive_; }

 private:
  GType *input_;
  GType *output_;
  string pooling_type_;
  vector<int> ksize_;
  vector<int> strides_;
  vector<int> paddings_;
  bool ceil_mode_;
  bool global_pooling_ = false;
  bool exclusive_ = true;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::PoolingArgs fpga_pool_args;

 public:
  const fpga::PoolingArgs &FpgaArgs() const { return fpga_pool_args; }
  void SetFpgaArgs(const fpga::PoolingArgs &args) { fpga_pool_args = args; }
#endif
};
#endif

#ifdef PRIORBOX_OP
// Parameter pack for the prior_box operator: feature-map and image inputs,
// the generated box/variance outputs, and the box-generation attributes.
template <typename Dtype>
class PriorBoxParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  PriorBoxParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputFrom<GType>(inputs, *scope);
    input_image_ = InputImageFrom<GType>(inputs, *scope);
    output_boxes_ = OutputBoxesFrom<GType>(outputs, *scope);
    output_variances_ = OutputVariancesFrom<GType>(outputs, *scope);
    min_sizes_ = GetAttr<vector<float>>("min_sizes", attrs);
    max_sizes_ = GetAttr<vector<float>>("max_sizes", attrs);
    aspect_ratios_ = GetAttr<vector<float>>("aspect_ratios", attrs);
    variances_ = GetAttr<vector<float>>("variances", attrs);
    // Optional attribute in older program descs; default to false.
    min_max_aspect_ratios_order_ =
        HasAttr("min_max_aspect_ratios_order", attrs)
            ? GetAttr<bool>("min_max_aspect_ratios_order", attrs)
            : false;
    flip_ = GetAttr<bool>("flip", attrs);
    clip_ = GetAttr<bool>("clip", attrs);
    step_w_ = GetAttr<float>("step_w", attrs);
    step_h_ = GetAttr<float>("step_h", attrs);
    offset_ = GetAttr<float>("offset", attrs);
  }
  const GType *Input() const { return input_; }

  const GType *InputImage() const { return input_image_; }

  GType *OutputBoxes() const { return output_boxes_; }

  GType *OutputVariances() const { return output_variances_; }

  const vector<float> &MinSizes() const { return min_sizes_; }

  const vector<float> &MaxSizes() const { return max_sizes_; }

  const vector<float> &AspectRatios() const { return aspect_ratios_; }

  const vector<float> &Variances() const { return variances_; }

  const bool &Flip() const { return flip_; }

  const bool &Clip() const { return clip_; }

  const float &StepW() const { return step_w_; }

  const float &StepH() const { return step_h_; }

  const float &Offset() const { return offset_; }

  const bool &MinMaxAspectRatiosOrder() const {
    return min_max_aspect_ratios_order_;
  }

 private:
  GType *input_;
  GType *input_image_;
  GType *output_boxes_;
  GType *output_variances_;
  vector<float> min_sizes_;
  vector<float> max_sizes_;
  vector<float> aspect_ratios_;
  vector<float> variances_;
  bool flip_;
  bool clip_;
  float step_w_;
  float step_h_;
  float offset_;
  bool min_max_aspect_ratios_order_;
};
#endif
E
eclipsess 已提交
1135

L
liuruilong 已提交
1136
#ifdef BOXCODER_OP
// Parameter pack for the box_coder operator: prior boxes (with optional
// variances), target boxes, the decoded output box tensor and the code type.
template <typename Dtype>
class BoxCoderParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  BoxCoderParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_priorbox_ = InputPriorBoxFrom<GType>(inputs, *scope);
    input_priorboxvar_ = InputPriorBoxVarFrom<GType>(inputs, *scope);
    input_targetbox_ = InputTargetBoxFrom<GType>(inputs, *scope);
    output_box_ = OutputBoxFrom<GType>(outputs, *scope);
    code_type_ = GetStringAttr("code_type", attrs);
  }

  const GType *InputPriorBox() const { return input_priorbox_; }

  const GType *InputPriorBoxVar() const { return input_priorboxvar_; }

  const GType *InputTargetBox() const { return input_targetbox_; }

  GType *OutputBox() const { return output_box_; }

  const std::string &CodeType() const { return code_type_; }

 private:
  GType *input_priorbox_;
  GType *input_priorboxvar_;
  GType *input_targetbox_;
  GType *output_box_;
  std::string code_type_;
};
#endif
W
wangliu 已提交
1170

L
liuruilong 已提交
1171
#ifdef SOFTMAX_OP
// Parameter pack for the softmax operator: input "X" and output "Out".
// The FPGA builds carry additional bypass-conversion state.
template <typename Dtype>
class SoftmaxParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SoftmaxParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    out_ = OutFrom<GType>(outputs, *scope);
    input_x_ = InputXFrom<GType>(inputs, *scope);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

 private:
  GType *input_x_;
  GType *out_;

#ifdef PADDLE_MOBILE_FPGA

#ifdef PADDLE_MOBILE_FPGA_V1

 private:
  std::shared_ptr<GType> float_input_x_;  // owns the float-converted input
  fpga::BypassArgs fpga_bypass_args;

 public:
  // Returns the float copy of the input when one was set, else the raw input.
  GType *FloatInput() const {
    return float_input_x_ == nullptr ? input_x_ : float_input_x_.get();
  }
  // Takes ownership of `input`.
  void SetFloatInput(LoDTensor *input) { float_input_x_.reset(input); }
  const fpga::BypassArgs &FpgaArgs() const { return fpga_bypass_args; }
  void SetFpgaArgs(const fpga::BypassArgs &args) { fpga_bypass_args = args; }
#else

 private:
  fpga::BypassArgs fpga_bypass_args;

 public:
  const fpga::BypassArgs &FpgaArgs() const { return fpga_bypass_args; }
  void SetFpgaArgs(const fpga::BypassArgs &args) { fpga_bypass_args = args; }

 public:
  std::shared_ptr<Tensor> float_input_x_, float_out;
#endif

#endif
};
#endif
W
wangliu 已提交
1221

L
liuruilong 已提交
1222
#ifdef SIGMOID_OP
// Parameter pack for the sigmoid operator: input "X" and output "Out",
// plus FPGA bypass arguments on FPGA builds.
template <typename Dtype>
class SigmoidParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SigmoidParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    out_ = OutFrom<GType>(outputs, *scope);
    input_x_ = InputXFrom<GType>(inputs, *scope);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

 private:
  GType *input_x_;
  GType *out_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::BypassArgs fpga_bypass_args;

 public:
  const fpga::BypassArgs &FpgaArgs() const { return fpga_bypass_args; }
  void SetFpgaArgs(const fpga::BypassArgs &args) { fpga_bypass_args = args; }
#endif
};
#endif

#ifdef MULTICLASSNMS_OP
// Parameter pack for the multiclass_nms operator: candidate boxes and scores
// in, the filtered detections out, plus all NMS threshold/limit attributes.
template <typename Dtype>
class MultiClassNMSParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  MultiClassNMSParam(const VariableNameMap &inputs,
                     const VariableNameMap &outputs, const AttributeMap &attrs,
                     Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_bboxes_ = InputBBoxesFrom<GType>(inputs, *scope);
    input_scores_ = InputScoresFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    // Scalar attributes controlling the suppression.
    score_threshold_ = GetAttr<float>("score_threshold", attrs);
    nms_threshold_ = GetAttr<float>("nms_threshold", attrs);
    nms_eta_ = GetAttr<float>("nms_eta", attrs);
    nms_top_k_ = GetAttr<int>("nms_top_k", attrs);
    keep_top_k_ = GetAttr<int>("keep_top_k", attrs);
    background_label_ = GetAttr<int>("background_label", attrs);
  }

  GType *InputBBoxes() const { return input_bboxes_; }

  GType *InputScores() const { return input_scores_; }

  GType *Out() const { return out_; }

  const int &BackGroundLabel() const { return background_label_; }

  const int &NMSTopK() const { return nms_top_k_; }

  const int &KeepTopK() const { return keep_top_k_; }

  const float &NMSThreshold() const { return nms_threshold_; }

  const float &NMSEta() const { return nms_eta_; }

  const float &ScoreThreshold() const { return score_threshold_; }

 private:
  GType *input_bboxes_;
  GType *input_scores_;
  GType *out_;
  int background_label_;
  int nms_top_k_;
  int keep_top_k_;
  float nms_threshold_;
  float nms_eta_;
  float score_threshold_;
};
#endif
W
wangliu 已提交
1305

L
lijiancheng0614 已提交
1306 1307 1308 1309 1310 1311 1312 1313 1314
#ifdef POLYGONBOXTRANSFORM_OP
// Parameter pack for the polygon_box_transform operator: one input tensor,
// one output tensor, no attributes.
template <typename Dtype>
class PolygonBoxTransformParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  PolygonBoxTransformParam(const VariableNameMap &inputs,
                           const VariableNameMap &outputs,
                           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    output_ = OutputFrom<GType>(outputs, *scope);
    input_ = InputFrom<GType>(inputs, *scope);
  }

  const GType *Input() const { return input_; }

  GType *Output() const { return output_; }

 private:
  GType *input_;
  GType *output_;
};
#endif

N
nhzlx 已提交
1329
template <typename Dtype>
L
liuruilong 已提交
1330
class FeedParam : public OpParam {
N
nhzlx 已提交
1331 1332 1333
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

L
liuruilong 已提交
1334 1335
 public:
  FeedParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
H
update  
hjchen2 已提交
1336
            const AttributeMap &attrs, Scope *scope)
1337
      : OpParam(inputs, outputs, attrs, scope) {
H
hjchen2 已提交
1338
    input_x_ = InputXFrom<std::vector<LoDTensor>>(inputs, *scope);
H
update  
hjchen2 已提交
1339
    out_ = OutFrom<GType>(outputs, *scope);
H
update  
hjchen2 已提交
1340
    col_ = GetAttr<int>("col", attrs);
H
update  
hjchen2 已提交
1341
    auto var = scope->FindVar("batch_size");
W
wangliu 已提交
1342
    batch_size = var->GetValue<int>();
L
liuruilong 已提交
1343
  }
H
hjchen2 已提交
1344
  const std::vector<LoDTensor> *InputX() const { return input_x_; }
xiebaiyuan's avatar
xiebaiyuan 已提交
1345
  GType *Out() const { return out_; }
H
update  
hjchen2 已提交
1346
  const int Col() const { return col_; }
W
wangliu 已提交
1347
  const int BatchSize() const { return batch_size; }
L
liuruilong 已提交
1348

L
liuruilong 已提交
1349
 private:
H
hjchen2 已提交
1350
  std::vector<LoDTensor> *input_x_;
xiebaiyuan's avatar
xiebaiyuan 已提交
1351
  GType *out_;
H
update  
hjchen2 已提交
1352
  int col_;
W
wangliu 已提交
1353
  int batch_size;
L
liuruilong 已提交
1354 1355
};

N
nhzlx 已提交
1356
template <typename Dtype>
L
liuruilong 已提交
1357
class FetchParam : public OpParam {
N
nhzlx 已提交
1358 1359 1360
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

L
liuruilong 已提交
1361 1362
 public:
  FetchParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
H
update  
hjchen2 已提交
1363
             const AttributeMap &attrs, Scope *scope)
1364
      : OpParam(inputs, outputs, attrs, scope) {
H
hjchen2 已提交
1365 1366
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<std::vector<LoDTensor>>(outputs, *scope);
1367
    col_ = GetAttr<int>("col", attrs);
L
liuruilong 已提交
1368
  }
L
liuruilong 已提交
1369

H
hjchen2 已提交
1370 1371
  const GType *InputX() const { return input_x_; }
  std::vector<LoDTensor> *Out() const { return out_; }
1372
  const int Col() const { return col_; }
L
liuruilong 已提交
1373

L
liuruilong 已提交
1374
 private:
H
hjchen2 已提交
1375 1376
  GType *input_x_;
  std::vector<LoDTensor> *out_;
1377
  int col_;
qnqinan's avatar
qnqinan 已提交
1378
#ifdef PADDLE_MOBILE_FPGA
1379

qnqinan's avatar
qnqinan 已提交
1380
 public:
1381
#ifdef PADDLE_MOBILE_FPGA_V1
qnqinan's avatar
qnqinan 已提交
1382
  fpga::BypassArgs fpga_bypass_args;
1383
  Tensor aligned_out;
1384 1385 1386
#else
  std::shared_ptr<Tensor> aligned_out;
#endif
qnqinan's avatar
qnqinan 已提交
1387
#endif
L
liuruilong 已提交
1388 1389
};

L
lijiancheng0614 已提交
1390 1391 1392 1393 1394 1395 1396 1397 1398
#ifdef FILL_CONSTANT_OP
// Parameter pack for the fill_constant operator: output variable/tensor plus
// the `dtype`, `shape` and `value` attributes describing the constant.
template <typename Dtype>
class FillConstantParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FillConstantParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    out_var_ = OutVarFrom(outputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    value_ = GetAttr<float>("value", attrs);
    shape_ = GetAttr<vector<int>>("shape", attrs);
    dtype_ = GetAttr<int>("dtype", attrs);
  }

  // Raw output variable (also exposed as a tensor via Out()).
  Variable *OutVar() const { return out_var_; }

  GType *Out() const { return out_; }

  const int &DataDtype() const { return dtype_; }

  const vector<int> &Shape() const { return shape_; }

  const float &Value() const { return value_; }

 private:
  Variable *out_var_;
  GType *out_;
  int dtype_;
  vector<int> shape_;
  float value_;
};
#endif

1427 1428 1429 1430 1431 1432 1433 1434 1435 1436 1437 1438 1439 1440 1441 1442 1443 1444 1445 1446 1447 1448 1449 1450 1451 1452 1453 1454 1455 1456 1457 1458 1459 1460 1461 1462 1463 1464 1465 1466 1467 1468 1469 1470 1471 1472 1473 1474 1475
#ifdef FILL_CONSTANT_BATCH_SIZE_LIKE_OP
// Parameter pack for the fill_constant_batch_size_like operator: fills the
// output with `value`, with the output's dim `output_dim_idx` copied from
// the input's dim `input_dim_idx` (the remaining dims come from `shape`).
template <typename Dtype>
class FillConstantBatchSizeLikeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FillConstantBatchSizeLikeParam(const VariableNameMap &inputs,
                                 const VariableNameMap &outputs,
                                 const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    // Resolve the reference input and the output variable/tensor.
    input_ = InputFrom<GType>(inputs, *scope);
    out_var_ = OutVarFrom(outputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    // Attributes describing the constant and the copied dimension.
    dtype_ = GetAttr<int>("dtype", attrs);
    shape_ = GetAttr<vector<int>>("shape", attrs);
    value_ = GetAttr<float>("value", attrs);
    input_dim_idx_ = GetAttr<int>("input_dim_idx", attrs);
    output_dim_idx_ = GetAttr<int>("output_dim_idx", attrs);
  }

  // Raw output variable (also exposed as a tensor via Out()).
  Variable *OutVar() const { return out_var_; }

  const GType *Input() const { return input_; }

  GType *Out() const { return out_; }

  const int &DataDtype() const { return dtype_; }

  const vector<int> &Shape() const { return shape_; }

  const float &Value() const { return value_; }

  int InputDimIdx() const { return input_dim_idx_; }

  int OutputDimIdx() const { return output_dim_idx_; }

 private:
  GType *input_;
  Variable *out_var_;
  GType *out_;
  int dtype_;
  vector<int> shape_;
  float value_;
  int input_dim_idx_;
  int output_dim_idx_;
};
#endif

L
liuruilong 已提交
1476
#ifdef TRANSPOSE_OP
// Parameter pack for the transpose operator: input "X", output "Out" and the
// `axis` permutation attribute.
template <typename Dtype>
class TransposeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  TransposeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                 const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    out_ = OutFrom<GType>(outputs, *scope);
    input_x_ = InputXFrom<GType>(inputs, *scope);
    axis_ = GetAttr<vector<int>>("axis", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  const vector<int> &Axis() const { return axis_; }

 private:
  GType *input_x_;
  GType *out_;
  vector<int> axis_;
};
#endif
E
eclipsess 已提交
1503

L
lijiancheng0614 已提交
1504 1505 1506 1507 1508 1509 1510 1511
#ifdef TRANSPOSE2_OP
// Parameter pack for the transpose2 operator: like transpose but also emits
// an "XShape" output recording the input shape.
template <typename Dtype>
class Transpose2Param : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  Transpose2Param(const VariableNameMap &inputs, const VariableNameMap &outputs,
                  const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    output_xshape_ = OutputXShapeFrom<GType>(outputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    input_x_ = InputXFrom<GType>(inputs, *scope);
    axis_ = GetAttr<vector<int>>("axis", attrs);
  }

  GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  GType *OutputXShape() const { return output_xshape_; }

  const vector<int> &Axis() const { return axis_; }

 private:
  GType *input_x_;
  GType *out_;
  GType *output_xshape_;
  vector<int> axis_;
};
#endif

xiebaiyuan's avatar
xiebaiyuan 已提交
1536 1537 1538 1539 1540 1541 1542 1543
#ifdef LOOKUP_OP
// Parameter pack for the lookup_table operator: embedding matrix "W",
// index tensor "Ids", output "Out" and the `padding_idx` attribute.
template <typename Dtype>
class LookupParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  LookupParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
              const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    out_ = OutFrom<GType>(outputs, *scope);
    input_w_ = InputWFrom<GType>(inputs, *scope);
    input_ids_ = InputIdsFrom<GType>(inputs, *scope);
    padding_idx_ = GetAttr<int64_t>("padding_idx", attrs);
  }

  const GType *InputW() const { return input_w_; }

  const GType *InputIds() const { return input_ids_; }

  GType *Out() const { return out_; }

  int64_t PaddingIdx() const { return padding_idx_; }

 private:
  GType *input_w_;
  GType *input_ids_;
  GType *out_;
  int64_t padding_idx_;
};
#endif

#ifdef CRF_OP
// Parameter pack for the CRF decoding operator:
//   {G_OP_TYPE_CRF, {{"Emission", "Transition", "Label"}, {"ViterbiPath"}}}
// No attributes are read (a "padding_idx" read existed once but is disabled).
template <typename Dtype>
class CrfParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  CrfParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    output_viterbipath_ = OutputViterbiPathFrom<GType>(outputs, *scope);
    input_emission_ = InputEmissionFrom<GType>(inputs, *scope);
    input_transition_ = InputTransitionFrom<GType>(inputs, *scope);
    input_label_ = InputLabelFrom<GType>(inputs, *scope);
  }

  const GType *InputEmission() const { return input_emission_; }

  const GType *InputTransition() const { return input_transition_; }

  const GType *InputLabel() const { return input_label_; }

  // Decoded Viterbi path output.
  GType *outputVBP() const { return output_viterbipath_; }

 private:
  GType *input_emission_;
  GType *input_transition_;
  GType *input_label_;
  GType *output_viterbipath_;
};
#endif

L
liuruilong 已提交
1604
#ifdef RESHAPE_OP
// Parameter pack for the reshape operator: input "X", an optional runtime
// "Shape" tensor, the output, the static `shape` attribute and the optional
// `inplace` flag.
template <typename Dtype>
class ReshapeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ReshapeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_shape_ = InputShapeFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    shape_ = GetAttr<vector<int>>("shape", attrs);
    // Older fluid program descs may not carry "inplace".
    if (HasAttr("inplace", attrs)) {
      inplace_ = GetAttr<bool>("inplace", attrs);
    } else {
      inplace_ = false;
      DLOG << "ReshapeParam lost inplace params. maybe fluid updated";
    }
  }

  const GType *InputX() const { return input_x_; }

  const GType *InputShape() const { return input_shape_; }

  GType *Out() const { return out_; }

  const vector<int> &Shape() const { return shape_; }

  const bool &Inplace() const { return inplace_; }

 private:
  GType *input_x_;
  GType *input_shape_;
  GType *out_;
  vector<int> shape_;
  bool inplace_;
};
#endif
E
eclipsess 已提交
1645

L
lijiancheng0614 已提交
1646 1647 1648 1649 1650 1651 1652 1653
#ifdef RESHAPE2_OP
// Parameter pack for the reshape2 operator: like reshape but also emits an
// "XShape" output recording the input shape.
template <typename Dtype>
class Reshape2Param : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  Reshape2Param(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_shape_ = InputShapeFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    output_xshape_ = OutputXShapeFrom<GType>(outputs, *scope);
    shape_ = GetAttr<vector<int>>("shape", attrs);
    // Optional attribute; default to false when absent.
    inplace_ = HasAttr("inplace", attrs) ? GetAttr<bool>("inplace", attrs)
                                         : false;
  }

  GType *InputX() const { return input_x_; }

  const GType *InputShape() const { return input_shape_; }

  GType *Out() const { return out_; }

  GType *OutputXShape() const { return output_xshape_; }

  const vector<int> &Shape() const { return shape_; }

  const bool &Inplace() const { return inplace_; }

 private:
  GType *input_x_;
  GType *input_shape_;
  GType *out_;
  GType *output_xshape_;
  vector<int> shape_;
  bool inplace_;
};
#endif

T
Tian 已提交
1690
#ifdef SCALE_OP
// Parameters for the scale op: out = scale * x + bias.
template <typename Dtype>
class ScaleParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  ScaleParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    scale_ = GetAttr<float>("scale", attrs);
    bias_ = GetAttr<float>("bias", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  const float Scale() const { return scale_; }

  const float Bias() const { return bias_; }

 private:
  GType *input_x_;  // input tensor
  GType *out_;      // output tensor
  float scale_;     // multiplicative factor
  float bias_;      // additive offset
};
#endif

#ifdef SLICE_OP
// Parameters for the slice op. Members are intentionally public: the
// kernels mutate axes_/starts_/ends_ while normalizing them.
template <typename Dtype>
class SliceParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  SliceParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);

    axes_ = GetAttr<std::vector<int>>("axes", attrs);
    starts_ = GetAttr<std::vector<int>>("starts", attrs);
    ends_ = GetAttr<std::vector<int>>("ends", attrs);

    // Remember the rank of the output as declared by the program, before
    // any kernel-side dim rewriting takes place.
    original_output_dims_size_ = output_->dims().size();
  }

 public:
  GType *input_;                   // tensor to slice
  GType *output_;                  // sliced result
  std::vector<int> axes_;          // axes being sliced
  std::vector<int> starts_;        // start index per axis
  std::vector<int> ends_;          // end index per axis
  int original_output_dims_size_;  // rank of output at construction time
};
#endif

#ifdef RESIZE_OP
// Parameters for the resize op: fixed height/width target or relative
// scaling factors, with an optional runtime shape tensor.
template <typename Dtype>
class ResizeParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  ResizeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
              const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_shape_ = InputShapeFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    is_pyramid_test_ = GetAttr<bool>("is_pyramid_test", attrs);
    height_ = GetAttr<int>("height", attrs);
    width_ = GetAttr<int>("width", attrs);
    out_height_scale_ = GetAttr<float>("out_height_scale", attrs);
    out_width_scale_ = GetAttr<float>("out_width_scale", attrs);
  }

  const GType *InputX() const { return input_x_; }

  const GType *InputShape() const { return input_shape_; }

  GType *Out() const { return out_; }

  const bool &IsPyramidTest() const { return is_pyramid_test_; }

  const int &Height() const { return height_; }

  const int &Width() const { return width_; }

  const float &OutHeightScale() const { return out_height_scale_; }

  const float &OutWidthScale() const { return out_width_scale_; }

 private:
  GType *input_x_;          // tensor to resize
  GType *input_shape_;      // optional runtime target shape
  GType *out_;              // resized output
  bool is_pyramid_test_;    // pyramid-test mode flag (see kernel)
  int height_;              // fixed target height
  int width_;               // fixed target width
  float out_height_scale_;  // relative height scaling factor
  float out_width_scale_;   // relative width scaling factor
};
#endif

L
liuruilong 已提交
1800
#ifdef RELU_OP
/*
 * Instantiated by the op layer and handed to the kernel layer.
 * Common base for all relu-family parameter classes.
 * */
template <typename Dtype>
class ReluParamBase : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  ReluParamBase(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

 private:
  GType *input_x_;  // activation input
  GType *out_;      // activation output
};
D
relu  
dolphin8 已提交
1825 1826 1827

// Default relu parameter type: inherits everything from the base,
// including its constructors. Specialized for GPU_CL below.
template <typename Dtype>
class ReluParam : public ReluParamBase<Dtype> {
 public:
  using ReluParamBase<Dtype>::ReluParamBase;
};

Z
zp7 已提交
1832 1833 1834 1835 1836 1837 1838 1839 1840 1841 1842 1843 1844 1845
// Parameters for relu6: clamps activations to [0, threshold].
template <typename Dtype>
class Relu6Param : public ReluParamBase<Dtype> {
 public:
  Relu6Param(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : ReluParamBase<Dtype>(inputs, outputs, attrs, scope) {
    threshold_ = OpParam::GetAttr<float>("threshold", attrs);
  }

  float getThreshold() const { return threshold_; }

 private:
  float threshold_;  // upper clamp bound (typically 6.0)
};

Y
yangfei 已提交
1846
#ifdef PADDLE_MOBILE_CL
// OpenCL specialization: carries an intermediate CLImage the CL kernel
// uses as scratch storage between passes.
template <>
class ReluParam<GPU_CL> : public ReluParamBase<GPU_CL> {
 public:
  using ReluParamBase<GPU_CL>::ReluParamBase;

  framework::CLImage &getMidImage() { return midImage; }

 private:
  framework::CLImage midImage;  // intermediate image owned by this param
};
#endif
D
relu  
dolphin8 已提交
1857

L
liuruilong 已提交
1858
#endif
E
eclipsess 已提交
1859

Z
zhangyang 已提交
1860 1861 1862 1863 1864 1865 1866 1867
#ifdef TANH_OP
// Parameters for the tanh op. On FPGA builds it additionally carries a
// float staging tensor and the bypass arguments used by the FPGA bridge.
template <typename Dtype>
class TanhParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  TanhParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

 private:
  GType *input_x_;  // activation input
  GType *out_;      // activation output
#ifdef PADDLE_MOBILE_FPGA

 private:
  std::shared_ptr<GType> float_input_x_;  // float copy of the input
  fpga::BypassArgs fpga_bypass_args;

 public:
  // Returns the float staging tensor when one has been set, otherwise
  // falls back to the original input.
  GType *FloatInput() const {
    return float_input_x_ == nullptr ? input_x_ : float_input_x_.get();
  }
  // NOTE(review): takes ownership of `input` via shared_ptr::reset —
  // the caller must hand over a heap tensor it no longer owns.
  void SetFloatInput(LoDTensor *input) { float_input_x_.reset(input); }
  const fpga::BypassArgs &FpgaArgs() const { return fpga_bypass_args; }
  void SetFpgaArgs(const fpga::BypassArgs &args) { fpga_bypass_args = args; }
#endif
};
#endif
E
eclipsess 已提交
1895

T
Tian 已提交
1896
#ifdef PRELU_OP
// Parameters for prelu: out = x > 0 ? x : alpha * x, where alpha is a
// learned tensor and "mode" selects broadcasting ("all"/"channel"/"element").
template <typename Dtype>
class PReluParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  PReluParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    DLOG << "PReluParam inputs before";
    input_x_ = InputXFrom<GType>(inputs, *scope);
    alpha_ = InputAlphaFrom<GType>(inputs, *scope);
    // NOTE: a dead local `framework::DDim dims = alpha_->dims();` was
    // removed here — the value was never used.
    out_ = OutFrom<GType>(outputs, *scope);
    mode_ = GetStringAttr("mode", attrs);
    DLOG << "PReluParam mode after" << mode_;
  }

  const GType *InputX() const { return input_x_; }
  const GType *InputAlpha() const { return alpha_; }
  GType *Out() const { return out_; }
  const std::string &Mode() const { return mode_; }

 private:
  GType *input_x_;    // activation input
  GType *out_;        // activation output
  GType *alpha_;      // learned negative-slope tensor
  std::string mode_;  // broadcasting mode of alpha
};
#endif

1927 1928 1929 1930 1931 1932 1933 1934 1935 1936 1937 1938 1939 1940 1941 1942 1943 1944 1945 1946 1947 1948 1949 1950 1951
#ifdef LEAKY_RELU_OP
// Parameters for leaky_relu: out = x > 0 ? x : alpha * x with scalar alpha.
template <typename Dtype>
class LeakyReluParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  LeakyReluParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                 const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    alpha_ = GetAttr<float>("alpha", attrs);
  }

  const GType *InputX() const { return input_x_; }

  const float Alpha() const { return alpha_; }

  GType *Out() const { return out_; }

 private:
  GType *input_x_;  // activation input
  GType *out_;      // activation output
  float alpha_;     // scalar negative slope
};
#endif

N
nhzlx 已提交
1952
template <typename Dtype>
L
liuruilong 已提交
1953
class FusionFcParam : public OpParam {
N
nhzlx 已提交
1954 1955 1956
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

E
eclipsess 已提交
1957
 public:
L
liuruilong 已提交
1958
  FusionFcParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
1959 1960 1961 1962 1963 1964
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    input_z_ = InputZFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
E
eclipsess 已提交
1965 1966 1967 1968
    x_num_col_dims_ = GetAttr<int>("x_num_col_dims", attrs);
    y_num_col_dims_ = GetAttr<int>("y_num_col_dims", attrs);
    axis_ = GetAttr<int>("axis", attrs);
  }
Y
yangfei 已提交
1969
  GType *InputX() const { return input_x_; }
E
eclipsess 已提交
1970

1971
  GType *InputY() const { return input_y_; }
E
eclipsess 已提交
1972

1973
  GType *InputZ() const { return input_z_; }
E
eclipsess 已提交
1974

xiebaiyuan's avatar
xiebaiyuan 已提交
1975
  GType *Out() const { return out_; }
E
eclipsess 已提交
1976 1977 1978 1979 1980 1981 1982 1983

  const int &XNumColDims() const { return x_num_col_dims_; }

  const int &YNumColDims() const { return y_num_col_dims_; }

  const int &Axis() const { return axis_; }

 private:
xiebaiyuan's avatar
xiebaiyuan 已提交
1984
  GType *input_x_;
1985 1986
  GType *input_y_;
  GType *input_z_;
xiebaiyuan's avatar
xiebaiyuan 已提交
1987
  GType *out_;
E
eclipsess 已提交
1988 1989 1990
  int x_num_col_dims_;
  int y_num_col_dims_;
  int axis_;
Z
zhangyang 已提交
1991

Z
ZhenWang 已提交
1992
#ifdef PADDLE_MOBILE_FPGA
1993
 private:  // NOLINT
Z
zhangyang 已提交
1994
  fpga::SplitConvArgs fpga_conv_args;
Z
zhangyang 已提交
1995 1996

 public:
Z
zhangyang 已提交
1997 1998
  const fpga::SplitConvArgs &FpgaArgs() const { return fpga_conv_args; }
  void SetFpgaArgs(const fpga::SplitConvArgs &args) { fpga_conv_args = args; }
Z
zhangyang 已提交
1999
#endif
E
eclipsess 已提交
2000
};
2001 2002

#ifdef FUSION_FCRELU_OP
// fc+relu fusion reuses the plain fc parameters; the relu has no
// attributes of its own.
template <typename DeviceType>
using FusionFcReluParam = FusionFcParam<DeviceType>;
#endif
E
eclipsess 已提交
2006

N
nhzlx 已提交
2007
template <typename Dtype>
2008
class FusionConvAddParam : public ConvParam<Dtype> {
N
nhzlx 已提交
2009 2010 2011
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

W
wangliu 已提交
2012
 public:
L
liuruilong 已提交
2013
  FusionConvAddParam(const VariableNameMap &inputs,
L
liuruilong 已提交
2014
                     const VariableNameMap &outputs, const AttributeMap &attrs,
2015
                     Scope *scope)
2016
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
2017
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
2018
    axis_ = OpParam::GetAttr<int>("axis", attrs);
H
update  
hjchen2 已提交
2019
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
W
wangliu 已提交
2020
  }
2021
  GType *Bias() const { return bias_; }
W
wangliu 已提交
2022 2023 2024

  const int &Axis() const { return axis_; }

L
liuruilong 已提交
2025
 protected:
2026
  GType *bias_;
W
wangliu 已提交
2027 2028 2029
  int axis_;
};

N
nhzlx 已提交
2030 2031
template <typename Dtype>
Print &operator<<(Print &printer, const FusionConvAddParam<Dtype> &conv_param);
W
wangliu 已提交
2032

Z
zhangyang 已提交
2033
#ifdef FUSION_CONVADDRELU_OP
// conv+add+relu fusion: identical parameters to conv+add; the trailing
// relu contributes no attributes.
template <typename DeviceType>
class FusionConvAddReluParam : public FusionConvAddParam<DeviceType> {
 public:
  FusionConvAddReluParam(const VariableNameMap &inputs,
                         const VariableNameMap &outputs,
                         const AttributeMap &attrs, Scope *scope)
      : FusionConvAddParam<DeviceType>(inputs, outputs, attrs, scope) {}
};
#endif

2044
#ifdef FUSION_CONVADDPRELU_OP
// Parameters for the conv+add+prelu fusion: conv parameters plus the add
// bias/axis and the prelu alpha tensor with its broadcasting mode.
template <typename Dtype>
class FusionConvAddPReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvAddPReluParam(const VariableNameMap &inputs,
                          const VariableNameMap &outputs,
                          const AttributeMap &attrs, Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    alpha_ = OpParam::InputAlphaFrom<GType>(inputs, *scope);
    mode_ = OpParam::GetStringAttr("mode", attrs);
    // NOTE: a dead local `framework::DDim dims = alpha_->dims();` was
    // removed here — the value was never used.
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    // The fused op's output replaces the conv output.
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }
  const GType *InputAlpha() const { return alpha_; }
  const std::string &Mode() const { return mode_; }
  GType *Bias() const { return bias_; }
  const int &Axis() const { return axis_; }

 protected:
  GType *bias_;       // elementwise_add bias tensor
  int axis_;          // broadcast axis of the add
  GType *alpha_;      // prelu negative-slope tensor
  std::string mode_;  // broadcasting mode of alpha
};
#endif

#ifdef FUSION_CONVADDADDPRELU_OP
// Parameters for the conv+add+add+prelu fusion. The second add's operand
// (bias1_) is resolved by matching the fused graph's variable keys, since
// either input of that add may be the intermediate "addOut" result.
template <typename Dtype>
class FusionConvAddAddPReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvAddAddPReluParam(const VariableNameMap &inputs,
                             const VariableNameMap &outputs,
                             const AttributeMap &attrs, Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    // Default: second-add operand comes from Y1; may be overridden below.
    bias1_ = OpParam::InputYFrom1<GType>(inputs, *scope);
    alpha_ = OpParam::InputAlphaFrom<GType>(inputs, *scope);
    mode_ = OpParam::GetStringAttr("mode", attrs);
    // NOTE: a dead local `framework::DDim dims = alpha_->dims();` was
    // removed here — the value was never used.
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    keyOutput_ = OpParam::Getkey("addOut", inputs, 0);
    keyX1_ = OpParam::Getkey("addX", inputs, 1);
    keyY1_ = OpParam::Getkey("Y", inputs, 1);
    // Pick whichever side of the second add is NOT the intermediate
    // "addOut" produced by the first add.
    if (keyX1_ == keyOutput_) {
      bias1_ = OpParam::InputYFrom1<GType>(inputs, *scope);
    } else if (keyY1_ == keyOutput_) {
      bias1_ = OpParam::InputXFrom1<GType>(inputs, *scope);
    }
    // The fused op's output replaces the conv output.
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }
  const GType *InputAlpha() const { return alpha_; }
  const std::string &Mode() const { return mode_; }
  const GType *Bias1() const { return bias1_; }

  GType *Bias() const { return bias_; }

  const int &Axis() const { return axis_; }

 protected:
  GType *bias_;            // first add's bias tensor
  int axis_;               // broadcast axis of the adds
  GType *alpha_;           // prelu negative-slope tensor
  std::string mode_;       // broadcasting mode of alpha
  GType *bias1_;           // second add's bias tensor
  std::string keyOutput_;  // variable name of the intermediate add result
  std::string keyX1_;      // variable name on the X side of the second add
  std::string keyY1_;      // variable name on the Y side of the second add
};
#endif

E
eclipsess 已提交
2122
#ifdef FUSION_CONVADDBNRELU_OP
// Parameters for the conv+add+batch_norm+relu fusion: conv parameters,
// the add bias/axis, the four batch-norm tensors, and the folded
// new_scale/new_bias the kernel precomputes from them.
template <typename Dtype>
class FusionConvAddBNReluParam : public ConvParam<Dtype> {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  FusionConvAddBNReluParam(const VariableNameMap &inputs,
                           const VariableNameMap &outputs,
                           const AttributeMap &attrs, Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    // The fused op's output replaces the conv output.
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }

  ~FusionConvAddBNReluParam() {}

  GType *Bias() const { return bias_; }

  const int &Axis() const { return axis_; }

  const GType *InputBias() const { return input_bias_; }

  const GType *InputMean() const { return input_mean_; }

  const GType *InputScale() const { return input_scale_; }

  const GType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  // Folded BN scale/bias; ownership is taken with a CLImage-aware deleter.
  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const GType *NewScale() const { return new_scale_.get(); }

  const GType *NewBias() const { return new_bias_.get(); }

 protected:
  GType *bias_;             // elementwise_add bias tensor
  int axis_;                // broadcast axis of the add
  GType *input_bias_;       // BN beta
  GType *input_mean_;       // BN running mean
  GType *input_scale_;      // BN gamma
  GType *input_variance_;   // BN running variance
  float epsilon_;           // BN numerical-stability constant
  float momentum_;          // BN running-stat momentum
  std::shared_ptr<GType> new_bias_;   // folded bias (owned)
  std::shared_ptr<GType> new_scale_;  // folded scale (owned)
};
#endif

#ifdef FUSION_CONVBNADDRELU_OP
// Parameters for the conv+batch_norm+add+relu fusion. The add operand
// (bias_) is resolved by key matching, because either side of the add may
// be the intermediate "BNY" result of the batch norm.
template <typename Dtype>
class FusionConvBNAddReluParam : public ConvParam<Dtype> {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  FusionConvBNAddReluParam(const VariableNameMap &inputs,
                           const VariableNameMap &outputs,
                           const AttributeMap &attrs, Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    keyBNY_ = OpParam::Getkey("BNY", inputs, 0);
    keyX_ = OpParam::Getkey("X", inputs, 0);
    keyY_ = OpParam::Getkey("Y", inputs, 0);
    // Pick whichever side of the add is NOT the batch-norm output.
    if (keyX_ == keyBNY_) {
      bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
    } else if (keyY_ == keyBNY_) {
      bias_ = OpParam::InputXFrom<GType>(inputs, *scope);
    }
    // The fused op's output replaces the conv output.
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }

  ~FusionConvBNAddReluParam() {}
  GType *Bias() const { return bias_; }

  const int &Axis() const { return axis_; }

  const GType *InputBias() const { return input_bias_; }

  const GType *InputMean() const { return input_mean_; }

  const GType *InputScale() const { return input_scale_; }

  const GType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  // Folded BN scale/bias; ownership is taken with a CLImage-aware deleter.
  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const GType *NewScale() const { return new_scale_.get(); }

  const GType *NewBias() const { return new_bias_.get(); }

 protected:
  GType *bias_;            // add operand that is not the BN output
  int axis_;               // broadcast axis of the add
  GType *input_bias_;      // BN beta
  GType *input_mean_;      // BN running mean
  GType *input_scale_;     // BN gamma
  GType *input_variance_;  // BN running variance
  float epsilon_;          // BN numerical-stability constant
  float momentum_;         // BN running-stat momentum
  std::shared_ptr<GType> new_bias_;   // folded bias (owned)
  std::shared_ptr<GType> new_scale_;  // folded scale (owned)
  std::string keyBNY_;  // variable name of the BN output
  std::string keyX_;    // variable name on the X side of the add
  std::string keyY_;    // variable name on the Y side of the add
};
#endif
E
eclipsess 已提交
2263

Z
zhangyang 已提交
2264
#ifdef FUSION_CONVBN_OP
// Parameters for the conv+batch_norm fusion (no activation): conv
// parameters, the four batch-norm tensors, and the folded
// new_scale/new_bias precomputed by the kernel.
template <typename Dtype>
class FusionConvBNParam : public ConvParam<Dtype> {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  FusionConvBNParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    // This fusion writes to the "Y" output slot.
    this->output_ = OpParam::OutputYFrom<GType>(outputs, *scope);
  }

  const GType *InputBias() const { return input_bias_; }

  const GType *InputMean() const { return input_mean_; }

  const GType *InputScale() const { return input_scale_; }

  const GType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  // Folded BN scale/bias; ownership is taken with a CLImage-aware deleter.
  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const GType *NewScale() const { return new_scale_.get(); }

  const GType *NewBias() const { return new_bias_.get(); }

 protected:
  GType *input_bias_;      // BN beta
  GType *input_mean_;      // BN running mean
  GType *input_scale_;     // BN gamma
  GType *input_variance_;  // BN running variance
  float epsilon_;          // BN numerical-stability constant
  float momentum_;         // BN running-stat momentum
  std::shared_ptr<GType> new_bias_;   // folded bias (owned)
  std::shared_ptr<GType> new_scale_;  // folded scale (owned)
};
#endif

2320
#ifdef FUSION_CONVADDBN_OP
// Parameters for the conv+add+batch_norm fusion (no activation): conv
// parameters, the add bias/axis, the four batch-norm tensors, and the
// folded new_scale/new_bias precomputed by the kernel.
template <typename Dtype>
class FusionConvAddBNParam : public ConvParam<Dtype> {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  FusionConvAddBNParam(const VariableNameMap &inputs,
                       const VariableNameMap &outputs,
                       const AttributeMap &attrs, Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    // This fusion writes to the "Y" output slot.
    this->output_ = OpParam::OutputYFrom<GType>(outputs, *scope);
  }

  GType *Bias() const { return bias_; }

  const int &Axis() const { return axis_; }

  const GType *InputBias() const { return input_bias_; }

  const GType *InputMean() const { return input_mean_; }

  const GType *InputScale() const { return input_scale_; }

  const GType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  // Folded BN scale/bias; ownership is taken with a CLImage-aware deleter.
  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const GType *NewScale() const { return new_scale_.get(); }

  const GType *NewBias() const { return new_bias_.get(); }

 protected:
  GType *bias_;            // elementwise_add bias tensor
  int axis_;               // broadcast axis of the add
  GType *input_bias_;      // BN beta
  GType *input_mean_;      // BN running mean
  GType *input_scale_;     // BN gamma
  GType *input_variance_;  // BN running variance
  float epsilon_;          // BN numerical-stability constant
  float momentum_;         // BN running-stat momentum
  std::shared_ptr<GType> new_bias_;   // folded bias (owned)
  std::shared_ptr<GType> new_scale_;  // folded scale (owned)
};
#endif
Y
Yao,kun 已提交
2382

E
eclipsess 已提交
2383
#ifdef FUSION_DWCONVBNRELU_OP
// Parameters for the depthwise_conv+batch_norm+relu fusion: conv
// parameters, the four batch-norm tensors, and the folded
// new_scale/new_bias precomputed by the kernel.
template <typename Dtype>
class FusionDWConvBNReluParam : public ConvParam<Dtype> {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  FusionDWConvBNReluParam(const VariableNameMap &inputs,
                          const VariableNameMap &outputs,
                          const AttributeMap &attrs, Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    // The fused op's output replaces the conv output.
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }

  ~FusionDWConvBNReluParam() {}

  const GType *InputBias() const { return input_bias_; }

  const GType *InputMean() const { return input_mean_; }

  const GType *InputScale() const { return input_scale_; }

  const GType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  // Folded BN scale/bias; ownership is taken with a CLImage-aware deleter.
  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const GType *NewScale() const { return new_scale_.get(); }

  const GType *NewBias() const { return new_bias_.get(); }

 protected:
  GType *input_bias_;      // BN beta
  GType *input_mean_;      // BN running mean
  GType *input_scale_;     // BN gamma
  GType *input_variance_;  // BN running variance
  float epsilon_;          // BN numerical-stability constant
  float momentum_;         // BN running-stat momentum
  std::shared_ptr<GType> new_bias_;   // folded bias (owned)
  std::shared_ptr<GType> new_scale_;  // folded scale (owned)
};

#endif

2442 2443 2444 2445 2446 2447 2448 2449 2450 2451 2452 2453 2454 2455 2456 2457
#ifdef FUSION_CONVRELU_OP
// conv+relu fusion: plain conv parameters; only the output variable
// needs rebinding since the relu contributes no attributes.
template <typename Dtype>
class FusionConvReluParam : public ConvParam<Dtype> {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  FusionConvReluParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
                      Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }
};
#endif

2458
#ifdef FUSION_CONVBNRELU_OP
// Parameters for the fused conv2d + batch_norm + relu operator.
// Convolution inputs/attributes come from the ConvParam base; this class
// adds the batch-norm tensors and the replacement scale/bias installed
// later through SetNewScale/SetNewBias.
template <typename Dtype>
class FusionConvBNReluParam : public ConvParam<Dtype> {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  FusionConvBNReluParam(const VariableNameMap &inputs,
                        const VariableNameMap &outputs,
                        const AttributeMap &attrs, Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    // Batch-norm inputs and attributes.
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    // The fused op writes to "Out" instead of the plain conv output.
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }

  ~FusionConvBNReluParam() {}

  // Raw batch-norm inputs.
  const GType *InputBias() const { return input_bias_; }
  const GType *InputMean() const { return input_mean_; }
  const GType *InputScale() const { return input_scale_; }
  const GType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }
  const float &Momentum() const { return momentum_; }

  // Replacement scale/bias; ownership is taken here and released through
  // CLImageDeleter when the shared_ptr drops its last reference.
  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const GType *NewScale() const { return new_scale_.get(); }

  const GType *NewBias() const { return new_bias_.get(); }

 protected:
  GType *input_bias_;
  GType *input_mean_;
  GType *input_scale_;
  GType *input_variance_;
  float epsilon_;
  float momentum_;
  std::shared_ptr<GType> new_bias_;
  std::shared_ptr<GType> new_scale_;
};
#endif

Y
Yao,kun 已提交
2516
#ifdef IM2SEQUENCE_OP
// Parameters for the im2sequence operator: input/output tensors plus the
// kernel window, stride and padding attributes.
template <typename Dtype>
class Im2SequenceParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  Im2SequenceParam(const VariableNameMap &inputs,
                   const VariableNameMap &outputs, const AttributeMap &attrs,
                   Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    // Sliding-window geometry.
    kernels_ = GetAttr<vector<int>>("kernels", attrs);
    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
  }

  const GType *Input() const { return input_x_; }

  GType *Output() const { return out_; }

  const vector<int> &Kernels() const { return kernels_; }

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

 private:
  GType *input_x_;
  GType *out_;
  vector<int> kernels_;
  vector<int> strides_;
  vector<int> paddings_;
};
#endif
Y
Yao,kun 已提交
2552

2553
#ifdef DROPOUT_OP
// Parameters for the dropout operator: input/output tensors and the drop
// probability attribute.
template <typename Dtype>
class DropoutParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  DropoutParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    dropout_prob_ = GetAttr<float>("dropout_prob", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  float DropoutProb() const { return dropout_prob_; }

 private:
  GType *input_x_;
  GType *out_;
  float dropout_prob_;
};
#endif
Y
Yao,kun 已提交
2581

N
nhzlx 已提交
2582
template <typename Dtype>
L
liuruilong 已提交
2583
class ConvTransposeParam : public OpParam {
N
nhzlx 已提交
2584 2585 2586
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

L
liuruilong 已提交
2587 2588 2589
 public:
  ConvTransposeParam(const VariableNameMap &inputs,
                     const VariableNameMap &outputs, const AttributeMap &attrs,
2590 2591
                     Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
2592 2593
    filter_ = OpParam::FilterFrom<GType>(inputs, *scope);
    input_ = OpParam::InputFrom<GType>(inputs, *scope);
2594
    // output_ = OutputFrom<GType>(outputs, scope);
qnqinan's avatar
qnqinan 已提交
2595
    if (outputs.count("Output")) {
2596
      output_ = OpParam::OutputFrom<GType>(outputs, *scope);
qnqinan's avatar
qnqinan 已提交
2597
    }
L
liuruilong 已提交
2598 2599 2600
    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
    dilations_ = GetAttr<vector<int>>("dilations", attrs);
2601 2602 2603 2604
    if (HasAttr("output_size", attrs)) {
      output_size_ = GetAttr<vector<int>>("output_size", attrs);
      DLOG << "conv transpose output size: " << output_size_;
    }
L
liuruilong 已提交
2605 2606 2607
    groups = GetAttr<int>("groups", attrs);
  }

2608
  const GType *Input() const { return input_; }
L
liuruilong 已提交
2609

2610
  GType *Filter() const { return filter_; }
L
liuruilong 已提交
2611

2612
  GType *Output() const { return output_; }
L
liuruilong 已提交
2613 2614 2615 2616 2617

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

2618 2619 2620 2621
  const vector<int> &Filters() const { return filter_; }

  const vector<int> &TransFilters() const { return transformed_filter_; }

L
liuruilong 已提交
2622 2623
  const vector<int> &Dilations() const { return dilations_; }

2624 2625
  const vector<int> &OutputSize() const { return output_size_; }

L
liuruilong 已提交
2626 2627
  const int &Groups() const { return groups; }

H
hjchen2 已提交
2628 2629 2630 2631 2632
  enum ExecMode {
    EXEC_INVALID = 0,
    EXEC_GEMM_FLOAT,
    EXEC_DECONV3X3_FLOAT,
    EXEC_DECONV4X4_FLOAT,
2633 2634
    EXEC_DEPTHWISETRANS_FLOAT,
    EXEC_CONVTRANS3x3s2_FLOAT,
2635
    EXEC_CONVTRANS_FLOAT,
H
hjchen2 已提交
2636 2637 2638 2639
  };

  ExecMode &ExecMode() const { return exec_mode_; }

L
liuruilong 已提交
2640
 private:
2641 2642 2643
  GType *input_;
  GType *output_;
  GType *filter_;
2644
  GType *transformed_filter_;
L
liuruilong 已提交
2645 2646 2647
  vector<int> strides_;
  vector<int> paddings_;
  vector<int> dilations_;
2648
  vector<int> output_size_;
L
liuruilong 已提交
2649
  int groups;
H
hjchen2 已提交
2650
  mutable enum ExecMode exec_mode_;
Z
zhangyang 已提交
2651 2652 2653 2654 2655

#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::DeconvArgs fpga_conv_args;
qnqinan's avatar
qnqinan 已提交
2656
  fpga::DWDeconvArgs fpga_DWDeconv_args;
Z
zhangyang 已提交
2657 2658 2659

 public:
  const fpga::DeconvArgs &FpgaArgs() const { return fpga_conv_args; }
qnqinan's avatar
qnqinan 已提交
2660 2661 2662
  const fpga::DWDeconvArgs &FpgaDWDconvArgs() const {
    return fpga_DWDeconv_args;
  }
Z
zhangyang 已提交
2663
  void SetFpgaArgs(const fpga::DeconvArgs &args) { fpga_conv_args = args; }
qnqinan's avatar
qnqinan 已提交
2664 2665 2666
  void SetFpgaArgs(const fpga::DWDeconvArgs &args) {
    fpga_DWDeconv_args = args;
  }
Z
zhangyang 已提交
2667
#endif
L
liuruilong 已提交
2668
};
Z
zhangyang 已提交
2669

qnqinan's avatar
qnqinan 已提交
2670 2671 2672 2673 2674
#ifdef FUSION_DECONVADD_OP
// Parameters for the fused conv2d_transpose + elementwise_add operator.
// Deconvolution inputs/attributes come from the ConvTransposeParam base;
// this class adds the bias tensor, the broadcast axis and the fused output.
template <typename Dtype>
class FusionDeconvAddParam : public ConvTransposeParam<Dtype> {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  FusionDeconvAddParam(const VariableNameMap &inputs,
                       const VariableNameMap &outputs,
                       const AttributeMap &attrs, Scope *scope)
      : ConvTransposeParam<Dtype>(inputs, outputs, attrs, scope) {
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }

  GType *Bias() const { return bias_; }

  const int &Axis() const { return axis_; }

  GType *Output() const { return output_; }

 protected:
  GType *bias_;
  int axis_;
  GType *output_;
};
#endif

#ifdef FUSION_DECONVADDRELU_OP
// deconv + add + relu needs no parameters beyond deconv + add, so the
// param type is simply an alias of FusionDeconvAddParam.
template <typename Dtype>
using FusionDeconvAddReluParam = FusionDeconvAddParam<Dtype>;
#endif
2702 2703 2704 2705 2706 2707 2708 2709 2710
#ifdef FUSION_DECONVADDBN_OP
// Parameters for the fused conv2d_transpose + elementwise_add + batch_norm
// operator. Deconv inputs come from the ConvTransposeParam base; this class
// adds the batch-norm tensors and the replacement scale/bias installed via
// SetNewScale/SetNewBias.
template <typename Dtype>
class FusionDeconvAddBNParam : public ConvTransposeParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDeconvAddBNParam(const VariableNameMap &inputs,
                         const VariableNameMap &outputs,
                         const AttributeMap &attrs, Scope *scope)
      : ConvTransposeParam<Dtype>(inputs, outputs, attrs, scope) {
    output_ = OpParam::OutFrom<GType>(outputs, *scope);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    //    is_test_ = OpParam::GetAttr<bool>("is_test", attrs);
  }
  RType *Output() const { return output_; }

  const RType *InputBias() const { return input_bias_; }

  const RType *InputMean() const { return input_mean_; }

  const RType *InputScale() const { return input_scale_; }

  const RType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  const bool &IsTest() const { return is_test_; }

  // Replacement scale/bias; ownership is taken and released through
  // CLImageDeleter.
  void SetNewScale(RType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(RType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const RType *NewScale() const { return new_scale_.get(); }

  const RType *NewBias() const { return new_bias_.get(); }

 protected:
  RType *output_;
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
  float epsilon_;
  float momentum_;
  // BUGFIX: is_test_ was never assigned (the attr read above is commented
  // out) but is returned by IsTest() — reading it was UB. Default to true
  // (inference-time batch norm); TODO confirm against the kernels.
  bool is_test_ = true;
  std::shared_ptr<RType> new_bias_;
  std::shared_ptr<RType> new_scale_;
};
#endif
#ifdef FUSION_DECONVBNRELU_OP
// Parameters for the fused conv2d_transpose + batch_norm + relu operator.
// Deconv inputs come from the ConvTransposeParam base; this class adds the
// batch-norm tensors and the replacement scale/bias installed via
// SetNewScale/SetNewBias.
template <typename Dtype>
class FusionDeconvBNReluParam : public ConvTransposeParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDeconvBNReluParam(const VariableNameMap &inputs,
                          const VariableNameMap &outputs,
                          const AttributeMap &attrs, Scope *scope)
      : ConvTransposeParam<Dtype>(inputs, outputs, attrs, scope) {
    output_ = OpParam::OutFrom<GType>(outputs, *scope);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
  }
  RType *Output() const { return output_; }

  const RType *InputBias() const { return input_bias_; }

  const RType *InputMean() const { return input_mean_; }

  const RType *InputScale() const { return input_scale_; }

  const RType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  const bool &IsTest() const { return is_test_; }

  // Replacement scale/bias; ownership is taken and released through
  // CLImageDeleter.
  void SetNewScale(RType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(RType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const RType *NewScale() const { return new_scale_.get(); }

  const RType *NewBias() const { return new_bias_.get(); }

 protected:
  RType *output_;
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
  float epsilon_;
  float momentum_;
  // BUGFIX: is_test_ was never assigned anywhere in this class but is
  // returned by IsTest() — reading it was UB. Default to true
  // (inference-time batch norm); TODO confirm against the kernels.
  bool is_test_ = true;
  std::shared_ptr<RType> new_bias_;
  std::shared_ptr<RType> new_scale_;
};
#endif
#ifdef FUSION_DECONVADDBNRELU_OP
// Parameters for the fused conv2d_transpose + elementwise_add + batch_norm
// + relu operator. Deconv inputs come from the ConvTransposeParam base;
// this class adds the batch-norm tensors and the replacement scale/bias
// installed via SetNewScale/SetNewBias.
template <typename Dtype>
class FusionDeconvAddBNReluParam : public ConvTransposeParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDeconvAddBNReluParam(const VariableNameMap &inputs,
                             const VariableNameMap &outputs,
                             const AttributeMap &attrs, Scope *scope)
      : ConvTransposeParam<Dtype>(inputs, outputs, attrs, scope) {
    output_ = OpParam::OutFrom<GType>(outputs, *scope);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    //    is_test_ = OpParam::GetAttr<bool>("is_test", attrs);
  }
  RType *Output() const { return output_; }

  const RType *InputBias() const { return input_bias_; }

  const RType *InputMean() const { return input_mean_; }

  const RType *InputScale() const { return input_scale_; }

  const RType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  const bool &IsTest() const { return is_test_; }

  // Replacement scale/bias; ownership is taken and released through
  // CLImageDeleter.
  void SetNewScale(RType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(RType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const RType *NewScale() const { return new_scale_.get(); }

  const RType *NewBias() const { return new_bias_.get(); }

 protected:
  RType *output_;
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
  float epsilon_;
  float momentum_;
  // BUGFIX: is_test_ was never assigned (the attr read above is commented
  // out) but is returned by IsTest() — reading it was UB. Default to true
  // (inference-time batch norm); TODO confirm against the kernels.
  bool is_test_ = true;
  std::shared_ptr<RType> new_bias_;
  std::shared_ptr<RType> new_scale_;
};
#endif
L
liuruilong 已提交
2884

Z
zhangyang 已提交
2885 2886 2887 2888 2889
#ifdef FUSION_DECONVRELU_OP
// deconv + relu needs no parameters beyond plain conv2d_transpose, so the
// param type is simply an alias of ConvTransposeParam.
template <typename Dtype>
using FusionDeconvReluParam = ConvTransposeParam<Dtype>;
#endif
xiebaiyuan's avatar
xiebaiyuan 已提交
2890 2891 2892 2893 2894 2895 2896 2897 2898 2899 2900 2901 2902 2903
#ifdef GRU_OP
// Parameters for the GRU (gated recurrent unit) sequence operator:
// input/weight/bias/initial-hidden tensors, the intermediate batch outputs,
// the activation names and the reverse flag.
template <typename Dtype>
class GruParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;

 public:
  GruParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    // Inputs.
    input_input_ = InputFrom<GType>(inputs, *scope);
    input_h0_ = InputH0From<GType>(inputs, *scope);
    input_bias_ = InputBiasFrom<GType>(inputs, *scope);
    input_weight_ = InputWeightFrom<GType>(inputs, *scope);
    // Outputs (batch-reordered intermediates plus the final hidden state).
    output_batch_gate_ = OutputBatchGateFrom<GType>(outputs, *scope);
    output_batch_reset_hidden_prev_ =
        OutputBatchResetHiddenPrevFrom<GType>(outputs, *scope);
    output_batch_hidden_ = OutputBatchHiddenFrom<GType>(outputs, *scope);
    output_hidden_ = OutputHiddenFrom<GType>(outputs, *scope);
    // Attributes.
    activation_ = GetStringAttr("activation", attrs);
    gate_activation_ = GetStringAttr("gate_activation", attrs);
    is_reverse_ = GetAttr<bool>("is_reverse", attrs);
  }

  const GType *InputInput() const { return input_input_; }
  const GType *InputWeight() const { return input_weight_; }
  const GType *InputH0() const { return input_h0_; }
  const GType *InputBias() const { return input_bias_; }

  const std::string &Activation() const { return activation_; }
  const std::string &GateActivation() const { return gate_activation_; }
  const bool &IsReverse() const { return is_reverse_; }

  GType *OutBatchGate() const { return output_batch_gate_; }
  GType *OutBatchResetHiddenPrev() const {
    return output_batch_reset_hidden_prev_;
  }
  GType *OutBatchHidden() const { return output_batch_hidden_; }
  GType *OutHidden() const { return output_hidden_; }

 private:
  GType *input_input_;
  GType *input_h0_;
  GType *input_bias_;
  GType *input_weight_;

  GType *output_batch_gate_;
  GType *output_batch_reset_hidden_prev_;
  GType *output_batch_hidden_;
  GType *output_hidden_;
  std::string activation_;
  std::string gate_activation_;
  bool is_reverse_;
};
#endif

Z
zhaojiaying01 已提交
2951 2952 2953 2954 2955 2956 2957
#ifdef GRU_UNIT_OP
// Parameters for a single GRU step (gru_unit): one input, the previous
// hidden state, weights/bias, the three step outputs, and integer codes for
// the two activations.
template <typename Dtype>
class GruUnitParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;

 public:
  GruUnitParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    // Inputs.
    input_input_ = InputFrom<GType>(inputs, *scope);
    input_hidden_prev_ = InputHiddenPrevFrom<GType>(inputs, *scope);
    input_bias_ = InputBiasFrom<GType>(inputs, *scope);
    input_weight_ = InputWeightFrom<GType>(inputs, *scope);
    // Outputs.
    output_gate_ = OutputGateFrom<GType>(outputs, *scope);
    output_reset_hidden_prev_ =
        OutputResetHiddenPrevFrom<GType>(outputs, *scope);
    output_hidden_ = OutputHiddenFrom<GType>(outputs, *scope);
    // Activations are encoded as integers for this op.
    activation_ = GetAttr<int>("activation", attrs);
    gate_activation_ = GetAttr<int>("gate_activation", attrs);
  }

  const GType *InputInput() const { return input_input_; }
  const GType *InputWeight() const { return input_weight_; }
  const GType *InputHiddenPrev() const { return input_hidden_prev_; }
  const GType *InputBias() const { return input_bias_; }

  const int &Activation() const { return activation_; }
  const int &GateActivation() const { return gate_activation_; }

  GType *OutGate() const { return output_gate_; }
  GType *OutResetHiddenPrev() const { return output_reset_hidden_prev_; }
  GType *OutHidden() const { return output_hidden_; }

 private:
  GType *input_input_;
  GType *input_hidden_prev_;
  GType *input_bias_;
  GType *input_weight_;

  GType *output_gate_;
  GType *output_reset_hidden_prev_;
  GType *output_hidden_;
  int activation_;
  int gate_activation_;
};
#endif

2997 2998 2999 3000 3001 3002 3003 3004
#ifdef FLATTEN_OP
// Parameters for the flatten operator: input/output tensors and the axis
// at which the input dimensions are split into the two output dimensions.
template <typename Dtype>
class FlattenParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  FlattenParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    axis = GetAttr<int>("axis", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  const int &Axis() const { return axis; }

 private:
  GType *input_x_;
  GType *out_;
  int axis;
};
#endif

#ifdef SPLIT_OP
// Parameters for the split operator: one input tensor split along `axis`
// into `num` equal parts or into the explicit `sections` sizes.
template <typename Dtype>
class SplitParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  SplitParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    outs_ = OutMultiFrom<GType>(outputs, *scope);
    axis = GetAttr<int>("axis", attrs);
    num = GetAttr<int>("num", attrs);
    sections = GetAttr<std::vector<int>>("sections", attrs);

    //    for (int i = 0; i < outs_.size(); ++i) {
    //      out_ts_.push_back(*scope.FindVar(outs_[i])->GetMutable());
    //    }
  }

  GType *InputX() const { return input_x_; }
  std::vector<GType *> Outs() const { return outs_; }
  int Axis() const { return axis; }
  int Num() const { return num; }
  std::vector<int> Sections() const { return sections; }
  //  std::vector<GType> OutTs() const { return out_ts_; }

 private:
  GType *input_x_;
  std::vector<GType *> outs_;
  int axis;
  int num;
  std::vector<int> sections;
//  std::vector<GType> out_ts_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::SplitArgs fpga_split_args;

 public:
  const fpga::SplitArgs &FpgaArgs() const { return fpga_split_args; }
  void SetFpgaArgs(const fpga::SplitArgs &args) { fpga_split_args = args; }
#endif
};
#endif

#ifdef BILINEAR_INTERP_OP
// Parameters for bilinear interpolation: input tensor, optional "OutSize"
// tensor, output tensor, and the target height/width attributes.
template <typename Dtype>
class BilinearInterpParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  BilinearInterpParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
                      Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_outsize_ = InputOutSizeFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    out_h_ = GetAttr<int>("out_h", attrs);
    out_w_ = GetAttr<int>("out_w", attrs);
  }

  const GType *InputX() const { return input_x_; }
  const GType *InputOutPutSize() const { return input_outsize_; }
  GType *Out() const { return out_; }
  int OutH() const { return out_h_; }
  int OutW() const { return out_w_; }

 private:
  GType *input_x_;
  GType *input_outsize_;
  GType *out_;
  int out_h_;
  int out_w_;
};
#endif

#ifdef NEAREST_INTERP_OP
// Parameters for nearest-neighbor interpolation. The target size may come
// from an optional "OutSize" input var, from "out_h"/"out_w" attributes
// (some exporters emit these names with a trailing space), or from a
// "scale" attribute — hence every member gets a defined default below.
template <typename Dtype>
class NearestInterpolationParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  NearestInterpolationParam(const VariableNameMap &inputs,
                            const VariableNameMap &outputs,
                            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    const bool has_out_size = HasVar("OutSize", inputs);

    if (has_out_size) {
      input_outsize_ = InputOutSizeFrom<GType>(inputs, *scope);
    }

    out_ = OutFrom<GType>(outputs, *scope);

    if (HasAttr("out_h", attrs)) {
      out_h_ = GetAttr<int>("out_h", attrs);
    } else if (HasAttr("out_h ", attrs)) {
      // some models hurts ....   attr with space ..
      out_h_ = GetAttr<int>("out_h ", attrs);
    }

    if (HasAttr("out_w", attrs)) {
      out_w_ = GetAttr<int>("out_w", attrs);
    } else if (HasAttr("out_w ", attrs)) {
      // some models hurts ....   attr with space ..
      out_w_ = GetAttr<int>("out_w ", attrs);
    }

    LOG(kLOG_DEBUG1) << "out_h_: " << out_h_;
    LOG(kLOG_DEBUG1) << "out_w_: " << out_w_;

    if (HasAttr("scale", attrs)) {
      has_scale_ = true;
      scale_ = GetAttr<float>("scale", attrs);
    }
    LOG(kLOG_DEBUG1) << "has_scale_:  " << has_scale_;
    LOG(kLOG_DEBUG1) << "scale_:  " << scale_;
  }
  const GType *InputX() const { return input_x_; }
  const GType *InputOutPutSize() const { return input_outsize_; }
  GType *Out() const { return out_; }
  int OutH() const { return out_h_; }
  int OutW() const { return out_w_; }
  float Scale() const { return scale_; }
  bool HasScale() const { return has_scale_; }

 private:
  GType *input_x_ = nullptr;
  // BUGFIX: all members below were left uninitialized when their optional
  // var/attr was absent, yet they are logged and exposed through accessors
  // (reading them was UB — has_scale_ in particular was never set to false).
  GType *input_outsize_ = nullptr;
  GType *out_ = nullptr;
  int out_h_ = 0;
  int out_w_ = 0;
  float scale_ = 0.f;
  bool has_scale_ = false;
};
#endif

#ifdef SHAPE_OP
// Parameters for the shape operator: the inspected input tensor and the
// output that receives its dimensions.
template <typename Dtype>
class ShapeParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  ShapeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
  }

  const GType *Input() const { return input_; }

  GType *Out() const { return out_; }

 private:
  GType *input_;
  GType *out_;
};
#endif

H
hjchen2 已提交
3185 3186 3187 3188 3189 3190 3191 3192
#ifdef TOP_K_OP
// Parameters for the top_k operator: input tensor, the top-k values
// output, the indices output, and k itself.
template <typename Dtype>
class TopKParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  TopKParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = OpParam::GetVarValue<GType>("X", inputs, *scope);
    output_ = OpParam::GetVarValue<GType>("Out", outputs, *scope);
    indices_ = OpParam::GetVarValue<GType>("Indices", outputs, *scope);
    k_ = OpParam::GetAttr<int>("k", attrs);
  }

 public:
  GType *input_;
  GType *output_;
  GType *indices_;
  int k_;
};
#endif  // TOP_K_OP

#ifdef CAST_OP
// Parameters for the cast operator: input/output tensors plus the source
// and destination dtype codes ("in_dtype"/"out_dtype").
template <typename Dtype>
class CastParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  CastParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = OpParam::GetVarValue<GType>("X", inputs, *scope);
    output_ = OpParam::GetVarValue<GType>("Out", outputs, *scope);
    input_type_ = OpParam::GetAttr<int>("in_dtype", attrs);
    output_type_ = OpParam::GetAttr<int>("out_dtype", attrs);
  }

 public:
  GType *input_;
  GType *output_;
  int input_type_;
  int output_type_;
};
#endif  // CAST_OP

3233
#ifdef QUANT_OP
// Parameters for the quantize operator. The scale is either computed
// online (written to the "OutScale" output) or provided offline through an
// optional "InScale" input.
template <typename Dtype>
class QuantizeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  QuantizeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    // online
    // scale = max(abs(x))
    online_scale_ = OpParam::GetVarValue<GType>("OutScale", outputs, *scope);
    // offline
    if (inputs.count("InScale")) {
      offline_ = true;
      offline_scale_ = OpParam::GetVarValue<GType>("InScale", inputs, *scope);
    }
    // x = round(scale * x)
    if (OpParam::HasAttr("round_type", attrs)) {
      round_type_ = OpParam::GetAttr<RoundType>("round_type", attrs);
    }
  }

 public:
  // op input
  GType *input_;
  // op output
  GType *output_;
  GType *online_scale_;
  // quantize offline scale
  // BUGFIX: was uninitialized when no "InScale" input exists; callers can
  // now safely null-check it (offline_ still gates its use).
  GType *offline_scale_ = nullptr;
  // if offine scale or not
  bool offline_ = false;
  // round method type
  // RoundType round_type_ = ROUND_NEAREST_AWAY_ZERO;
  RoundType round_type_ = ROUND_NEAREST_TOWARDS_ZERO;
};
#endif
3274

3275
#ifdef DEQUANT_OP
// Parameters for the dequantize operator. Dequantization divides by both
// the activation scale (the "Scale" input) and a constant weight scale
// taken from the "weight_scale" attr, falling back to "max_range".
template <typename Dtype>
class DequantizeParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  DequantizeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                  const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    activation_scale_ = OpParam::GetVarValue<GType>("Scale", inputs, *scope);
    // dequantization is performed as x = x / static_scale / online_scale
    if (OpParam::HasAttr("weight_scale", attrs)) {
      weight_scale_ = OpParam::GetAttr<float>("weight_scale", attrs);
    } else {
      weight_scale_ = OpParam::GetAttr<float>("max_range", attrs);
    }
  }

 public:
  // op input
  GType *input_;
  // op output
  GType *output_;
  GType *activation_scale_;
  float weight_scale_;
};
#endif
3305

3306 3307 3308 3309
#if defined(FUSION_DEQUANT_BN_OP) || defined(FUSION_DEQUANT_ADD_BN_OP) || \
    defined(FUSION_DEQUANT_ADD_BN_RELU_OP) ||                             \
    defined(FUSION_DEQUANT_BN_RELU_OP) ||                                 \
    defined(FUSION_DEQUANT_ADD_BN_QUANT_OP) ||                            \
    defined(FUSION_DEQUANT_ADD_BN_RELU_QUANT_OP)
// Dequantize fused with batch norm: adds the BN inputs on top of
// DequantizeParam.
template <typename Dtype>
class FusionDequantBNParam : public DequantizeParam<Dtype> {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  FusionDequantBNParam(const VariableNameMap &inputs,
                       const VariableNameMap &outputs,
                       const AttributeMap &attrs, Scope *scope)
      : DequantizeParam<Dtype>(inputs, outputs, attrs, scope) {
    // batch norm params
    bn_mean_ = OpParam::GetVarValue<GType>("BNMean", inputs, *scope);
    bn_variance_ = OpParam::GetVarValue<GType>("BNVariance", inputs, *scope);
    bn_scale_ = OpParam::GetVarValue<GType>("BNScale", inputs, *scope);
    bn_bias_ = OpParam::GetVarValue<GType>("BNBias", inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
  }

 public:
  // batch norm
  GType *bn_mean_;
  GType *bn_variance_;
  GType *bn_scale_;
  GType *bn_bias_;
  float epsilon_;
};
#endif

3339 3340 3341 3342
#if defined(FUSION_DEQUANT_ADD_BN_RELU_OP) ||  \
    defined(FUSION_DEQUANT_ADD_BN_OP) ||       \
    defined(FUSION_DEQUANT_ADD_BN_QUANT_OP) || \
    defined(FUSION_DEQUANT_ADD_BN_RELU_QUANT_OP)
// Dequantize + elementwise add + batch norm fusion: adds the elementwise-add
// inputs ("Y" bias and axis) on top of FusionDequantBNParam.
template <typename Dtype>
class FusionDequantAddBNParam : public FusionDequantBNParam<Dtype> {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  FusionDequantAddBNParam(const VariableNameMap &inputs,
                          const VariableNameMap &outputs,
                          const AttributeMap &attrs, Scope *scope)
      : FusionDequantBNParam<Dtype>(inputs, outputs, attrs, scope) {
    // element wise add params
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
  }

 public:
  // elementwise add
  int axis_;
  GType *bias_;
};
#endif

3365 3366 3367 3368 3369 3370 3371 3372 3373
#ifdef FUSION_DEQUANT_ADD_BN_QUANT_OP
// Dequantize + add + BN + requantize fusion: adds the quantize-stage scale
// inputs (mirroring QuantizeParam) on top of FusionDequantAddBNParam.
template <typename Dtype>
class FusionDequantAddBNQuantParam : public FusionDequantAddBNParam<Dtype> {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  FusionDequantAddBNQuantParam(const VariableNameMap &inputs,
                               const VariableNameMap &outputs,
                               const AttributeMap &attrs, Scope *scope)
      : FusionDequantAddBNParam<Dtype>(inputs, outputs, attrs, scope) {
    // Online requantize scale is produced into "OutScale".
    online_scale_ = OpParam::GetVarValue<GType>("OutScale", outputs, *scope);
    // An offline scale may be supplied via "InScale".
    if (inputs.count("InScale")) {
      offline_ = true;
      offline_scale_ = OpParam::GetVarValue<GType>("InScale", inputs, *scope);
    }
    // Requantization is x = round(scale * x); rounding mode is optional.
    if (OpParam::HasAttr("round_type", attrs)) {
      round_type_ = OpParam::GetAttr<RoundType>("round_type", attrs);
    }
  }

 public:
  GType *online_scale_;
  // quantize offline scale
  GType *offline_scale_;
  // whether an offline scale was provided
  bool offline_ = false;
  // round method type
  // RoundType round_type_ = ROUND_NEAREST_AWAY_ZERO;
  RoundType round_type_ = ROUND_NEAREST_TOWARDS_ZERO;
};
#endif

3401 3402 3403 3404 3405 3406 3407 3408 3409
#ifdef SEQUENCE_EXPAND_OP
// Parameters for sequence_expand: expands X according to the LoD of Y.
template <typename Dtype>
class SequenceExpandParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  SequenceExpandParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
                      Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    // Defaults to -1 when the "ref_level" attribute is absent.
    ref_level_ = -1;
    if (OpParam::HasAttr("ref_level", attrs)) {
      ref_level_ = OpParam::GetAttr<int>("ref_level", attrs);
    }
  }

 public:
  GType *input_x_;
  GType *input_y_;
  GType *output_;
  int ref_level_;
};
#endif  // SEQUENCE_EXPAND_OP

#ifdef SEQUENCE_POOL_OP
// Parameters for sequence_pool; the pooling strategy defaults to "MAX" when
// the "pooltype" attribute is absent.
template <typename Dtype>
class SequencePoolParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  SequencePoolParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    pool_type_ = "MAX";
    if (OpParam::HasAttr("pooltype", attrs)) {
      pool_type_ = OpParam::GetStringAttr("pooltype", attrs);
    }
  }

 public:
  GType *input_;
  GType *output_;
  std::string pool_type_;
};
#endif  // SEQUENCE_POOL_OP

3455 3456 3457 3458 3459 3460 3461 3462
#ifdef LOD_RESET_OP
// Parameters for lod_reset: the target LoD comes either from optional input
// "Y" or from the "target_lod" attribute.
template <typename Dtype>
class LodResetParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  LodResetParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    input_y_ = nullptr;
    if (inputs.count("Y")) {
      // Prefer the "Y" input as the LoD source when it is present ...
      input_y_ = InputYFrom<GType>(inputs, *scope);
    } else {
      // ... otherwise fall back to the "target_lod" attribute.
      target_lod_ = OpParam::GetAttr<vector<int>>("target_lod", attrs);
    }
    if (HasAttr("append", attrs)) {
      append = OpParam::GetAttr<bool>("append", attrs);
    }
  }

 public:
  GType *input_x_;
  GType *input_y_;
  GType *output_;
  std::vector<int> target_lod_;
  // Fix: default to false. Previously this was left uninitialized whenever
  // the "append" attribute was absent, so readers saw an indeterminate value.
  bool append = false;
};
#endif  // LOD_RESET_OP

3487 3488 3489 3490 3491 3492 3493 3494
#ifdef LESS_THAN_OP
// Parameters for elementwise comparison ops (currently less_than).
template <typename Dtype>
class CompareParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  CompareParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
  }

 public:
  GType *input_x_;
  GType *input_y_;
  GType *output_;
  int axis_;
};
#endif  // LESS_THAN_OP

Z
zhaojiaying01 已提交
3511
#if defined(LOGICAL_AND_OP) || defined(LOGICAL_OR_OP) || defined(LOGICAL_XOR_OP)
// Shared parameters for the two-operand logical ops (and / or / xor).
template <typename Dtype>
class LogicalBinaryParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  LogicalBinaryParam(const VariableNameMap &inputs,
                     const VariableNameMap &outputs, const AttributeMap &attrs,
                     Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
  }

  const GType *InputX() const { return input_x_; }
  const GType *InputY() const { return input_y_; }
  GType *Out() const { return output_; }

 public:
  GType *input_x_;
  GType *input_y_;
  GType *output_;
};
#endif  // LOGICAL_AND_OP LOGICAL_OR_OP LOGICAL_XOR_OP
3537 3538 3539

#ifdef LOGICAL_NOT_OP
// Parameters for the one-operand logical op (not).
template <typename Dtype>
class LogicalUnaryParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  LogicalUnaryParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
  }

  const GType *InputX() const { return input_x_; }
  GType *Out() const { return output_; }

 public:
  GType *input_x_;
  GType *output_;
};
#endif  // LOGICAL_NOT_OP

3562 3563 3564
#ifdef WRITE_TO_ARRAY_OP
// Parameters for write_to_array: writes tensor "X" into tensor-array "Out"
// at position "I".
template <typename Dtype>
class WriteToArrayParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  WriteToArrayParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = OpParam::GetVarValue<GType>("X", inputs, *scope);
    index_ = OpParam::GetVarValue<GType>("I", inputs, *scope);
    output_ = OpParam::GetVarValue<std::vector<GType>>("Out", outputs, *scope);
  }

 public:
  GType *input_;
  GType *index_;
  std::vector<GType> *output_;
};
#endif

#ifdef READ_FROM_ARRAY_OP
// Parameters for read_from_array: reads the tensor at position "I" from
// tensor-array "X" into "Out".
template <typename Dtype>
class ReadFromArrayParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  ReadFromArrayParam(const VariableNameMap &inputs,
                     const VariableNameMap &outputs, const AttributeMap &attrs,
                     Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = OpParam::GetVarValue<std::vector<GType>>("X", inputs, *scope);
    index_ = OpParam::GetVarValue<GType>("I", inputs, *scope);
    output_ = OpParam::GetVarValue<GType>("Out", outputs, *scope);
  }

 public:
  std::vector<GType> *input_;
  GType *index_;
  GType *output_;
};
#endif

Z
zhaojiaying01 已提交
3608 3609 3610 3611 3612 3613 3614 3615
#ifdef IS_EMPTY_OP
// Parameters for is_empty: a single input "X" and a single output "Out".
template <typename Dtype>
class IsEmptyParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  IsEmptyParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
  }

  const GType *InputX() const { return input_x_; }
  GType *Out() const { return output_; }

 public:
  GType *input_x_;
  GType *output_;
};
#endif  // IS_EMPTY_OP

#ifdef INCREMENT_OP
// Parameters for increment: Out = X + step, with step taken from the "step"
// attribute.
template <typename Dtype>
class IncrementParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  IncrementParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                 const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    step_ = OpParam::GetAttr<float>("step", attrs);
  }

  const GType *InputX() const { return input_x_; }
  GType *Out() const { return output_; }
  float Step() const { return step_; }

 public:
  GType *input_x_;
  GType *output_;
  float step_;
};
#endif  // INCREMENT_OP
3656 3657
#ifdef PAD2D_OP
// Parameters for pad2d: pads the input with "pad_value" according to
// "paddings", using the strategy named by the "mode" attribute.
template <typename Dtype>
class Pad2DParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  Pad2DParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    paddings_ = OpParam::GetAttr<std::vector<int>>("paddings", attrs);
    pad_value_ = OpParam::GetAttr<float>("pad_value", attrs);
    mode_ = OpParam::GetStringAttr("mode", attrs);
    // Fix: label and value were concatenated ("modeconstant"); add a
    // separator so the debug log is readable.
    DLOG << "pad2d mode: " << mode_;
  }
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return out_; }

  std::vector<int> paddings_;
  float pad_value_;
  std::string mode_;

 private:
  GType *input_x_;
  GType *out_;
};
#endif
H
Huie 已提交
3685 3686 3687 3688 3689
#ifdef EXP_OP
// Parameters for the exp op: a single input "X" and a single output "Out".
template <typename Dtype>
class EXPParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  EXPParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
  }
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return out_; }

 private:
  GType *input_x_;
  GType *out_;
};
#endif
3706 3707 3708 3709 3710 3711 3712 3713 3714 3715 3716 3717 3718 3719 3720 3721 3722 3723 3724 3725 3726 3727 3728 3729 3730 3731 3732 3733 3734 3735

#ifdef PIXEL_SHUFFLE_OP
// Parameters for pixel_shuffle; the rearrangement factor comes from the
// "upscale_factor" attribute.
template <typename Dtype>
class PixelShuffleParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  PixelShuffleParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    upscale_factor_ = GetAttr<int>("upscale_factor", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  const int &upscale_factor() const { return upscale_factor_; }

 private:
  GType *input_x_;
  GType *out_;
  int upscale_factor_;
};
#endif

3736
#ifdef GRID_SAMPLER_OP
// Parameters for grid_sampler: samples input "X" at the coordinates given by
// input "Grid", producing "Output".
template <typename Dtype>
class GridSamplerParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  GridSamplerParam(const VariableNameMap &inputs,
                   const VariableNameMap &outputs, const AttributeMap &attrs,
                   Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    grid_ = GridFrom<GType>(inputs, *scope);
    output_ = OutputFrom<GType>(outputs, *scope);
  }

  const GType *InputX() const { return input_x_; }
  const GType *Grid() const { return grid_; }

  GType *Output() const { return output_; }

 private:
  GType *input_x_;
  GType *grid_;
  GType *output_;
};
#endif

3764
#ifdef EXPAND_OP
// Parameters for expand: tiles input "X" by the per-dimension factors in the
// "expand_times" attribute.
template <typename Dtype>
class ExpandParam : public OpParam {
  using GType = typename DtypeTensorTrait<Dtype>::gtype;
  using RType = typename DtypeTensorTrait<Dtype>::rtype;

 public:
  ExpandParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
              const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    expand_times = OpParam::GetAttr<std::vector<int>>("expand_times", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  // NOTE: name kept without trailing underscore for compatibility with
  // existing kernels that access this member directly.
  std::vector<int> expand_times;

 private:
  GType *input_x_;
  GType *out_;
};

#endif
朔-望's avatar
朔-望 已提交
3791 3792
}  // namespace operators
}  // namespace paddle_mobile