/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <cassert>  // assert() is used by the multi-variable helpers below
#include <memory>
#include <string>
#include <vector>
#include "common/log.h"
#include "common/type_define.h"
#include "common/types.h"
#include "framework/attribute.h"
#include "framework/lod_tensor.h"
#include "framework/scope.h"
#include "framework/tensor.h"
#include "framework/type_trait.h"
#include "framework/variable.h"

#ifdef PADDLE_MOBILE_FPGA_V1
#include "fpga/V1/api.h"
#endif

#ifdef PADDLE_MOBILE_FPGA_V2
#include "fpga/V2/api.h"
#endif

#ifdef PADDLE_MOBILE_FPGA_KD
#include "fpga/KD/context.hpp"
#endif

#ifdef PADDLE_MOBILE_CL
#include "framework/cl/cl_image.h"
#endif

namespace paddle_mobile {
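// This header declares OpParam, the base class that gives operator kernels
// uniform access to an op's named inputs, outputs, attributes and Scope,
// plus one parameter class per supported operator (ConvParam, PoolParam,
// BatchNormParam, ...).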
namespace operators {

using framework::Attribute;
using framework::AttributeMap;
using framework::LoDTensor;
using framework::Scope;
using framework::Tensor;
using framework::Variable;
using std::string;
using std::vector;

using framework::DtypeTensorTrait;

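// Deleter for the std::shared_ptr members below (e.g. BatchNormParam's
// new_scale_): in OpenCL builds it destroys the pointee as a
// framework::CLImage; in all other builds it is a no-op.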
template <typename Dtype>
class CLImageDeleter {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;

 public:
  void operator()(GType *ptr) {
#ifdef PADDLE_MOBILE_CL
    framework::CLImage *image = dynamic_cast<framework::CLImage *>(ptr);
    if (image) {
      delete image;
    }
#endif
  }
};

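// Base class for all operator parameter objects: it stores the enclosing
// Scope and provides static helpers for resolving an op's named inputs,
// outputs and attributes.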
class OpParam {
 public:
  OpParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
          const AttributeMap &attrs, Scope *scope)
      : scope_(scope) {}

  Scope *GetScope() const { return scope_; }
  Scope *scope_ = nullptr;

#ifdef PADDLE_MOBILE_FPGA_KD
  zynqmp::Context &context() { return context_; }

  zynqmp::Context context_;
#endif

 protected:
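  // The Input*From / Output*From helpers below fetch the variable registered
  // under a fixed key (e.g. "X", "Out") in an op's VariableNameMap and return
  // it as the tensor type requested by the caller.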
  template <typename T>
  static T *InputH0From(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("H0", inputs, scope);
  }

  template <typename T>
  static T *InputHiddenPrevFrom(const VariableNameMap &inputs,
                                const Scope &scope) {
    return GetVarValue<T>("HiddenPrev", inputs, scope);
  }

  template <typename T>
  static T *InputAlphaFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Alpha", inputs, scope);
  }

  template <typename T>
  static T *InputFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Input", inputs, scope);
  }

  template <typename T>
  static T *InputXFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("X", inputs, scope);
  }
  template <typename T>
  static T *InputOutSizeFrom(const VariableNameMap &inputs,
                             const Scope &scope) {
    return GetVarValue<T>("OutSize", inputs, scope);
  }

  template <typename T>
  static T *InputWFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("W", inputs, scope);
  }

  template <typename T>
  static T *InputIdsFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Ids", inputs, scope);
  }

  template <typename T>
  static T *InputEmissionFrom(const VariableNameMap &inputs,
                              const Scope &scope) {
    return GetVarValue<T>("Emission", inputs, scope);
  }

  template <typename T>
  static T *InputTransitionFrom(const VariableNameMap &inputs,
                                const Scope &scope) {
    return GetVarValue<T>("Transition", inputs, scope);
  }
  template <typename T>
  static T *InputLabelFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Label", inputs, scope);
  }

  template <typename T>
  static T *InputXFrom1(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue1<T>("addX", inputs, scope);
  }

  template <typename T>
  static T *InputYFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Y", inputs, scope);
  }

  template <typename T>
  static T *InputYFrom1(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue1<T>("Y", inputs, scope);
  }

  template <typename T>
  static T *InputZFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Z", inputs, scope);
  }

  template <typename T>
  static T *InputBiasFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Bias", inputs, scope);
  }
  template <typename T>
  static T *InputWeightFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Weight", inputs, scope);
  }
  template <typename T>
  static T *InputVarianceFrom(const VariableNameMap &inputs,
                              const Scope &scope) {
    return GetVarValue<T>("Variance", inputs, scope);
  }
  template <typename T>
  static T *InputMeanFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Mean", inputs, scope);
  }
  template <typename T>
  static T *InputScaleFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Scale", inputs, scope);
  }
  template <typename T>
  static T *InputImageFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Image", inputs, scope);
  }
  template <typename T>
  static T *InputPriorBoxFrom(const VariableNameMap &inputs,
                              const Scope &scope) {
    return GetVarValue<T>("PriorBox", inputs, scope);
  }
  template <typename T>
  static T *InputPriorBoxVarFrom(const VariableNameMap &inputs,
                                 const Scope &scope) {
    return GetVarValue<T>("PriorBoxVar", inputs, scope);
  }
  // LoDTensor but now use Tensor
  template <typename T>
  static T *InputTargetBoxFrom(const VariableNameMap &inputs,
                               const Scope &scope) {
    return GetVarValue<T>("TargetBox", inputs, scope);
  }

  template <typename T>
  static T *InputBBoxesFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("BBoxes", inputs, scope);
  }

  template <typename T>
  static T *InputScoresFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Scores", inputs, scope);
  }

  template <typename T>
  static T *InputShapeFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Shape", inputs, scope);
  }

  template <typename T>
  static vector<T *> InputMultiFrom(const VariableNameMap &inputs,
                                    const Scope &scope) {
    return GetMultiVarValue<T>("X", inputs, scope);
  }

  static vector<Variable *> InputMultiVarsFrom(const VariableNameMap &inputs,
                                               const Scope &scope) {
    return GetMultiVar("X", inputs, scope);
  }

  template <typename T>
  static T *OutputBatchGateFrom(const VariableNameMap &outputs,
                                const Scope &scope) {
    return GetVarValue<T>("BatchGate", outputs, scope);
  }

  template <typename T>
  static T *OutputGateFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Gate", outputs, scope);
  }

  template <typename T>
  static T *OutputViterbiPathFrom(const VariableNameMap &outputs,
                                  const Scope &scope) {
    return GetVarValue<T>("ViterbiPath", outputs, scope);
  }
  template <typename T>
  static T *OutputBatchResetHiddenPrevFrom(const VariableNameMap &outputs,
                                           const Scope &scope) {
    return GetVarValue<T>("BatchResetHiddenPrev", outputs, scope);
  }

  template <typename T>
  static T *OutputResetHiddenPrevFrom(const VariableNameMap &outputs,
                                      const Scope &scope) {
    return GetVarValue<T>("ResetHiddenPrev", outputs, scope);
  }

  template <typename T>
  static T *OutputBatchHiddenFrom(const VariableNameMap &outputs,
                                  const Scope &scope) {
    return GetVarValue<T>("BatchHidden", outputs, scope);
  }

  template <typename T>
  static T *OutputHiddenFrom(const VariableNameMap &outputs,
                             const Scope &scope) {
    return GetVarValue<T>("Hidden", outputs, scope);
  }

  template <typename T>
  static T *OutputFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Output", outputs, scope);
  }

  static Variable *OutVarFrom(const VariableNameMap &outputs,
                              const Scope &scope) {
    return GetVar("Out", outputs, scope);
  }

  template <typename T>
  static T *OutFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Out", outputs, scope);
  }

  template <typename T>
  static vector<T *> OutMultiFrom(const VariableNameMap &outputs,
                                  const Scope &scope) {
    return GetMultiVarValue<T>("Out", outputs, scope);
  }

  template <typename T>
  static T *OutputYFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Y", outputs, scope);
  }

  template <typename T>
  static T *OutputXShapeFrom(const VariableNameMap &outputs,
                             const Scope &scope) {
    return GetVarValue<T>("XShape", outputs, scope);
  }

  template <typename T>
  static T *OutputBoxesFrom(const VariableNameMap &outputs,
                            const Scope &scope) {
    return GetVarValue<T>("Boxes", outputs, scope);
  }

  template <typename T>
  static T *OutputBoxFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("OutputBox", outputs, scope);
  }

  template <typename T>
  static T *OutputNormFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Norm", outputs, scope);
  }

  template <typename T>
  static T *OutputVariancesFrom(const VariableNameMap &outputs,
                                const Scope &scope) {
    return GetVarValue<T>("Variances", outputs, scope);
  }

  template <typename T>
  static T *MidOutFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("MidOut", outputs, scope);
  }

  template <typename T>
  static T *FilterFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Filter", inputs, scope);
  }

  template <typename T>
  static T *GridFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Grid", inputs, scope);
  }

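  // Attribute helpers: GetAttr reads a typed attribute value, GetStringAttr
  // reads a string attribute, and HasAttr probes for optional attributes
  // before they are read.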
  template <typename T>
  static const T GetAttr(const string &key, const AttributeMap &map) {
    return ((Attribute)map.at(key)).Get<T>();
  }
  static const std::string GetStringAttr(const string &key,
                                         const AttributeMap &map) {
    return ((Attribute)map.at(key)).GetString();
  }

  static const bool HasAttr(const string &key, const AttributeMap &map) {
    return map.count(key) > 0;
  }

  template <typename T>
  static T *GetVarValue(const string &key, const VariableNameMap &var_map,
                        const Scope &scope) {
    PADDLE_MOBILE_ENFORCE(var_map.count(key) > 0,
                          "%s is not contained in var_map", key.c_str())
    auto var_vec = var_map.at(key);
    if (!var_vec.empty()) {
      auto var = scope.FindVar(var_vec[0]);
      return var->GetMutable<T>();
    } else {
      return nullptr;
    }
  }

  static Variable *GetVar(const string &key, const VariableNameMap &var_map,
                          const Scope &scope) {
    PADDLE_MOBILE_ENFORCE(var_map.count(key) > 0,
                          "%s is not contained in var_map", key.c_str())
    auto var_vec = var_map.at(key);
    if (!var_vec.empty()) {
      auto var = scope.FindVar(var_vec[0]);
      return var;
    } else {
      return nullptr;
    }
  }

  static std::string Getkey(const string &key, const VariableNameMap &var_map,
                            int index) {
    PADDLE_MOBILE_ENFORCE(var_map.count(key) > index,
                          "%s is not contained in var_map", key.c_str())
    auto var_vec = var_map.at(key);
    return var_vec[index];
  }

  template <typename T>
  static T *GetVarValue1(const string &key, const VariableNameMap &var_map,
                         const Scope &scope) {
    PADDLE_MOBILE_ENFORCE(var_map.count(key) > 0,
                          "%s is not contained in var_map", key.c_str())
    auto var_vec = var_map.at(key);
    // var_vec[1] is read below, so require at least two entries rather than
    // just a non-empty vector.
    if (var_vec.size() > 1) {
      auto var = scope.FindVar(var_vec[1]);
      return var->GetMutable<T>();
    } else {
      return nullptr;
    }
  }

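  // Multi-variable lookups used by ops with list-valued slots (e.g. concat,
  // sum). Note that both helpers assert the key maps to more than one name.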
  template <typename T>
  static vector<T *> GetMultiVarValue(const string &key,
                                      const VariableNameMap &var_map,
                                      const Scope &scope) {
    auto var_vecs = var_map.at(key);
    assert(var_vecs.size() > 1);
    vector<T *> var_res;
    for (auto &var_vec : var_vecs) {
      auto var = scope.FindVar(var_vec);
      var_res.push_back(var->GetMutable<T>());
    }
    return var_res;
  }

  static vector<Variable *> GetMultiVar(const string &key,
                                        const VariableNameMap &var_map,
                                        const Scope &scope) {
    auto var_vecs = var_map.at(key);
    assert(var_vecs.size() > 1);
    vector<Variable *> var_res;
    for (auto &var_vec : var_vecs) {
      auto var = scope.FindVar(var_vec);
      var_res.push_back(var);
    }
    return var_res;
  }
};

#define GET_VAR_AS_TENSOR(name, name_dict, scope) \
  OpParam::GetVarValue<framework::Tensor>(name, name_dict, scope)

#define GET_VAR_AS_LOD_TENSOR(name, name_dict, scope) \
  OpParam::GetVarValue<framework::LoDTensor>(name, name_dict, scope)
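
// Minimal usage sketch, valid inside a class derived from OpParam (the
// lookup helpers are protected). "my_input" is a hypothetical variable name
// taken from an op description:
//
//   VariableNameMap in_map;
//   in_map["X"] = {"my_input"};
//   framework::Tensor *x = GET_VAR_AS_TENSOR("X", in_map, *scope);
//
// GetVarValue returns nullptr when no variable is listed under the key.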

template <typename Dtype>
class ConvParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ConvParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    filter_ = OpParam::FilterFrom<GType>(inputs, *scope);
    input_ = OpParam::InputFrom<GType>(inputs, *scope);
    if (outputs.count("Output")) {
      output_ = OpParam::OutputFrom<GType>(outputs, *scope);
    }
    strides_ = OpParam::GetAttr<vector<int>>("strides", attrs);
    paddings_ = OpParam::GetAttr<vector<int>>("paddings", attrs);
    dilations_ = OpParam::GetAttr<vector<int>>("dilations", attrs);
    groups = OpParam::GetAttr<int>("groups", attrs);
  }

  const GType *Input() const { return input_; }

  GType *Filter() const { return filter_; }

  GType *Output() const { return output_; }

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

  const vector<int> &Dilations() const { return dilations_; }

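  // ExecMode enumerates the concrete convolution implementations a kernel
  // may dispatch to at runtime (GEMM, Winograd, depthwise, sliding-window,
  // ... in float or int8 variants).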
  enum ExecMode {
    EXEC_INVALID = 0,
    EXEC_GEMM_FLOAT,
    EXEC_DEPTHWISE3x3S1_FLOAT,
    EXEC_DEPTHWISE3x3S2_FLOAT,
    EXEC_WINOGRAD3X3_FLOAT,
    EXEC_WINOGRAD5X5_FLOAT,
    EXEC_DEPTHWISE5x5_FLOAT,
    EXEC_GEMM_INT8,
    EXEC_DEPTHWISE3x3_INT8,
    EXEC_DEPTHWISE5x5_INT8,
    EXEC_SLIDINGWINDOW3x3S1_FLOAT,
    EXEC_SLIDINGWINDOW3x3S2_FLOAT,
    EXEC_DEPTHWISE3x3_FLOAT,
    EXEC_SLIDINGWINDOW1x1_FLOAT,
    EXEC_SLIDINGWINDOW3x3_FLOAT,
    EXEC_SLIDINGWINDOW5x5_FLOAT,
    EXEC_SLIDINGWINDOW7x7_FLOAT,
    EXEC_GEMM1x1s1_FLOAT,
    EXEC_DEPTHWISEBASIC_FLOAT,
  };

  ExecMode &ExecMode() const { return exec_mode_; }

  const int &Groups() const { return groups; }

#ifdef PADDLE_MOBILE_CL
  int Offset() const { return offset_; }

  void SetOffset(int in_offset) { offset_ = in_offset; }

#endif

 public:
  GType *input_;
  GType *output_;
  GType *filter_;
  GType *transformed_filter_;
  vector<int> strides_;
  vector<int> paddings_;
  vector<int> dilations_;
  mutable enum ExecMode exec_mode_;
  int groups;

#ifdef PADDLE_MOBILE_CL
  int offset_;
#endif

#ifdef PADDLE_MOBILE_FPGA

 public:
  fpga::SplitConvArgs fpga_conv_args;

 public:
  const fpga::SplitConvArgs &FpgaArgs() const { return fpga_conv_args; }
  void SetFpgaArgs(const fpga::SplitConvArgs &args) { fpga_conv_args = args; }

 public:
  fpga::DWconvArgs fpga_dwconv_args;

 public:
  const fpga::DWconvArgs &FpgaDwconvArgs() const { return fpga_dwconv_args; }
  void SetFpgaArgs(const fpga::DWconvArgs &args) { fpga_dwconv_args = args; }
#endif
};
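
// Sketch of constructing a param object on the kernel side (illustrative
// only; assumes hypothetical accessors op->Inputs()/Outputs()/Attrs() and a
// Scope* named scope):
//
//   ConvParam<CPU> param(op->Inputs(), op->Outputs(), op->Attrs(), scope);
//   const vector<int> &strides = param.Strides();
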
template <typename Dtype>
Print &operator<<(Print &printer, const ConvParam<Dtype> &conv_param);

template <typename Dtype>
class ElementwiseAddParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ElementwiseAddParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
                      Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    axis_ = GetAttr<int>("axis", attrs);
  }

  const GType *InputX() const { return input_x_; }

  const GType *InputY() const { return input_y_; }

  GType *Out() const { return out_; }

  const int &Axis() const { return axis_; }

 private:
  GType *input_x_;
  GType *input_y_;
  GType *out_;
  int axis_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::EWAddArgs fpga_EW_add_args;

 public:
  const fpga::EWAddArgs &FpgaArgs() const { return fpga_EW_add_args; }
  void SetFpgaArgs(const fpga::EWAddArgs &args) { fpga_EW_add_args = args; }

 public:
  Tensor float_input_x, float_out;

#endif
};

#ifdef ELEMENTWISEMUL_OP
template <typename Dtype>
class ElementwiseMulParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ElementwiseMulParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
                      Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    axis_ = GetAttr<int>("axis", attrs);
  }

  const GType *InputX() const { return input_x_; }

  const GType *InputY() const { return input_y_; }

  GType *Out() const { return out_; }

  const int &Axis() const { return axis_; }

 private:
  GType *input_x_;
  GType *input_y_;
  GType *out_;
  int axis_;
#ifdef PADDLE_MOBILE_FPGA

 public:
  Tensor float_input_x, float_out;

#endif
};
#endif

#ifdef FUSION_ELEMENTWISEADDRELU_OP
template <typename Dtype>
using ElementwiseAddReluParam = ElementwiseAddParam<Dtype>;
#endif

#ifdef ELEMENTWISESUB_OP
template <typename Dtype>
class ElementwiseSubParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ElementwiseSubParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
                      Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    axis_ = GetAttr<int>("axis", attrs);
  }

  const GType *InputX() const { return input_x_; }

  const GType *InputY() const { return input_y_; }

  GType *Out() const { return out_; }

  const int &Axis() const { return axis_; }

 private:
  GType *input_x_;
  GType *input_y_;
  GType *out_;
  int axis_;
};
#endif

#ifdef MUL_OP
template <typename Dtype>
class MulParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  MulParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    x_num_col_dims_ = GetAttr<int>("x_num_col_dims", attrs);
    y_num_col_dims_ = GetAttr<int>("y_num_col_dims", attrs);
  }

  GType *InputX() const { return input_x_; }

  GType *InputY() const { return input_y_; }

  GType *Out() const { return out_; }

  const int &XNumColDims() const { return x_num_col_dims_; }

  const int &YNumColDims() const { return y_num_col_dims_; }

 private:
  GType *input_x_;
  GType *input_y_;
  GType *out_;
  int x_num_col_dims_;
  int y_num_col_dims_;
};
#endif

#ifdef CONCAT_OP
template <typename Dtype>
class ConcatParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ConcatParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
              const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    inputs_ = InputMultiFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    axis_ = GetAttr<int>("axis", attrs);
    original_output_dims_size_ = out_->dims().size();
  }

  vector<GType *> Inputs() const { return inputs_; }

  GType *Out() const { return out_; }

  const int &Axis() const { return axis_; }

 public:
  vector<GType *> inputs_;
  GType *out_;
  int axis_;
  int original_output_dims_size_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::ConcatArgs fpga_concat_args;

 public:
  const fpga::ConcatArgs &FpgaArgs() const { return fpga_concat_args; }
  void SetFpgaArgs(const fpga::ConcatArgs &args) { fpga_concat_args = args; }
#endif
};
#endif

#ifdef SUM_OP
template <typename Dtype>
class SumParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SumParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    inputs_vars_ = InputMultiVarsFrom(inputs, *scope);
    out_var_ = OutVarFrom(outputs, *scope);
    inputs_ = InputMultiFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
  }

  vector<Variable *> InputsVars() const { return inputs_vars_; }

  Variable *OutVar() const { return out_var_; }

  vector<GType *> Inputs() const { return inputs_; }

  GType *Out() const { return out_; }

 private:
  vector<Variable *> inputs_vars_;
  Variable *out_var_;
  vector<GType *> inputs_;
  GType *out_;
};
#endif

#ifdef LRN_OP
template <typename Dtype>
class LrnParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  LrnParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    mid_out_ = MidOutFrom<GType>(outputs, *scope);
    n_ = GetAttr<int>("n", attrs);
    alpha_ = GetAttr<float>("alpha", attrs);
    beta_ = GetAttr<float>("beta", attrs);
    k_ = GetAttr<float>("k", attrs);
    data_format_ = GetStringAttr("data_format", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  GType *MidOut() const { return mid_out_; }

  const int &N() const { return n_; }

  const float &Alpha() const { return alpha_; }

  const float &Beta() const { return beta_; }

  const float &K() const { return k_; }

  const string &DataFormat() const { return data_format_; }

 private:
  GType *input_x_;
  GType *out_;
  GType *mid_out_;
  int n_;
  float alpha_;
  float beta_;
  float k_;
  string data_format_;
};
#endif

#ifdef NORM_OP
template <typename Dtype>
class NormParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  NormParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    output_norm_ = OutputNormFrom<GType>(outputs, *scope);
    epsilon_ = GetAttr<float>("epsilon", attrs);
    axis_ = GetAttr<int>("axis", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  GType *OutputNorm() const { return output_norm_; }

  const float &Epsilon() const { return epsilon_; }

  const int &Axis() const { return axis_; }

 private:
  GType *input_x_;
  GType *out_;
  GType *output_norm_;
  float epsilon_;
  int axis_;
};
#endif

#ifdef BATCHNORM_OP
template <typename Dtype>
class BatchNormParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  BatchNormParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                 const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    output_y_ = OutputYFrom<GType>(outputs, *scope);
    input_bias_ = InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = GetAttr<float>("epsilon", attrs);
    momentum_ = GetAttr<float>("momentum", attrs);
    //    is_test_ = GetAttr<bool>("is_test", attrs);
  }

  ~BatchNormParam() {}

  const GType *InputX() const { return input_x_; }

  GType *OutputY() const { return output_y_; }

  const GType *InputBias() const { return input_bias_; }

  const GType *InputMean() const { return input_mean_; }

  const GType *InputScale() const { return input_scale_; }

  const GType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  const bool &IsTest() const { return is_test_; }

  const string &DataFormat() const { return data_format_; }

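  // new_scale_/new_bias_ own tensors that a kernel may attach later; the
  // CLImageDeleter deleter lets the shared_ptr destroy them correctly as
  // CLImage objects in OpenCL builds.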
  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const GType *NewScale() const { return new_scale_.get(); }

  const GType *NewBias() const { return new_bias_.get(); }

 private:
  GType *input_x_;
  GType *output_y_;
  GType *input_bias_;
  GType *input_mean_;
  GType *input_scale_;
  GType *input_variance_;
  float epsilon_;
  float momentum_;
  bool is_test_;
  string data_format_;
  std::shared_ptr<GType> new_bias_;
  std::shared_ptr<GType> new_scale_;
};
#endif

#ifdef INSTANCENORM_OP
template <typename Dtype>
class InstanceNormParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  InstanceNormParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    output_y_ = OutputYFrom<GType>(outputs, *scope);
    epsilon_ = GetAttr<float>("epsilon", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *OutputY() const { return output_y_; }

  const float &Epsilon() const { return epsilon_; }

 private:
  GType *input_x_;
  GType *output_y_;
  float epsilon_;
};
#endif

#ifdef FUSION_INSTANCENORM_RELU_OP
template <typename Dtype>
class FusionInstanceNormReluParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionInstanceNormReluParam(const VariableNameMap &inputs,
                              const VariableNameMap &outputs,
                              const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    epsilon_ = GetAttr<float>("epsilon", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  const float &Epsilon() const { return epsilon_; }

 private:
  GType *input_x_;
  GType *out_;
  float epsilon_;
};
#endif

#ifdef POOL_OP
template <typename Dtype>
class PoolParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  PoolParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputXFrom<GType>(inputs, *scope);

    output_ = OutFrom<GType>(outputs, *scope);
    pooling_type_ = GetStringAttr("pooling_type", attrs);
    ksize_ = GetAttr<vector<int>>("ksize", attrs);
    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
    ceil_mode_ = GetAttr<bool>("ceil_mode", attrs);
    global_pooling_ = GetAttr<bool>("global_pooling", attrs);

    if (HasAttr("exclusive", attrs)) {
      exclusive_ = GetAttr<bool>("exclusive", attrs);
    } else {
      exclusive_ = true;
    }
  }

  const GType *Input() const { return input_; }

  GType *Output() const { return output_; }

  const string &PoolingType() const { return pooling_type_; }

  const vector<int> &Ksize() const { return ksize_; }

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

  bool isCeilMode() const { return ceil_mode_; }

  bool isGlobalPooling() const { return global_pooling_; }

  bool isExclusive() const { return exclusive_; }

 private:
  GType *input_;
  GType *output_;
  string pooling_type_;
  vector<int> ksize_;
  vector<int> strides_;
  vector<int> paddings_;
  bool ceil_mode_;
  bool global_pooling_ = false;
  bool exclusive_ = true;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::PoolingArgs fpga_pool_args;

 public:
  const fpga::PoolingArgs &FpgaArgs() const { return fpga_pool_args; }
  void SetFpgaArgs(const fpga::PoolingArgs &args) { fpga_pool_args = args; }
#endif
};
#endif

#ifdef PRIORBOX_OP
template <typename Dtype>
class PriorBoxParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  PriorBoxParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputFrom<GType>(inputs, *scope);
    input_image_ = InputImageFrom<GType>(inputs, *scope);
    output_boxes_ = OutputBoxesFrom<GType>(outputs, *scope);
    output_variances_ = OutputVariancesFrom<GType>(outputs, *scope);
    min_sizes_ = GetAttr<vector<float>>("min_sizes", attrs);
    max_sizes_ = GetAttr<vector<float>>("max_sizes", attrs);
    aspect_ratios_ = GetAttr<vector<float>>("aspect_ratios", attrs);
    variances_ = GetAttr<vector<float>>("variances", attrs);

    if (HasAttr("min_max_aspect_ratios_order", attrs)) {
      min_max_aspect_ratios_order_ =
          GetAttr<bool>("min_max_aspect_ratios_order", attrs);
    } else {
      min_max_aspect_ratios_order_ = false;
    }
    flip_ = GetAttr<bool>("flip", attrs);
    clip_ = GetAttr<bool>("clip", attrs);
    step_w_ = GetAttr<float>("step_w", attrs);
    step_h_ = GetAttr<float>("step_h", attrs);
    offset_ = GetAttr<float>("offset", attrs);
  }
  const GType *Input() const { return input_; }

  const GType *InputImage() const { return input_image_; }

  GType *OutputBoxes() const { return output_boxes_; }

  GType *OutputVariances() const { return output_variances_; }

  const vector<float> &MinSizes() const { return min_sizes_; }

  const vector<float> &MaxSizes() const { return max_sizes_; }

  const vector<float> &AspectRatios() const { return aspect_ratios_; }

  const vector<float> &Variances() const { return variances_; }

  const bool &Flip() const { return flip_; }

  const bool &Clip() const { return clip_; }

  const float &StepW() const { return step_w_; }

  const float &StepH() const { return step_h_; }

  const float &Offset() const { return offset_; }

  const bool &MinMaxAspectRatiosOrder() const {
    return min_max_aspect_ratios_order_;
  }

 private:
  GType *input_;
  GType *input_image_;
  GType *output_boxes_;
  GType *output_variances_;
  vector<float> min_sizes_;
  vector<float> max_sizes_;
  vector<float> aspect_ratios_;
  vector<float> variances_;
  bool flip_;
  bool clip_;
  float step_w_;
  float step_h_;
  float offset_;
  bool min_max_aspect_ratios_order_;
};
#endif

#ifdef BOXCODER_OP
template <typename Dtype>
class BoxCoderParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  BoxCoderParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_priorbox_ = InputPriorBoxFrom<GType>(inputs, *scope);
    input_priorboxvar_ = InputPriorBoxVarFrom<GType>(inputs, *scope);
    input_targetbox_ = InputTargetBoxFrom<GType>(inputs, *scope);
    output_box_ = OutputBoxFrom<GType>(outputs, *scope);
    code_type_ = GetStringAttr("code_type", attrs);
  }
  const GType *InputPriorBox() const { return input_priorbox_; }

  const GType *InputPriorBoxVar() const { return input_priorboxvar_; }

  const GType *InputTargetBox() const { return input_targetbox_; }

  GType *OutputBox() const { return output_box_; }

  const std::string &CodeType() const { return code_type_; }

 private:
  GType *input_priorbox_;
  GType *input_priorboxvar_;
  GType *input_targetbox_;
  GType *output_box_;
  std::string code_type_;
};
#endif

#ifdef SOFTMAX_OP
template <typename Dtype>
class SoftmaxParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SoftmaxParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
  }
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return out_; }

 private:
  GType *input_x_;
  GType *out_;

#ifdef PADDLE_MOBILE_FPGA

#ifdef PADDLE_MOBILE_FPGA_V1

 private:
  std::shared_ptr<GType> float_input_x_;
  fpga::BypassArgs fpga_bypass_args;

 public:
  GType *FloatInput() const {
    return float_input_x_ == nullptr ? input_x_ : float_input_x_.get();
  }
  void SetFloatInput(LoDTensor *input) { float_input_x_.reset(input); }
  const fpga::BypassArgs &FpgaArgs() const { return fpga_bypass_args; }
  void SetFpgaArgs(const fpga::BypassArgs &args) { fpga_bypass_args = args; }
#else

 private:
  fpga::BypassArgs fpga_bypass_args;

 public:
  const fpga::BypassArgs &FpgaArgs() const { return fpga_bypass_args; }
  void SetFpgaArgs(const fpga::BypassArgs &args) { fpga_bypass_args = args; }

 public:
  std::shared_ptr<Tensor> float_input_x_, float_out;
#endif

#endif
};
#endif

#ifdef SIGMOID_OP
template <typename Dtype>
class SigmoidParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SigmoidParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
  }
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return out_; }

 private:
  GType *input_x_;
  GType *out_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::BypassArgs fpga_bypass_args;

 public:
  const fpga::BypassArgs &FpgaArgs() const { return fpga_bypass_args; }
  void SetFpgaArgs(const fpga::BypassArgs &args) { fpga_bypass_args = args; }
#endif
};
#endif

#ifdef MULTICLASSNMS_OP
template <typename Dtype>
class MultiClassNMSParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  MultiClassNMSParam(const VariableNameMap &inputs,
                     const VariableNameMap &outputs, const AttributeMap &attrs,
                     Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_bboxes_ = InputBBoxesFrom<GType>(inputs, *scope);
    input_scores_ = InputScoresFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    background_label_ = GetAttr<int>("background_label", attrs);
    nms_top_k_ = GetAttr<int>("nms_top_k", attrs);
    keep_top_k_ = GetAttr<int>("keep_top_k", attrs);
    nms_threshold_ = GetAttr<float>("nms_threshold", attrs);
    nms_eta_ = GetAttr<float>("nms_eta", attrs);
    score_threshold_ = GetAttr<float>("score_threshold", attrs);
  }

  GType *InputBBoxes() const { return input_bboxes_; }

  GType *InputScores() const { return input_scores_; }

  GType *Out() const { return out_; }

  const int &BackGroundLabel() const { return background_label_; }

  const int &NMSTopK() const { return nms_top_k_; }

  const int &KeepTopK() const { return keep_top_k_; }

  const float &NMSThreshold() const { return nms_threshold_; }

  const float &NMSEta() const { return nms_eta_; }

  const float &ScoreThreshold() const { return score_threshold_; }

 private:
  GType *input_bboxes_;
  GType *input_scores_;
  GType *out_;
  int background_label_;
  int nms_top_k_;
  int keep_top_k_;
  float nms_threshold_;
  float nms_eta_;
  float score_threshold_;
};
#endif

#ifdef POLYGONBOXTRANSFORM_OP
template <typename Dtype>
class PolygonBoxTransformParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  PolygonBoxTransformParam(const VariableNameMap &inputs,
                           const VariableNameMap &outputs,
                           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputFrom<GType>(inputs, *scope);
    output_ = OutputFrom<GType>(outputs, *scope);
  }
  const GType *Input() const { return input_; }
  GType *Output() const { return output_; }

 private:
  GType *input_;
  GType *output_;
};
#endif

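// FeedParam wraps the feed op: InputX() is the external input list (a
// std::vector<LoDTensor> variable), Col() the slot read from it, and
// batch_size is read from the "batch_size" variable in the scope.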
template <typename Dtype>
class FeedParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FeedParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<std::vector<LoDTensor>>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    col_ = GetAttr<int>("col", attrs);
    auto var = scope->FindVar("batch_size");
    batch_size = var->GetValue<int>();
  }
  const std::vector<LoDTensor> *InputX() const { return input_x_; }
  GType *Out() const { return out_; }
  const int Col() const { return col_; }
  const int BatchSize() const { return batch_size; }

 private:
  std::vector<LoDTensor> *input_x_;
  GType *out_;
  int col_;
  int batch_size;
};

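// FetchParam wraps the fetch op: Out() is the external output list (a
// std::vector<LoDTensor> variable) and Col() the slot the kernel writes.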
template <typename Dtype>
class FetchParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FetchParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<std::vector<LoDTensor>>(outputs, *scope);
    col_ = GetAttr<int>("col", attrs);
  }

  const GType *InputX() const { return input_x_; }
  std::vector<LoDTensor> *Out() const { return out_; }
  const int Col() const { return col_; }

 private:
  GType *input_x_;
  std::vector<LoDTensor> *out_;
  int col_;
#ifdef PADDLE_MOBILE_FPGA

 public:
#ifdef PADDLE_MOBILE_FPGA_V1
  fpga::BypassArgs fpga_bypass_args;
  Tensor aligned_out;
#else
  std::shared_ptr<Tensor> aligned_out;
#endif
#endif
};

#ifdef FILL_CONSTANT_OP
template <typename Dtype>
class FillConstantParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FillConstantParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    out_var_ = OutVarFrom(outputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    dtype_ = GetAttr<int>("dtype", attrs);
    shape_ = GetAttr<vector<int>>("shape", attrs);
    value_ = GetAttr<float>("value", attrs);
  }

  Variable *OutVar() const { return out_var_; }

  GType *Out() const { return out_; }

  const int &DataDtype() const { return dtype_; }

  const vector<int> &Shape() const { return shape_; }

  const float &Value() const { return value_; }

 private:
  Variable *out_var_;
  GType *out_;
  int dtype_;
  vector<int> shape_;
  float value_;
};
#endif

#ifdef FILL_CONSTANT_BATCH_SIZE_LIKE_OP
template <typename Dtype>
class FillConstantBatchSizeLikeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FillConstantBatchSizeLikeParam(const VariableNameMap &inputs,
                                 const VariableNameMap &outputs,
                                 const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputFrom<GType>(inputs, *scope);
    out_var_ = OutVarFrom(outputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    dtype_ = GetAttr<int>("dtype", attrs);
    shape_ = GetAttr<vector<int>>("shape", attrs);
    value_ = GetAttr<float>("value", attrs);
    input_dim_idx_ = GetAttr<int>("input_dim_idx", attrs);
    output_dim_idx_ = GetAttr<int>("output_dim_idx", attrs);
  }

  Variable *OutVar() const { return out_var_; }

  const GType *Input() const { return input_; }

  GType *Out() const { return out_; }

  const int &DataDtype() const { return dtype_; }

  const vector<int> &Shape() const { return shape_; }

  const float &Value() const { return value_; }

  int InputDimIdx() const { return input_dim_idx_; }

  int OutputDimIdx() const { return output_dim_idx_; }

 private:
  GType *input_;
  Variable *out_var_;
  GType *out_;
  int dtype_;
  vector<int> shape_;
  float value_;
  int input_dim_idx_;
  int output_dim_idx_;
};
#endif

#ifdef TRANSPOSE_OP
template <typename Dtype>
class TransposeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  TransposeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                 const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    axis_ = GetAttr<vector<int>>("axis", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  const vector<int> &Axis() const { return axis_; }

 private:
  GType *input_x_;
  GType *out_;
  vector<int> axis_;
};
#endif
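// Illustrative note: `axis` is a permutation, so out_dims[i] = in_dims[axis[i]]
// and element (i0, i1, ...) of the output reads the input at the permuted
// index. For example, axis = {0, 2, 3, 1} turns an NCHW tensor into NHWC.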

#ifdef TRANSPOSE2_OP
template <typename Dtype>
class Transpose2Param : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  Transpose2Param(const VariableNameMap &inputs, const VariableNameMap &outputs,
                  const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    output_xshape_ = OutputXShapeFrom<GType>(outputs, *scope);
    axis_ = GetAttr<vector<int>>("axis", attrs);
  }

  GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  GType *OutputXShape() const { return output_xshape_; }

  const vector<int> &Axis() const { return axis_; }

 private:
  GType *input_x_;
  GType *out_;
  GType *output_xshape_;
  vector<int> axis_;
};
#endif

#ifdef LOOKUP_OP
template <typename Dtype>
class LookupParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  LookupParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
              const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_w_ = InputWFrom<GType>(inputs, *scope);
    input_ids_ = InputIdsFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    padding_idx_ = GetAttr<int64_t>("padding_idx", attrs);
  }

  const GType *InputW() const { return input_w_; }
  const GType *InputIds() const { return input_ids_; }
  GType *Out() const { return out_; }
  int64_t PaddingIdx() const { return padding_idx_; }

 private:
  GType *input_w_;
  GType *input_ids_;
  GType *out_;
  int64_t padding_idx_;
};
#endif
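// Illustrative note: lookup_table is an embedding gather; for each id in
// InputIds() the kernel copies row W[id] of InputW() into the matching row
// of Out(), and rows whose id equals PaddingIdx() (when it is not -1) are
// zero-filled. Sketch of the convention, not a quote of the kernel:
//   const int64_t *ids = param.InputIds()->data<int64_t>();
//   for (int i = 0; i < num_ids; ++i) {
//     // either zero-fill or memcpy row W[ids[i]] into out row i
//   }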

#ifdef CRF_OP
template <typename Dtype>
class CrfParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  //    {G_OP_TYPE_CRF, {{"Emission", "Transition", "Label"}, {"ViterbiPath"}}},

  CrfParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    // TODO: wire up any remaining CRF attributes here.
    input_emission_ = InputEmissionFrom<GType>(inputs, *scope);
    input_transition_ = InputTransitionFrom<GType>(inputs, *scope);
    input_label_ = InputLabelFrom<GType>(inputs, *scope);
    output_viterbipath_ = OutputViterbiPathFrom<GType>(outputs, *scope);
  }
  const GType *InputEmission() const { return input_emission_; }
  const GType *InputTransition() const { return input_transition_; }
  const GType *InputLabel() const { return input_label_; }
  GType *outputVBP() const { return output_viterbipath_; }

 private:
  GType *input_emission_;
  GType *input_transition_;
  GType *input_label_;
  GType *output_viterbipath_;
};
#endif

#ifdef RESHAPE_OP
template <typename Dtype>
class ReshapeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ReshapeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_shape_ = InputShapeFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    shape_ = GetAttr<vector<int>>("shape", attrs);

    if (HasAttr("inplace", attrs)) {
      inplace_ = GetAttr<bool>("inplace", attrs);
    } else {
      inplace_ = false;
      DLOG << "ReshapeParam: missing inplace attribute; fluid may have "
              "updated, defaulting to false";
    }
  }

  const GType *InputX() const { return input_x_; }

  const GType *InputShape() const { return input_shape_; }

  GType *Out() const { return out_; }

  const vector<int> &Shape() const { return shape_; }

  const bool &Inplace() const { return inplace_; }

 private:
  GType *input_x_;
  GType *input_shape_;
  GType *out_;
  vector<int> shape_;
  bool inplace_;
};
#endif

#ifdef RESHAPE2_OP
template <typename Dtype>
class Reshape2Param : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  Reshape2Param(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_shape_ = InputShapeFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    output_xshape_ = OutputXShapeFrom<GType>(outputs, *scope);
    shape_ = GetAttr<vector<int>>("shape", attrs);
    if (HasAttr("inplace", attrs)) {
      inplace_ = GetAttr<bool>("inplace", attrs);
    } else {
      inplace_ = false;
    }
  }

  GType *InputX() const { return input_x_; }

  const GType *InputShape() const { return input_shape_; }

  GType *Out() const { return out_; }

  GType *OutputXShape() const { return output_xshape_; }

  const vector<int> &Shape() const { return shape_; }

  const bool &Inplace() const { return inplace_; }

 private:
  GType *input_x_;
  GType *input_shape_;
  GType *out_;
  GType *output_xshape_;
  vector<int> shape_;
  bool inplace_;
};
#endif
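// Illustrative note on the `shape` attribute shared by reshape/reshape2
// (the usual fluid convention): an entry of -1 means "infer this dim from
// the total element count", and 0 means "copy the dim from the input at the
// same index"; e.g. input dims [4, 8, 2] with shape {0, -1} give output
// dims [4, 16]. Reshape2 additionally records the input's original shape in
// OutputXShape() so the transformation can be inverted.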

#ifdef SCALE_OP
template <typename Dtype>
class ScaleParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ScaleParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    scale_ = GetAttr<float>("scale", attrs);
    bias_ = GetAttr<float>("bias", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  const float Scale() const { return scale_; }

  const float Bias() const { return bias_; }

 private:
  GType *input_x_;
  GType *out_;
  float scale_;
  float bias_;
};
#endif
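// Illustrative note: scale is the elementwise affine map
//   Out = Scale() * InputX + Bias()
// with both coefficients carried as scalar attributes.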

#ifdef SLICE_OP
template <typename Dtype>
class SliceParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SliceParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);

    axes_ = GetAttr<std::vector<int>>("axes", attrs);
    starts_ = GetAttr<std::vector<int>>("starts", attrs);
    ends_ = GetAttr<std::vector<int>>("ends", attrs);

    original_output_dims_size_ = output_->dims().size();
  }

 public:
  GType *input_;
  GType *output_;
  std::vector<int> axes_;
  std::vector<int> starts_;
  std::vector<int> ends_;
  int original_output_dims_size_;
};
#endif
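// Illustrative note: for each k, slice keeps the half-open range
// [starts_[k], ends_[k]) along dimension axes_[k]. Kernels conventionally
// resolve negative indices and clamp before copying, e.g. (sketch only):
//   int dim = in_dims[axes[k]];
//   int s = starts[k] < 0 ? starts[k] + dim : starts[k];
//   int e = ends[k] < 0 ? ends[k] + dim : ends[k];
//   s = std::max(0, std::min(s, dim));
//   e = std::max(s, std::min(e, dim));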

#ifdef RESIZE_OP
template <typename Dtype>
class ResizeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ResizeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
              const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_shape_ = InputShapeFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    is_pyramid_test_ = GetAttr<bool>("is_pyramid_test", attrs);
    height_ = GetAttr<int>("height", attrs);
    width_ = GetAttr<int>("width", attrs);
    out_height_scale_ = GetAttr<float>("out_height_scale", attrs);
    out_width_scale_ = GetAttr<float>("out_width_scale", attrs);
  }

  const GType *InputX() const { return input_x_; }

  const GType *InputShape() const { return input_shape_; }

  GType *Out() const { return out_; }

  const bool &IsPyramidTest() const { return is_pyramid_test_; }

  const int &Height() const { return height_; }

  const int &Width() const { return width_; }

  const float &OutHeightScale() const { return out_height_scale_; }

  const float &OutWidthScale() const { return out_width_scale_; }

 private:
  GType *input_x_;
  GType *input_shape_;
  GType *out_;
  bool is_pyramid_test_;
  int height_;
  int width_;
  float out_height_scale_;
  float out_width_scale_;
};
#endif

#ifdef RELU_OP
/*
 * The op layer instantiates this param and hands it to the kernel layer.
 */
template <typename Dtype>
class ReluParamBase : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ReluParamBase(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

 private:
  GType *input_x_;
  GType *out_;
};

template <typename Dtype>
class ReluParam : public ReluParamBase<Dtype> {
 public:
  using ReluParamBase<Dtype>::ReluParamBase;
};

template <typename Dtype>
class Relu6Param : public ReluParamBase<Dtype> {
 public:
  Relu6Param(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : ReluParamBase<Dtype>(inputs, outputs, attrs, scope) {
    threshold = OpParam::GetAttr<float>("threshold", attrs);
  }
  float getThreshold() const { return threshold; }

 private:
  float threshold;
};

#ifdef PADDLE_MOBILE_CL
template <>
class ReluParam<GPU_CL> : public ReluParamBase<GPU_CL> {
 public:
  using ReluParamBase<GPU_CL>::ReluParamBase;
  framework::CLImage &getMidImage() { return midImage; }

 private:
  framework::CLImage midImage;
};
#endif

#endif
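// Illustrative note: ReluParamBase backs the plain activations
//   relu:  out = max(x, 0)
//   relu6: out = min(max(x, 0), threshold)   // threshold is typically 6
// The GPU_CL specialization only adds a scratch CLImage for kernels that
// stage an intermediate image between passes.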

#ifdef TANH_OP
template <typename Dtype>
class TanhParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  TanhParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
  }
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return out_; }

 private:
  GType *input_x_;
  GType *out_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  std::shared_ptr<GType> float_input_x_;
  fpga::BypassArgs fpga_bypass_args;

 public:
  GType *FloatInput() const {
    return float_input_x_ == nullptr ? input_x_ : float_input_x_.get();
  }
  void SetFloatInput(LoDTensor *input) { float_input_x_.reset(input); }
  const fpga::BypassArgs &FpgaArgs() const { return fpga_bypass_args; }
  void SetFpgaArgs(const fpga::BypassArgs &args) { fpga_bypass_args = args; }
#endif
};
#endif

#ifdef PRELU_OP
template <typename Dtype>
class PReluParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  PReluParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    DLOG << "PReluParam inputs before";
    input_x_ = InputXFrom<GType>(inputs, *scope);
    alpha_ = InputAlphaFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    mode_ = GetStringAttr("mode", attrs);
    DLOG << "PReluParam mode after" << mode_;
  }
  const GType *InputX() const { return input_x_; }
  const GType *InputAlpha() const { return alpha_; }
  GType *Out() const { return out_; }
  const std::string &Mode() const { return mode_; }

 private:
  GType *input_x_;
  GType *out_;
  GType *alpha_;
  std::string mode_;
};
#endif
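// Illustrative note: prelu computes out = x > 0 ? x : alpha * x, where the
// `mode` attribute follows the usual fluid convention for broadcasting
// alpha: "all" (one scalar), "channel" (one alpha per channel), or
// "element" (one alpha per element).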

#ifdef LEAKY_RELU_OP
template <typename Dtype>
class LeakyReluParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  LeakyReluParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                 const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    alpha_ = GetAttr<float>("alpha", attrs);
  }
  const GType *InputX() const { return input_x_; }
  const float Alpha() const { return alpha_; }
  GType *Out() const { return out_; }

 private:
  GType *input_x_;
  GType *out_;
  float alpha_;
};
#endif
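// Illustrative note: leaky_relu is the single-slope case of the above,
//   Out = x > 0 ? x : Alpha() * x
// with one scalar alpha for the whole tensor.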

template <typename Dtype>
class FusionFcParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionFcParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    input_z_ = InputZFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    x_num_col_dims_ = GetAttr<int>("x_num_col_dims", attrs);
    y_num_col_dims_ = GetAttr<int>("y_num_col_dims", attrs);
    axis_ = GetAttr<int>("axis", attrs);
  }

  GType *InputX() const { return input_x_; }

  GType *InputY() const { return input_y_; }

  GType *InputZ() const { return input_z_; }

  GType *Out() const { return out_; }

  const int &XNumColDims() const { return x_num_col_dims_; }

  const int &YNumColDims() const { return y_num_col_dims_; }

  const int &Axis() const { return axis_; }

 private:
  GType *input_x_;
  GType *input_y_;
  GType *input_z_;
  GType *out_;
  int x_num_col_dims_;
  int y_num_col_dims_;
  int axis_;

#ifdef PADDLE_MOBILE_FPGA

 private:  // NOLINT
  fpga::SplitConvArgs fpga_conv_args;

 public:
  const fpga::SplitConvArgs &FpgaArgs() const { return fpga_conv_args; }
  void SetFpgaArgs(const fpga::SplitConvArgs &args) { fpga_conv_args = args; }
#endif
};
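// Illustrative note: fusion_fc flattens X into a 2-D matrix using its first
// x_num_col_dims dims as rows (and Y via y_num_col_dims), runs one GEMM, and
// folds in the trailing elementwise_add; conceptually
//   Out = flatten(X) * flatten(Y) + broadcast(Z along axis)
// (a sketch of the fusion, not a quote of the kernel).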

#ifdef FUSION_FCRELU_OP
template <typename DeviceType>
using FusionFcReluParam = FusionFcParam<DeviceType>;
#endif

template <typename Dtype>
class FusionConvAddParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvAddParam(const VariableNameMap &inputs,
                     const VariableNameMap &outputs, const AttributeMap &attrs,
                     Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }
  GType *Bias() const { return bias_; }

  const int &Axis() const { return axis_; }

 protected:
  GType *bias_;
  int axis_;
};
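// Illustrative note: FusionConvAddParam fuses conv2d with its trailing
// elementwise_add, i.e. output = conv(input, filter) + broadcast(Bias()),
// where Axis() names the output dimension that Bias() aligns with
// (typically the channel dimension).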

template <typename Dtype>
Print &operator<<(Print &printer, const FusionConvAddParam<Dtype> &conv_param);

#ifdef FUSION_CONVADDRELU_OP
template <typename DeviceType>
class FusionConvAddReluParam : public FusionConvAddParam<DeviceType> {
 public:
  FusionConvAddReluParam(const VariableNameMap &inputs,
                         const VariableNameMap &outputs,
                         const AttributeMap &attrs, Scope *scope)
      : FusionConvAddParam<DeviceType>(inputs, outputs, attrs, scope) {}
};
#endif

#ifdef FUSION_CONVADDPRELU_OP
template <typename Dtype>
class FusionConvAddPReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvAddPReluParam(const VariableNameMap &inputs,
                          const VariableNameMap &outputs,
                          const AttributeMap &attrs, Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    alpha_ = OpParam::InputAlphaFrom<GType>(inputs, *scope);
    mode_ = OpParam::GetStringAttr("mode", attrs);
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }
  const GType *InputAlpha() const { return alpha_; }
  const std::string &Mode() const { return mode_; }
  GType *Bias() const { return bias_; }
  const int &Axis() const { return axis_; }

 protected:
  GType *bias_;
  int axis_;
  GType *alpha_;
  std::string mode_;
};
#endif

#ifdef FUSION_CONVADDADDPRELU_OP
template <typename Dtype>
class FusionConvAddAddPReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvAddAddPReluParam(const VariableNameMap &inputs,
                             const VariableNameMap &outputs,
                             const AttributeMap &attrs, Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    bias1_ = OpParam::InputYFrom1<GType>(inputs, *scope);
    alpha_ = OpParam::InputAlphaFrom<GType>(inputs, *scope);
    mode_ = OpParam::GetStringAttr("mode", attrs);
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    keyOutput_ = OpParam::Getkey("addOut", inputs, 0);
    keyX1_ = OpParam::Getkey("addX", inputs, 1);
    keyY1_ = OpParam::Getkey("Y", inputs, 1);
    if (keyX1_ == keyOutput_) {
      bias1_ = OpParam::InputYFrom1<GType>(inputs, *scope);
    } else if (keyY1_ == keyOutput_) {
      bias1_ = OpParam::InputXFrom1<GType>(inputs, *scope);
    }
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }
  const GType *InputAlpha() const { return alpha_; }
  const std::string &Mode() const { return mode_; }
  const GType *Bias1() const { return bias1_; }

  GType *Bias() const { return bias_; }

  const int &Axis() const { return axis_; }

 protected:
  GType *bias_;
  int axis_;
  GType *alpha_;
  std::string mode_;
  GType *bias1_;
  std::string keyOutput_;
  std::string keyX1_;
  std::string keyY1_;
};
#endif

#ifdef FUSION_CONVADDBNRELU_OP
template <typename Dtype>
class FusionConvAddBNReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvAddBNReluParam(const VariableNameMap &inputs,
                           const VariableNameMap &outputs,
                           const AttributeMap &attrs, Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }

  ~FusionConvAddBNReluParam() {}

  GType *Bias() const { return bias_; }

  const int &Axis() const { return axis_; }

  const GType *InputBias() const { return input_bias_; }

  const GType *InputMean() const { return input_mean_; }

  const GType *InputScale() const { return input_scale_; }

  const GType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const GType *NewScale() const { return new_scale_.get(); }

  const GType *NewBias() const { return new_bias_.get(); }

 protected:
  GType *bias_;
  int axis_;
  GType *input_bias_;
  GType *input_mean_;
  GType *input_scale_;
  GType *input_variance_;
  float epsilon_;
  float momentum_;
  std::shared_ptr<GType> new_bias_;
  std::shared_ptr<GType> new_scale_;
};
#endif
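// Illustrative note on SetNewScale/SetNewBias (assuming the usual inference-
// time batch-norm folding): with s = InputScale(), b = InputBias(),
// m = InputMean(), v = InputVariance(), eps = Epsilon(), a kernel can
// precompute the per-channel tensors
//   new_scale[c] = s[c] / sqrt(v[c] + eps);
//   new_bias[c]  = b[c] - m[c] * new_scale[c];
// so the fused op becomes relu((conv(x) + Bias()) * new_scale + new_bias)
// in a single pass. The shared_ptr + CLImageDeleter pairing ensures GPU
// CLImage instances are destroyed correctly when the param is released.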

#ifdef FUSION_CONVBNADDRELU_OP
template <typename Dtype>
class FusionConvBNAddReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvBNAddReluParam(const VariableNameMap &inputs,
                           const VariableNameMap &outputs,
                           const AttributeMap &attrs, Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    keyBNY_ = OpParam::Getkey("BNY", inputs, 0);
    keyX_ = OpParam::Getkey("X", inputs, 0);
    keyY_ = OpParam::Getkey("Y", inputs, 0);
    if (keyX_ == keyBNY_) {
      bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
    } else if (keyY_ == keyBNY_) {
      bias_ = OpParam::InputXFrom<GType>(inputs, *scope);
    }
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }

  ~FusionConvBNAddReluParam() {}
  GType *Bias() const { return bias_; }

  const int &Axis() const { return axis_; }

  const GType *InputBias() const { return input_bias_; }

  const GType *InputMean() const { return input_mean_; }

  const GType *InputScale() const { return input_scale_; }

  const GType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const GType *NewScale() const { return new_scale_.get(); }

  const GType *NewBias() const { return new_bias_.get(); }

 protected:
  GType *bias_;
  int axis_;
  GType *input_bias_;
  GType *input_mean_;
  GType *input_scale_;
  GType *input_variance_;
  float epsilon_;
  float momentum_;
  std::shared_ptr<GType> new_bias_;
  std::shared_ptr<GType> new_scale_;
  std::string keyBNY_;
  std::string keyX_;
  std::string keyY_;
};
#endif

#ifdef FUSION_CONVBN_OP
template <typename Dtype>
class FusionConvBNParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvBNParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    this->output_ = OpParam::OutputYFrom<GType>(outputs, *scope);
  }

  const GType *InputBias() const { return input_bias_; }

  const GType *InputMean() const { return input_mean_; }

  const GType *InputScale() const { return input_scale_; }

  const GType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const GType *NewScale() const { return new_scale_.get(); }

  const GType *NewBias() const { return new_bias_.get(); }

 protected:
  GType *input_bias_;
  GType *input_mean_;
  GType *input_scale_;
  GType *input_variance_;
  float epsilon_;
  float momentum_;
  std::shared_ptr<GType> new_bias_;
  std::shared_ptr<GType> new_scale_;
};
#endif

#ifdef FUSION_CONVADDBN_OP
template <typename Dtype>
class FusionConvAddBNParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvAddBNParam(const VariableNameMap &inputs,
                       const VariableNameMap &outputs,
                       const AttributeMap &attrs, Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    this->output_ = OpParam::OutputYFrom<GType>(outputs, *scope);
  }
  GType *Bias() const { return bias_; }

  const int &Axis() const { return axis_; }

  const GType *InputBias() const { return input_bias_; }

  const GType *InputMean() const { return input_mean_; }

  const GType *InputScale() const { return input_scale_; }

  const GType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const GType *NewScale() const { return new_scale_.get(); }

  const GType *NewBias() const { return new_bias_.get(); }

 protected:
  GType *bias_;
  int axis_;
  GType *input_bias_;
  GType *input_mean_;
  GType *input_scale_;
  GType *input_variance_;
  float epsilon_;
  float momentum_;
  std::shared_ptr<GType> new_bias_;
  std::shared_ptr<GType> new_scale_;
};
#endif

#ifdef FUSION_DWCONVBNRELU_OP
template <typename Dtype>
class FusionDWConvBNReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDWConvBNReluParam(const VariableNameMap &inputs,
                          const VariableNameMap &outputs,
                          const AttributeMap &attrs, Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }

  ~FusionDWConvBNReluParam() {}

  const GType *InputBias() const { return input_bias_; }

  const GType *InputMean() const { return input_mean_; }

  const GType *InputScale() const { return input_scale_; }

  const GType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const GType *NewScale() const { return new_scale_.get(); }

  const GType *NewBias() const { return new_bias_.get(); }

 protected:
  GType *input_bias_;
  GType *input_mean_;
  GType *input_scale_;
  GType *input_variance_;
  float epsilon_;
  float momentum_;
  std::shared_ptr<GType> new_bias_;
  std::shared_ptr<GType> new_scale_;
};

#endif

#ifdef FUSION_CONVRELU_OP
template <typename Dtype>
class FusionConvReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvReluParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
                      Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }
};
#endif

#ifdef FUSION_CONVBNRELU_OP
template <typename Dtype>
class FusionConvBNReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvBNReluParam(const VariableNameMap &inputs,
                        const VariableNameMap &outputs,
                        const AttributeMap &attrs, Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }

  ~FusionConvBNReluParam() {}

  const GType *InputBias() const { return input_bias_; }

  const GType *InputMean() const { return input_mean_; }

  const GType *InputScale() const { return input_scale_; }

  const GType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const GType *NewScale() const { return new_scale_.get(); }

  const GType *NewBias() const { return new_bias_.get(); }

 protected:
  GType *input_bias_;
  GType *input_mean_;
  GType *input_scale_;
  GType *input_variance_;
  float epsilon_;
  float momentum_;
  std::shared_ptr<GType> new_bias_;
  std::shared_ptr<GType> new_scale_;
};
#endif

#ifdef IM2SEQUENCE_OP
template <typename Dtype>
class Im2SequenceParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  Im2SequenceParam(const VariableNameMap &inputs,
                   const VariableNameMap &outputs, const AttributeMap &attrs,
                   Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    kernels_ = GetAttr<vector<int>>("kernels", attrs);
    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
  }

  const GType *Input() const { return input_x_; }

  GType *Output() const { return out_; }

  const vector<int> &Kernels() const { return kernels_; }

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

 private:
  GType *input_x_;
  GType *out_;
  vector<int> kernels_;
  vector<int> strides_;
  vector<int> paddings_;
};
#endif
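// Illustrative note: im2sequence unrolls each kernels_[0] x kernels_[1]
// window of an NCHW input into one row of a 2-D LoDTensor, giving roughly
//   output dims = {N * out_h * out_w, C * kernels_[0] * kernels_[1]}
//   out_h = (H + pad_up + pad_down - kernels_[0]) / strides_[0] + 1
// (and similarly for out_w); a sketch of the usual convention only.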

#ifdef DROPOUT_OP
template <typename Dtype>
class DropoutParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  DropoutParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);

    dropout_prob_ = GetAttr<float>("dropout_prob", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  float DropoutProb() const { return dropout_prob_; }

 private:
  GType *input_x_;
  GType *out_;
  float dropout_prob_;
};
#endif
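// Illustrative note: on the inference path dropout generates no mask; in the
// common "downgrade in infer" convention the kernel simply rescales,
//   Out = InputX * (1 - DropoutProb())
// (sketch of the convention, not a quote of the kernel).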

template <typename Dtype>
class ConvTransposeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ConvTransposeParam(const VariableNameMap &inputs,
                     const VariableNameMap &outputs, const AttributeMap &attrs,
                     Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    filter_ = OpParam::FilterFrom<GType>(inputs, *scope);
    input_ = OpParam::InputFrom<GType>(inputs, *scope);
    // output_ = OutputFrom<GType>(outputs, scope);
    if (outputs.count("Output")) {
      output_ = OpParam::OutputFrom<GType>(outputs, *scope);
    }
    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
    dilations_ = GetAttr<vector<int>>("dilations", attrs);
    if (HasAttr("output_size", attrs)) {
      output_size_ = GetAttr<vector<int>>("output_size", attrs);
      DLOG << "conv transpose output size: " << output_size_;
    }
    groups = GetAttr<int>("groups", attrs);
  }

  const GType *Input() const { return input_; }

  GType *Filter() const { return filter_; }

  GType *Output() const { return output_; }

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

  GType *Filters() const { return filter_; }

  GType *TransFilters() const { return transformed_filter_; }

  const vector<int> &Dilations() const { return dilations_; }

  const vector<int> &OutputSize() const { return output_size_; }

  const int &Groups() const { return groups; }

  enum ExecMode {
    EXEC_INVALID = 0,
    EXEC_GEMM_FLOAT,
    EXEC_DECONV3X3_FLOAT,
    EXEC_DECONV4X4_FLOAT,
    EXEC_DEPTHWISETRANS_FLOAT,
    EXEC_CONVTRANS3x3s2_FLOAT,
  };

  ExecMode &ExecMode() const { return exec_mode_; }

 private:
  GType *input_;
  GType *output_;
  GType *filter_;
  GType *transformed_filter_;
  vector<int> strides_;
  vector<int> paddings_;
  vector<int> dilations_;
  vector<int> output_size_;
  int groups;
  mutable enum ExecMode exec_mode_;

#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::DeconvArgs fpga_conv_args;
  fpga::DWDeconvArgs fpga_DWDeconv_args;

 public:
  const fpga::DeconvArgs &FpgaArgs() const { return fpga_conv_args; }
  const fpga::DWDeconvArgs &FpgaDWDconvArgs() const {
    return fpga_DWDeconv_args;
  }
  void SetFpgaArgs(const fpga::DeconvArgs &args) { fpga_conv_args = args; }
  void SetFpgaArgs(const fpga::DWDeconvArgs &args) {
    fpga_DWDeconv_args = args;
  }
#endif
};
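// Illustrative note: unless the model supplies an explicit `output_size`
// attribute, conv2d_transpose output extents follow the usual inverse-conv
// formula per spatial dimension:
//   out = (in - 1) * stride - 2 * padding + dilation * (ksize - 1) + 1
// OutputSize() simply overrides that computation when present.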

#ifdef FUSION_DECONVADD_OP
template <typename Dtype>
class FusionDeconvAddParam : public ConvTransposeParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDeconvAddParam(const VariableNameMap &inputs,
                       const VariableNameMap &outputs,
                       const AttributeMap &attrs, Scope *scope)
      : ConvTransposeParam<Dtype>(inputs, outputs, attrs, scope) {
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }
  GType *Bias() const { return bias_; }

  const int &Axis() const { return axis_; }

  GType *Output() const { return output_; }

 protected:
  GType *bias_;
  int axis_;
  GType *output_;
};
#endif

#ifdef FUSION_DECONVADDRELU_OP
template <typename Dtype>
using FusionDeconvAddReluParam = FusionDeconvAddParam<Dtype>;
#endif
#ifdef FUSION_DECONVADDBN_OP
template <typename Dtype>
class FusionDeconvAddBNParam : public ConvTransposeParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDeconvAddBNParam(const VariableNameMap &inputs,
                         const VariableNameMap &outputs,
                         const AttributeMap &attrs, Scope *scope)
      : ConvTransposeParam<Dtype>(inputs, outputs, attrs, scope) {
    output_ = OpParam::OutFrom<GType>(outputs, *scope);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    //    is_test_ = OpParam::GetAttr<bool>("is_test", attrs);
  }
  RType *Output() const { return output_; }

  const RType *InputBias() const { return input_bias_; }

  const RType *InputMean() const { return input_mean_; }

  const RType *InputScale() const { return input_scale_; }

  const RType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  const bool &IsTest() const { return is_test_; }

  void SetNewScale(RType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(RType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const RType *NewScale() const { return new_scale_.get(); }

  const RType *NewBias() const { return new_bias_.get(); }

 protected:
  RType *output_;
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
  float epsilon_;
  float momentum_;
  bool is_test_;
  std::shared_ptr<RType> new_bias_;
  std::shared_ptr<RType> new_scale_;
};
#endif
#ifdef FUSION_DECONVBNRELU_OP
template <typename Dtype>
class FusionDeconvBNReluParam : public ConvTransposeParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDeconvBNReluParam(const VariableNameMap &inputs,
                          const VariableNameMap &outputs,
                          const AttributeMap &attrs, Scope *scope)
      : ConvTransposeParam<Dtype>(inputs, outputs, attrs, scope) {
    output_ = OpParam::OutFrom<GType>(outputs, *scope);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
  }
  RType *Output() const { return output_; }

  const RType *InputBias() const { return input_bias_; }

  const RType *InputMean() const { return input_mean_; }

  const RType *InputScale() const { return input_scale_; }

  const RType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  const bool &IsTest() const { return is_test_; }

  void SetNewScale(RType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(RType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const RType *NewScale() const { return new_scale_.get(); }

  const RType *NewBias() const { return new_bias_.get(); }

 protected:
  RType *output_;
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
  float epsilon_;
  float momentum_;
  bool is_test_;
  std::shared_ptr<RType> new_bias_;
  std::shared_ptr<RType> new_scale_;
};
#endif
#ifdef FUSION_DECONVADDBNRELU_OP
template <typename Dtype>
class FusionDeconvAddBNReluParam : public ConvTransposeParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDeconvAddBNReluParam(const VariableNameMap &inputs,
                             const VariableNameMap &outputs,
                             const AttributeMap &attrs, Scope *scope)
      : ConvTransposeParam<Dtype>(inputs, outputs, attrs, scope) {
    output_ = OpParam::OutFrom<GType>(outputs, *scope);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    //    is_test_ = OpParam::GetAttr<bool>("is_test", attrs);
  }
  RType *Output() const { return output_; }

  const RType *InputBias() const { return input_bias_; }

  const RType *InputMean() const { return input_mean_; }

  const RType *InputScale() const { return input_scale_; }

  const RType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  const bool &IsTest() const { return is_test_; }

  void SetNewScale(RType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  void SetNewBias(RType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const RType *NewScale() const { return new_scale_.get(); }

  const RType *NewBias() const { return new_bias_.get(); }

 protected:
  RType *output_;
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
  float epsilon_;
  float momentum_;
  bool is_test_;
  std::shared_ptr<RType> new_bias_;
  std::shared_ptr<RType> new_scale_;
};
#endif

#ifdef FUSION_DECONVRELU_OP
template <typename Dtype>
using FusionDeconvReluParam = ConvTransposeParam<Dtype>;
#endif

#ifdef GRU_OP
template <typename Dtype>
class GruParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;

 public:
  /**
   *
   * @param inputs
   * @param outputs
   * @param attrs
   * @param scope
   * */
  GruParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_input_ = InputFrom<GType>(inputs, *scope);
    input_h0_ = InputH0From<GType>(inputs, *scope);
    input_bias_ = InputBiasFrom<GType>(inputs, *scope);
    input_weight_ = InputWeightFrom<GType>(inputs, *scope);

    output_batch_gate_ = OutputBatchGateFrom<GType>(outputs, *scope);
    output_batch_reset_hidden_prev_ =
        OutputBatchResetHiddenPrevFrom<GType>(outputs, *scope);
    output_batch_hidden_ = OutputBatchHiddenFrom<GType>(outputs, *scope);
    output_hidden_ = OutputHiddenFrom<GType>(outputs, *scope);
    activation_ = GetStringAttr("activation", attrs);
    gate_activation_ = GetStringAttr("gate_activation", attrs);
    is_reverse_ = GetAttr<bool>("is_reverse", attrs);
  }
  const GType *InputInput() const { return input_input_; }
  const GType *InputWeight() const { return input_weight_; }
  const GType *InputH0() const { return input_h0_; }
  const GType *InputBias() const { return input_bias_; }
  const std::string &Activation() const { return activation_; }
  const std::string &GateActivation() const { return gate_activation_; }
  const bool &IsReverse() const { return is_reverse_; }

  GType *OutBatchGate() const { return output_batch_gate_; }
  GType *OutBatchResetHiddenPrev() const {
    return output_batch_reset_hidden_prev_;
  }
  GType *OutBatchHidden() const { return output_batch_hidden_; }
  GType *OutHidden() const { return output_hidden_; }

 private:
  GType *input_input_;
  GType *input_h0_;
  GType *input_bias_;
  GType *input_weight_;

  GType *output_batch_gate_;
  GType *output_batch_reset_hidden_prev_;
  GType *output_batch_hidden_;
  GType *output_hidden_;
  std::string activation_;
  std::string gate_activation_;
  bool is_reverse_;
};
#endif

#ifdef GRU_UNIT_OP
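// Single-step GRU (gru_unit op). Unlike GruParam above, the activation types
// arrive as integer enum codes rather than strings.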
template <typename Dtype>
class GruUnitParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;

 public:
  GruUnitParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_input_ = InputFrom<GType>(inputs, *scope);
    input_hidden_prev_ = InputHiddenPrevFrom<GType>(inputs, *scope);
    input_bias_ = InputBiasFrom<GType>(inputs, *scope);
    input_weight_ = InputWeightFrom<GType>(inputs, *scope);

    output_gate_ = OutputGateFrom<GType>(outputs, *scope);
    output_reset_hidden_prev_ =
        OutputResetHiddenPrevFrom<GType>(outputs, *scope);
    output_hidden_ = OutputHiddenFrom<GType>(outputs, *scope);
    activation_ = GetAttr<int>("activation", attrs);
    gate_activation_ = GetAttr<int>("gate_activation", attrs);
  }
  const GType *InputInput() const { return input_input_; }
  const GType *InputWeight() const { return input_weight_; }
  const GType *InputHiddenPrev() const { return input_hidden_prev_; }
  const GType *InputBias() const { return input_bias_; }
  const int &Activation() const { return activation_; }
  const int &GateActivation() const { return gate_activation_; }

  GType *OutGate() const { return output_gate_; }
  GType *OutResetHiddenPrev() const { return output_reset_hidden_prev_; }
  GType *OutHidden() const { return output_hidden_; }

 private:
  GType *input_input_;
  GType *input_hidden_prev_;
  GType *input_bias_;
  GType *input_weight_;

  GType *output_gate_;
  GType *output_reset_hidden_prev_;
  GType *output_hidden_;
  int activation_;
  int gate_activation_;
};
#endif

#ifdef FLATTEN_OP
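// Flatten op: reshapes the input into a 2-D matrix, folding the dimensions
// before `axis` into the first dimension and the rest into the second (the
// usual flatten semantics).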
template <typename Dtype>
class FlattenParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FlattenParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    axis = GetAttr<int>("axis", attrs);
  }
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return out_; }
  const int &Axis() const { return axis; }

 private:
  GType *input_x_;
  GType *out_;
  int axis;
};
#endif

#ifdef SPLIT_OP
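// Split op: slices the input along `axis` into `num` equal pieces, or into
// pieces whose sizes are listed in `sections`.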
template <typename Dtype>
class SplitParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SplitParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    outs_ = OutMultiFrom<GType>(outputs, *scope);
    axis = GetAttr<int>("axis", attrs);
    num = GetAttr<int>("num", attrs);
    sections = GetAttr<std::vector<int>>("sections", attrs);

    //    for (int i = 0; i < outs_.size(); ++i) {
    //      out_ts_.push_back(*scope.FindVar(outs_[i])->GetMutable());
    //    }
  }
  GType *InputX() const { return input_x_; }
  std::vector<GType *> Outs() const { return outs_; }
  int Axis() const { return axis; }
  int Num() const { return num; }
  std::vector<int> Sections() const { return sections; }
  //  std::vector<GType> OutTs() const { return out_ts_; }

 private:
  GType *input_x_;
  std::vector<GType *> outs_;
  int axis;
  int num;
  std::vector<int> sections;
//  std::vector<GType> out_ts_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::SplitArgs fpga_split_args;

 public:
  const fpga::SplitArgs &FpgaArgs() const { return fpga_split_args; }
  void SetFpgaArgs(const fpga::SplitArgs &args) { fpga_split_args = args; }
#endif
};
#endif

#ifdef BILINEAR_INTERP_OP
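// Bilinear-interpolation resize: the target size comes from the out_h/out_w
// attributes or, when present, the OutSize input tensor.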
template <typename Dtype>
class BilinearInterpParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  BilinearInterpParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
                      Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_outsize_ = InputOutSizeFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    out_h_ = GetAttr<int>("out_h", attrs);
    out_w_ = GetAttr<int>("out_w", attrs);
  }
  const GType *InputX() const { return input_x_; }
  const GType *InputOutPutSize() const { return input_outsize_; }
  GType *Out() const { return out_; }
  int OutH() const { return out_h_; }
  int OutW() const { return out_w_; }

 private:
  GType *input_x_;
  GType *input_outsize_;
  GType *out_;
  int out_h_;
  int out_w_;
};
#endif

#ifdef NEAREST_INTERP_OP
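// Nearest-neighbour resize: like bilinear_interp above, with an optional
// "scale" attribute as an alternative way to derive the output size.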
template <typename Dtype>
class NearestInterpolationParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  NearestInterpolationParam(const VariableNameMap &inputs,
                            const VariableNameMap &outputs,
                            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_outsize_ = InputOutSizeFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    out_h_ = GetAttr<int>("out_h", attrs);
    out_w_ = GetAttr<int>("out_w", attrs);
    if (HasAttr("scale", attrs)) {
      has_scale_ = true;
      scale_ = GetAttr<float>("scale", attrs);
    }
    DLOG << "has_scale_:  " << has_scale_;
    DLOG << "scale_:  " << scale_;
  }
  const GType *InputX() const { return input_x_; }
  const GType *InputOutPutSize() const { return input_outsize_; }
  GType *Out() const { return out_; }
  int OutH() const { return out_h_; }
  int OutW() const { return out_w_; }
  float Scale() const { return scale_; }
  bool HasScale() const { return has_scale_; }

 private:
  GType *input_x_;
  GType *input_outsize_;
  GType *out_;
  int out_h_;
  int out_w_;
  float scale_ = 0.f;
  bool has_scale_ = false;
};
#endif

#ifdef SHAPE_OP
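// Shape op: writes the dimensions of the input tensor into the output.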
template <typename Dtype>
class ShapeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ShapeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
  }
  const GType *Input() const { return input_; }
  GType *Out() const { return out_; }

 private:
  GType *input_;
  GType *out_;
};
#endif

#ifdef TOP_K_OP
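// TopK op: selects the k largest entries, producing both the values (Out)
// and their positions (Indices).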
template <typename Dtype>
class TopKParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  TopKParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = OpParam::GetVarValue<GType>("X", inputs, *scope);
    output_ = OpParam::GetVarValue<GType>("Out", outputs, *scope);
    indices_ = OpParam::GetVarValue<GType>("Indices", outputs, *scope);
    k_ = OpParam::GetAttr<int>("k", attrs);
  }

 public:
  GType *input_;
  GType *output_;
  GType *indices_;
  int k_;
};
#endif  // TOP_K_OP

#ifdef CAST_OP
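// Cast op: converts the input from in_dtype to out_dtype; both are integer
// dtype codes taken from the op description.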
template <typename Dtype>
class CastParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  CastParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = OpParam::GetVarValue<GType>("X", inputs, *scope);
    output_ = OpParam::GetVarValue<GType>("Out", outputs, *scope);
    input_type_ = OpParam::GetAttr<int>("in_dtype", attrs);
    output_type_ = OpParam::GetAttr<int>("out_dtype", attrs);
  }

 public:
  GType *input_;
  GType *output_;
  int input_type_;
  int output_type_;
};
#endif  // CAST_OP

#ifdef QUANT_OP
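// Quantize op: the quantization scale is either computed online from the
// input (scale = max(abs(x))) and written to OutScale, or supplied offline
// through the optional InScale input.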
template <typename Dtype>
class QuantizeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  QuantizeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    // online
    // scale = max(abs(x))
    online_scale_ = OpParam::GetVarValue<GType>("OutScale", outputs, *scope);
    // offline
    if (inputs.count("InScale")) {
      offline_ = true;
      offline_scale_ = OpParam::GetVarValue<GType>("InScale", inputs, *scope);
    }
    // x = round(scale * x)
    if (OpParam::HasAttr("round_type", attrs)) {
      round_type_ = OpParam::GetAttr<RoundType>("round_type", attrs);
    }
  }

 public:
  // op input
  GType *input_;
  // op output
  GType *output_;
  GType *online_scale_;
  // quantize offline scale
  GType *offline_scale_;
  // whether an offline scale is provided
  bool offline_ = false;
  // round method type
  // RoundType round_type_ = ROUND_NEAREST_AWAY_ZERO;
  RoundType round_type_ = ROUND_NEAREST_TOWARDS_ZERO;
};
#endif

#ifdef DEQUANT_OP
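// Dequantize op: restores floats as x = x / weight_scale / activation_scale;
// the weight scale comes from the "weight_scale" attribute or, failing that,
// from "max_range".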
template <typename Dtype>
class DequantizeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  DequantizeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                  const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    activation_scale_ = OpParam::GetVarValue<GType>("Scale", inputs, *scope);
    // dequantization is performed as x = x / static_scale / online_scale
    if (OpParam::HasAttr("weight_scale", attrs)) {
      weight_scale_ = OpParam::GetAttr<float>("weight_scale", attrs);
    } else {
      weight_scale_ = OpParam::GetAttr<float>("max_range", attrs);
    }
  }

 public:
  // op input
  GType *input_;
  // op output
  GType *output_;
  GType *activation_scale_;
  float weight_scale_;
};
#endif

#if defined(FUSION_DEQUANT_BN_OP) || defined(FUSION_DEQUANT_ADD_BN_OP) || \
    defined(FUSION_DEQUANT_ADD_BN_RELU_OP) ||                             \
    defined(FUSION_DEQUANT_BN_RELU_OP) ||                                 \
    defined(FUSION_DEQUANT_ADD_BN_QUANT_OP) ||                            \
    defined(FUSION_DEQUANT_ADD_BN_RELU_QUANT_OP)
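// Shared parameters for the dequantize + batch-norm fusion family: the BN
// statistics (mean, variance) and affine terms (scale, bias), plus epsilon.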
template <typename Dtype>
class FusionDequantBNParam : public DequantizeParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDequantBNParam(const VariableNameMap &inputs,
                       const VariableNameMap &outputs,
                       const AttributeMap &attrs, Scope *scope)
      : DequantizeParam<Dtype>(inputs, outputs, attrs, scope) {
    // batch norm params
    bn_mean_ = OpParam::GetVarValue<GType>("BNMean", inputs, *scope);
    bn_variance_ = OpParam::GetVarValue<GType>("BNVariance", inputs, *scope);
    bn_scale_ = OpParam::GetVarValue<GType>("BNScale", inputs, *scope);
    bn_bias_ = OpParam::GetVarValue<GType>("BNBias", inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
  }

 public:
  // batch norm
  GType *bn_mean_;
  GType *bn_variance_;
  GType *bn_scale_;
  GType *bn_bias_;
  float epsilon_;
};
#endif

#if defined(FUSION_DEQUANT_ADD_BN_RELU_OP) ||  \
    defined(FUSION_DEQUANT_ADD_BN_OP) ||       \
    defined(FUSION_DEQUANT_ADD_BN_QUANT_OP) || \
    defined(FUSION_DEQUANT_ADD_BN_RELU_QUANT_OP)
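// Extends the dequant + BN fusion with the parameters of a preceding
// elementwise_add: the bias tensor Y and its broadcast axis.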
template <typename Dtype>
class FusionDequantAddBNParam : public FusionDequantBNParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDequantAddBNParam(const VariableNameMap &inputs,
                          const VariableNameMap &outputs,
                          const AttributeMap &attrs, Scope *scope)
      : FusionDequantBNParam<Dtype>(inputs, outputs, attrs, scope) {
    // element wise add params
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
  }

 public:
  // elementwise add
  int axis_;
  GType *bias_;
};
#endif

#ifdef FUSION_DEQUANT_ADD_BN_QUANT_OP
template <typename Dtype>
class FusionDequantAddBNQuantParam : public FusionDequantAddBNParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDequantAddBNQuantParam(const VariableNameMap &inputs,
                               const VariableNameMap &outputs,
                               const AttributeMap &attrs, Scope *scope)
      : FusionDequantAddBNParam<Dtype>(inputs, outputs, attrs, scope) {
    // scale output
    online_scale_ = OpParam::GetVarValue<GType>("OutScale", outputs, *scope);
    // offline
    if (inputs.count("InScale")) {
      offline_ = true;
      offline_scale_ = OpParam::GetVarValue<GType>("InScale", inputs, *scope);
    }
    // x = round(scale * x)
    if (OpParam::HasAttr("round_type", attrs)) {
      round_type_ = OpParam::GetAttr<RoundType>("round_type", attrs);
    }
  }

 public:
  GType *online_scale_;
  // quantize offline scale
  GType *offline_scale_;
  // whether an offline scale is provided
  bool offline_ = false;
  // round method type
  // RoundType round_type_ = ROUND_NEAREST_AWAY_ZERO;
  RoundType round_type_ = ROUND_NEAREST_TOWARDS_ZERO;
};
#endif

#ifdef SEQUENCE_EXPAND_OP
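// SequenceExpand op: expands the sequences in X according to the LoD of Y at
// ref_level; ref_level defaults to -1 when the attribute is absent.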
template <typename Dtype>
class SequenceExpandParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SequenceExpandParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
                      Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    ref_level_ = -1;
    if (OpParam::HasAttr("ref_level", attrs)) {
      ref_level_ = OpParam::GetAttr<int>("ref_level", attrs);
    }
  }

 public:
  GType *input_x_;
  GType *input_y_;
  GType *output_;
  int ref_level_;
};
#endif  // SEQUENCE_EXPAND_OP

#ifdef SEQUENCE_POOL_OP
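// SequencePool op: reduces each sequence to a single timestep using the
// "pooltype" attribute, defaulting to "MAX" when it is absent.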
template <typename Dtype>
class SequencePoolParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SequencePoolParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    pool_type_ = "MAX";
    if (OpParam::HasAttr("pooltype", attrs)) {
      pool_type_ = OpParam::GetStringAttr("pooltype", attrs);
    }
  }

 public:
  GType *input_;
  GType *output_;
  std::string pool_type_;
};
#endif  // SEQUENCE_POOL_OP

#ifdef LOD_RESET_OP
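// LodReset op: the new LoD comes from input Y when present, otherwise from
// the "target_lod" attribute; an optional "append" flag is also read.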
template <typename Dtype>
class LodResetParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  LodResetParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    input_y_ = nullptr;
    if (inputs.count("Y")) {
      input_y_ = InputYFrom<GType>(inputs, *scope);
    } else {
      target_lod_ = OpParam::GetAttr<vector<int>>("target_lod", attrs);
    }
    if (HasAttr("append", attrs)) {
      append = OpParam::GetAttr<bool>("append", attrs);
    }
  }

 public:
  GType *input_x_;
  GType *input_y_;
  GType *output_;
  std::vector<int> target_lod_;
  bool append = false;
};
#endif  // LOD_RESET_OP

#ifdef LESS_THAN_OP
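// Elementwise comparison (currently only less_than): compares X and Y with
// broadcasting controlled by "axis".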
template <typename Dtype>
class CompareParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  CompareParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
  }

 public:
  GType *input_x_;
  GType *input_y_;
  GType *output_;
  int axis_;
};
#endif  // LESS_THAN_OP

#if defined(LOGICAL_AND_OP) || defined(LOGICAL_OR_OP) || defined(LOGICAL_XOR_OP)
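// Shared parameters for the binary logical ops (logical_and/or/xor).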
template <typename Dtype>
class LogicalBinaryParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  LogicalBinaryParam(const VariableNameMap &inputs,
                     const VariableNameMap &outputs, const AttributeMap &attrs,
                     Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
  }

  const GType *InputX() const { return input_x_; }
  const GType *InputY() const { return input_y_; }
  GType *Out() const { return output_; }

 public:
  GType *input_x_;
  GType *input_y_;
  GType *output_;
};
#endif  // LOGICAL_AND_OP LOGICAL_OR_OP LOGICAL_XOR_OP

#ifdef LOGICAL_NOT_OP
template <typename Dtype>
class LogicalUnaryParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  LogicalUnaryParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
  }

  const GType *InputX() const { return input_x_; }
  GType *Out() const { return output_; }

 public:
  GType *input_x_;
  GType *output_;
};
#endif  // LOGICAL_NOT_OP

#ifdef WRITE_TO_ARRAY_OP
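// WriteToArray op: stores tensor X into the tensor array Out at position I.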
template <typename Dtype>
class WriteToArrayParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  WriteToArrayParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = OpParam::GetVarValue<GType>("X", inputs, *scope);
    index_ = OpParam::GetVarValue<GType>("I", inputs, *scope);
    output_ = OpParam::GetVarValue<std::vector<GType>>("Out", outputs, *scope);
  }

 public:
  GType *input_;
  GType *index_;
  std::vector<GType> *output_;
};
#endif

#ifdef READ_FROM_ARRAY_OP
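// ReadFromArray op: the inverse of write_to_array; reads the tensor at
// position I from array X into Out.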
template <typename Dtype>
class ReadFromArrayParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ReadFromArrayParam(const VariableNameMap &inputs,
                     const VariableNameMap &outputs, const AttributeMap &attrs,
                     Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = OpParam::GetVarValue<std::vector<GType>>("X", inputs, *scope);
    index_ = OpParam::GetVarValue<GType>("I", inputs, *scope);
    output_ = OpParam::GetVarValue<GType>("Out", outputs, *scope);
  }

 public:
  std::vector<GType> *input_;
  GType *index_;
  GType *output_;
};
#endif

#ifdef IS_EMPTY_OP
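// IsEmpty op: outputs a single bool indicating whether X holds zero elements.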
template <typename Dtype>
class IsEmptyParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  IsEmptyParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
  }

  const GType *InputX() const { return input_x_; }
  GType *Out() const { return output_; }

 public:
  GType *input_x_;
  GType *output_;
};
#endif  // IS_EMPTY_OP

#ifdef INCREMENT_OP
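// Increment op: Out = X + step.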
template <typename Dtype>
class IncrementParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  IncrementParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                 const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    step_ = OpParam::GetAttr<float>("step", attrs);
  }

  const GType *InputX() const { return input_x_; }
  GType *Out() const { return output_; }
  float Step() const { return step_; }

 public:
  GType *input_x_;
  GType *output_;
  float step_;
};
#endif  // INCREMENT_OP
#ifdef PAD2D_OP
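// Pad2D op: pads the spatial dims of the input according to "paddings"
// (four ints, presumably [top, bottom, left, right]) using "mode":
// "constant" fills with pad_value; other modes mirror or replicate edges.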
template <typename Dtype>
class Pad2DParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  Pad2DParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    paddings_ = OpParam::GetAttr<std::vector<int>>("paddings", attrs);
    pad_value_ = OpParam::GetAttr<float>("pad_value", attrs);
    mode_ = OpParam::GetStringAttr("mode", attrs);
    DLOG << "mode: " << mode_;
  }
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return out_; }

  std::vector<int> paddings_;
  float pad_value_;
  std::string mode_;

 private:
  GType *input_x_;
  GType *out_;
};
#endif
#ifdef EXP_OP
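// Exp op: elementwise exponential, Out = exp(X).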
template <typename Dtype>
class EXPParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  EXPParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
  }
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return out_; }

 private:
  GType *input_x_;
  GType *out_;
};
#endif

#ifdef PIXEL_SHUFFLE_OP
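// PixelShuffle op: rearranges an (N, C*r^2, H, W) tensor into
// (N, C, H*r, W*r), where r is upscale_factor.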
template <typename Dtype>
class PixelShuffleParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  PixelShuffleParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    upscale_factor_ = GetAttr<int>("upscale_factor", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  const int &upscale_factor() const { return upscale_factor_; }

 private:
  GType *input_x_;
  GType *out_;
  int upscale_factor_;
};
#endif

#ifdef GRID_SAMPLER_OP
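// GridSampler op: samples the input at the (typically normalized) coordinates
// given by Grid, producing Output.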
template <typename Dtype>
class GridSamplerParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  GridSamplerParam(const VariableNameMap &inputs,
                   const VariableNameMap &outputs, const AttributeMap &attrs,
                   Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    grid_ = GridFrom<GType>(inputs, *scope);
    output_ = OutputFrom<GType>(outputs, *scope);
  }

  const GType *InputX() const { return input_x_; }
  const GType *Grid() const { return grid_; }

  GType *Output() const { return output_; }

 private:
  GType *input_x_;
  GType *grid_;
  GType *output_;
};
#endif

#ifdef EXPAND_OP
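// Expand op: tiles the input, repeating dimension i expand_times[i] times.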
template <typename Dtype>
class ExpandParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ExpandParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
              const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    expand_times = OpParam::GetAttr<std::vector<int>>("expand_times", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  std::vector<int> expand_times;

 private:
  GType *input_x_;
  GType *out_;
};

#endif
}  // namespace operators
}  // namespace paddle_mobile