op_param.h 112.1 KB
Newer Older
W
wangliu 已提交
1 2 3 4 5 6 7 8 9 10 11 12 13
/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
朔-望's avatar
朔-望 已提交
14

15
#pragma once
朔-望's avatar
朔-望 已提交
16

17
#include <memory>
E
eclipsess 已提交
18
#include <string>
W
wangliu 已提交
19
#include <vector>
L
liuruilong 已提交
20
#include "common/log.h"
朔-望's avatar
朔-望 已提交
21
#include "common/type_define.h"
N
nhzlx 已提交
22
#include "common/types.h"
23
#include "framework/attribute.h"
朔-望's avatar
朔-望 已提交
24 25 26
#include "framework/lod_tensor.h"
#include "framework/scope.h"
#include "framework/tensor.h"
27
#include "framework/type_trait.h"
朔-望's avatar
朔-望 已提交
28
#include "framework/variable.h"
Z
zhangyang 已提交
29 30 31 32 33 34 35

#ifdef PADDLE_MOBILE_FPGA_V1
#include "fpga/V1/api.h"
#endif

#ifdef PADDLE_MOBILE_FPGA_V2
#include "fpga/V2/api.h"
Z
zhangyang 已提交
36
#endif
朔-望's avatar
朔-望 已提交
37

C
Chon 已提交
38 39 40 41
#ifdef PADDLE_MOBILE_FPGA_KD
#include "fpga/KD/context.hpp"
#endif

L
liuruilong 已提交
42 43
#ifdef PADDLE_MOBILE_CL
#include "framework/cl/cl_image.h"
Z
zhangyang 已提交
44
#endif
朔-望's avatar
朔-望 已提交
45 46

namespace paddle_mobile {
朔-望's avatar
朔-望 已提交
47 48
namespace operators {

W
wangliu 已提交
49 50 51 52 53
using framework::Attribute;
using framework::AttributeMap;
using framework::LoDTensor;
using framework::Scope;
using framework::Tensor;
E
eclipsess 已提交
54
using framework::Variable;
W
wangliu 已提交
55 56
using std::string;
using std::vector;
朔-望's avatar
朔-望 已提交
57

58
using framework::DtypeTensorTrait;
L
liuruilong 已提交
59

60 61 62 63 64 65 66 67 68 69 70 71 72 73 74
template <typename Dtype>
class CLImageDeleter {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;

 public:
  // Smart-pointer deleter for GType objects.  In OpenCL builds the pointee
  // is freed only when it really is a framework::CLImage.
  // NOTE(review): in non-CL builds (and when the dynamic_cast fails) this is
  // a no-op, so the pointee is never freed here — confirm ownership is
  // handled elsewhere in those configurations.
  void operator()(GType *ptr) {
#ifdef PADDLE_MOBILE_CL
    if (framework::CLImage *image = dynamic_cast<framework::CLImage *>(ptr)) {
      delete image;
    }
#endif
  }
};

L
liuruilong 已提交
75
class OpParam {
76 77
 public:
  OpParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
78 79
          const AttributeMap &attrs, Scope *scope)
      : scope_(scope) {}
80

81 82
  Scope *GetScope() const { return scope_; }
  Scope *scope_ = nullptr;
83

C
Chon 已提交
84 85 86 87 88 89
#ifdef PADDLE_MOBILE_FPGA_KD
  zynqmp::Context &context() { return context_; }

  zynqmp::Context context_;
#endif

朔-望's avatar
朔-望 已提交
90
 protected:
xiebaiyuan's avatar
xiebaiyuan 已提交
91 92 93 94
  template <typename T>
  static T *InputH0From(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("H0", inputs, scope);
  }
Z
zhaojiaying01 已提交
95 96 97 98 99 100 101

  template <typename T>
  static T *InputHiddenPrevFrom(const VariableNameMap &inputs,
                                const Scope &scope) {
    return GetVarValue<T>("HiddenPrev", inputs, scope);
  }

102 103 104 105 106
  template <typename T>
  static T *InputAlphaFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Alpha", inputs, scope);
  }

107 108 109 110 111 112 113 114 115
  template <typename T>
  static T *InputFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Input", inputs, scope);
  }

  template <typename T>
  static T *InputXFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("X", inputs, scope);
  }
116 117 118 119 120
  template <typename T>
  static T *InputOutSizeFrom(const VariableNameMap &inputs,
                             const Scope &scope) {
    return GetVarValue<T>("OutSize", inputs, scope);
  }
xiebaiyuan's avatar
xiebaiyuan 已提交
121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147

  template <typename T>
  static T *InputWFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("W", inputs, scope);
  }

  template <typename T>
  static T *InputIdsFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Ids", inputs, scope);
  }

  template <typename T>
  static T *InputEmissionFrom(const VariableNameMap &inputs,
                              const Scope &scope) {
    return GetVarValue<T>("Emission", inputs, scope);
  }

  template <typename T>
  static T *InputTransitionFrom(const VariableNameMap &inputs,
                                const Scope &scope) {
    return GetVarValue<T>("Transition", inputs, scope);
  }
  template <typename T>
  static T *InputLabelFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Label", inputs, scope);
  }

148 149 150 151
  template <typename T>
  static T *InputXFrom1(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue1<T>("addX", inputs, scope);
  }
152 153 154 155 156 157

  template <typename T>
  static T *InputYFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Y", inputs, scope);
  }

158 159 160 161 162
  template <typename T>
  static T *InputYFrom1(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue1<T>("Y", inputs, scope);
  }

E
eclipsess 已提交
163 164 165 166 167
  template <typename T>
  static T *InputZFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Z", inputs, scope);
  }

168 169 170 171 172
  template <typename T>
  static T *InputBiasFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Bias", inputs, scope);
  }
  template <typename T>
xiebaiyuan's avatar
xiebaiyuan 已提交
173 174 175 176
  static T *InputWeightFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Weight", inputs, scope);
  }
  template <typename T>
177 178 179 180 181 182 183 184 185 186 187 188
  static T *InputVarianceFrom(const VariableNameMap &inputs,
                              const Scope &scope) {
    return GetVarValue<T>("Variance", inputs, scope);
  }
  template <typename T>
  static T *InputMeanFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Mean", inputs, scope);
  }
  template <typename T>
  static T *InputScaleFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Scale", inputs, scope);
  }
E
eclipsess 已提交
189 190 191 192
  template <typename T>
  static T *InputImageFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Image", inputs, scope);
  }
E
eclipsess 已提交
193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208
  template <typename T>
  static T *InputPriorBoxFrom(const VariableNameMap &inputs,
                              const Scope &scope) {
    return GetVarValue<T>("PriorBox", inputs, scope);
  }
  template <typename T>
  static T *InputPriorBoxVarFrom(const VariableNameMap &inputs,
                                 const Scope &scope) {
    return GetVarValue<T>("PriorBoxVar", inputs, scope);
  }
  // LoDTensor but now use Tensor
  template <typename T>
  static T *InputTargetBoxFrom(const VariableNameMap &inputs,
                               const Scope &scope) {
    return GetVarValue<T>("TargetBox", inputs, scope);
  }
209

E
eclipsess 已提交
210 211 212 213 214 215 216 217 218 219
  template <typename T>
  static T *InputBBoxesFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("BBoxes", inputs, scope);
  }

  template <typename T>
  static T *InputScoresFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Scores", inputs, scope);
  }

E
eclipsess 已提交
220 221 222 223
  template <typename T>
  static T *InputShapeFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Shape", inputs, scope);
  }
E
eclipsess 已提交
224

225
  template <typename T>
W
wangliu 已提交
226 227
  static vector<T *> InputMultiFrom(const VariableNameMap &inputs,
                                    const Scope &scope) {
228 229 230
    return GetMultiVarValue<T>("X", inputs, scope);
  }

E
eclipsess 已提交
231 232 233 234 235
  static vector<Variable *> InputMultiVarsFrom(const VariableNameMap &inputs,
                                               const Scope &scope) {
    return GetMultiVar("X", inputs, scope);
  }

xiebaiyuan's avatar
xiebaiyuan 已提交
236 237 238 239 240 241
  template <typename T>
  static T *OutputBatchGateFrom(const VariableNameMap &outputs,
                                const Scope &scope) {
    return GetVarValue<T>("BatchGate", outputs, scope);
  }

Z
zhaojiaying01 已提交
242 243 244 245 246
  template <typename T>
  static T *OutputGateFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Gate", outputs, scope);
  }

xiebaiyuan's avatar
xiebaiyuan 已提交
247 248 249 250 251 252 253 254 255 256 257
  template <typename T>
  static T *OutputViterbiPathFrom(const VariableNameMap &outputs,
                                  const Scope &scope) {
    return GetVarValue<T>("ViterbiPath", outputs, scope);
  }
  template <typename T>
  static T *OutputBatchResetHiddenPrevFrom(const VariableNameMap &outputs,
                                           const Scope &scope) {
    return GetVarValue<T>("BatchResetHiddenPrev", outputs, scope);
  }

Z
zhaojiaying01 已提交
258 259 260 261 262 263
  template <typename T>
  static T *OutputResetHiddenPrevFrom(const VariableNameMap &outputs,
                                      const Scope &scope) {
    return GetVarValue<T>("ResetHiddenPrev", outputs, scope);
  }

xiebaiyuan's avatar
xiebaiyuan 已提交
264 265 266 267 268 269 270 271 272 273 274 275
  template <typename T>
  static T *OutputBatchHiddenFrom(const VariableNameMap &outputs,
                                  const Scope &scope) {
    return GetVarValue<T>("BatchHidden", outputs, scope);
  }

  template <typename T>
  static T *OutputHiddenFrom(const VariableNameMap &outputs,
                             const Scope &scope) {
    return GetVarValue<T>("Hidden", outputs, scope);
  }

276 277 278 279 280
  template <typename T>
  static T *OutputFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Output", outputs, scope);
  }

E
eclipsess 已提交
281 282 283 284 285
  static Variable *OutVarFrom(const VariableNameMap &outputs,
                              const Scope &scope) {
    return GetVar("Out", outputs, scope);
  }

286 287 288 289 290
  template <typename T>
  static T *OutFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Out", outputs, scope);
  }

xiebaiyuan's avatar
xiebaiyuan 已提交
291 292 293 294 295 296
  template <typename T>
  static vector<T *> OutMultiFrom(const VariableNameMap &outputs,
                                  const Scope &scope) {
    return GetMultiVarValue<T>("Out", outputs, scope);
  }

297 298 299 300 301
  template <typename T>
  static T *OutputYFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Y", outputs, scope);
  }

L
lijiancheng0614 已提交
302 303 304 305 306 307
  template <typename T>
  static T *OutputXShapeFrom(const VariableNameMap &outputs,
                             const Scope &scope) {
    return GetVarValue<T>("XShape", outputs, scope);
  }

E
eclipsess 已提交
308 309 310 311 312 313
  template <typename T>
  static T *OutputBoxesFrom(const VariableNameMap &outputs,
                            const Scope &scope) {
    return GetVarValue<T>("Boxes", outputs, scope);
  }

E
eclipsess 已提交
314 315 316 317 318
  template <typename T>
  static T *OutputBoxFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("OutputBox", outputs, scope);
  }

Z
zhaojiaying01 已提交
319 320 321 322 323
  template <typename T>
  static T *OutputNormFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("Norm", outputs, scope);
  }

E
eclipsess 已提交
324 325 326 327 328 329
  template <typename T>
  static T *OutputVariancesFrom(const VariableNameMap &outputs,
                                const Scope &scope) {
    return GetVarValue<T>("Variances", outputs, scope);
  }

330 331 332 333 334 335 336 337 338 339
  template <typename T>
  static T *MidOutFrom(const VariableNameMap &outputs, const Scope &scope) {
    return GetVarValue<T>("MidOut", outputs, scope);
  }

  template <typename T>
  static T *FilterFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Filter", inputs, scope);
  }

340 341 342 343 344
  template <typename T>
  static T *GridFrom(const VariableNameMap &inputs, const Scope &scope) {
    return GetVarValue<T>("Grid", inputs, scope);
  }

345
  template <typename T>
W
wangliu 已提交
346
  static const T GetAttr(const string &key, const AttributeMap &map) {
347 348
    return ((Attribute)map.at(key)).Get<T>();
  }
xiebaiyuan's avatar
xiebaiyuan 已提交
349 350
  static const std::string GetStringAttr(const string &key,
                                         const AttributeMap &map) {
351 352
    return ((Attribute)map.at(key)).GetString();
  }
353

354 355 356 357
  static const bool HasAttr(const string &key, const AttributeMap &map) {
    return map.count(key) > 0;
  }

358
  template <typename T>
W
wangliu 已提交
359
  static T *GetVarValue(const string &key, const VariableNameMap &var_map,
360
                        const Scope &scope) {
W
wangliu 已提交
361 362
    PADDLE_MOBILE_ENFORCE(var_map.count(key) > 0,
                          "%s is not contained in var_map", key.c_str())
363 364 365 366 367 368
    auto var_vec = var_map.at(key);
    if (!var_vec.empty()) {
      auto var = scope.FindVar(var_vec[0]);
      return var->GetMutable<T>();
    } else {
      return nullptr;
朔-望's avatar
朔-望 已提交
369
    }
370
  }
朔-望's avatar
朔-望 已提交
371

E
eclipsess 已提交
372 373 374 375 376 377 378 379 380 381 382 383 384
  static Variable *GetVar(const string &key, const VariableNameMap &var_map,
                          const Scope &scope) {
    PADDLE_MOBILE_ENFORCE(var_map.count(key) > 0,
                          "%s is not contained in var_map", key.c_str())
    auto var_vec = var_map.at(key);
    if (!var_vec.empty()) {
      auto var = scope.FindVar(var_vec[0]);
      return var;
    } else {
      return nullptr;
    }
  }

385
  static std::string Getkey(const string &key, const VariableNameMap &var_map,
386
                            int index) {
387 388
    PADDLE_MOBILE_ENFORCE(var_map.count(key) > index,
                          "%s is not contained in var_map", key.c_str())
389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406
    auto var_vec = var_map.at(key);
    return var_vec[index];
  }

  template <typename T>
  static T *GetVarValue1(const string &key, const VariableNameMap &var_map,
                         const Scope &scope) {
    PADDLE_MOBILE_ENFORCE(var_map.count(key) > 0,
                          "%s is not contained in var_map", key.c_str())
    auto var_vec = var_map.at(key);
    if (!var_vec.empty()) {
      auto var = scope.FindVar(var_vec[1]);
      return var->GetMutable<T>();
    } else {
      return nullptr;
    }
  }

407
  template <typename T>
W
wangliu 已提交
408 409 410
  static vector<T *> GetMultiVarValue(const string &key,
                                      const VariableNameMap &var_map,
                                      const Scope &scope) {
411 412
    auto var_vecs = var_map.at(key);
    assert(var_vecs.size() > 1);
W
wangliu 已提交
413
    vector<T *> var_res;
414 415 416
    for (auto &var_vec : var_vecs) {
      auto var = scope.FindVar(var_vec);
      var_res.push_back(var->GetMutable<T>());
朔-望's avatar
朔-望 已提交
417
    }
418 419
    return var_res;
  }
E
eclipsess 已提交
420 421 422 423 424 425 426 427 428 429 430 431 432

  static vector<Variable *> GetMultiVar(const string &key,
                                        const VariableNameMap &var_map,
                                        const Scope &scope) {
    auto var_vecs = var_map.at(key);
    assert(var_vecs.size() > 1);
    vector<Variable *> var_res;
    for (auto &var_vec : var_vecs) {
      auto var = scope.FindVar(var_vec);
      var_res.push_back(var);
    }
    return var_res;
  }
朔-望's avatar
朔-望 已提交
433 434
};

435 436 437 438 439 440
#define GET_VAR_AS_TENSOR(name, name_dict, scope) \
  OpParam::GetVarValue<framework::Tensor>(name, name_dict, scope)

#define GET_VAR_AS_LOD_TENSOR(name, name_dict, scope) \
  OpParam::GetVarValue<framework::LoDTensor>(name, name_dict, scope)

N
nhzlx 已提交
441
template <typename Dtype>
442
class ConvParam : public OpParam {
N
nhzlx 已提交
443 444 445
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

朔-望's avatar
朔-望 已提交
446
 public:
447
  ConvParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
448 449 450 451
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    filter_ = OpParam::FilterFrom<GType>(inputs, *scope);
    input_ = OpParam::InputFrom<GType>(inputs, *scope);
452
    if (outputs.count("Output")) {
453
      output_ = OpParam::OutputFrom<GType>(outputs, *scope);
454 455 456 457 458
    }
    strides_ = OpParam::GetAttr<vector<int>>("strides", attrs);
    paddings_ = OpParam::GetAttr<vector<int>>("paddings", attrs);
    dilations_ = OpParam::GetAttr<vector<int>>("dilations", attrs);
    groups = OpParam::GetAttr<int>("groups", attrs);
459
  }
朔-望's avatar
朔-望 已提交
460

461
  const GType *Input() const { return input_; }
朔-望's avatar
朔-望 已提交
462

463
  GType *Filter() const { return filter_; }
朔-望's avatar
朔-望 已提交
464

465
  GType *Output() const { return output_; }
朔-望's avatar
朔-望 已提交
466

W
wangliu 已提交
467
  const vector<int> &Strides() const { return strides_; }
朔-望's avatar
朔-望 已提交
468

W
wangliu 已提交
469
  const vector<int> &Paddings() const { return paddings_; }
朔-望's avatar
朔-望 已提交
470

W
wangliu 已提交
471
  const vector<int> &Dilations() const { return dilations_; }
朔-望's avatar
朔-望 已提交
472

H
hjchen2 已提交
473 474 475
  enum ExecMode {
    EXEC_INVALID = 0,
    EXEC_GEMM_FLOAT,
476 477
    EXEC_DEPTHWISE3x3S1_FLOAT,
    EXEC_DEPTHWISE3x3S2_FLOAT,
H
hjchen2 已提交
478 479
    EXEC_WINOGRAD3X3_FLOAT,
    EXEC_WINOGRAD5X5_FLOAT,
480
    EXEC_DEPTHWISE5x5_FLOAT,
H
hjchen2 已提交
481
    EXEC_GEMM_INT8,
H
hjchen2 已提交
482
    EXEC_DEPTHWISE3x3_INT8,
483
    EXEC_DEPTHWISE5x5_INT8,
S
StarryRain 已提交
484 485
    EXEC_SLIDINGWINDOW3x3S1_FLOAT,
    EXEC_SLIDINGWINDOW3x3S2_FLOAT,
486 487 488 489 490
    EXEC_DEPTHWISE3x3_FLOAT,
    EXEC_SLIDINGWINDOW1x1_FLOAT,
    EXEC_SLIDINGWINDOW3x3_FLOAT,
    EXEC_SLIDINGWINDOW5x5_FLOAT,
    EXEC_SLIDINGWINDOW7x7_FLOAT,
491
    EXEC_GEMM1x1s1_FLOAT,
H
hjchen2 已提交
492 493 494 495
  };

  ExecMode &ExecMode() const { return exec_mode_; }

496
  const int &Groups() const { return groups; }
朔-望's avatar
朔-望 已提交
497

498 499 500 501 502 503 504
#ifdef PADDLE_MOBILE_CL
  int Offset() const { return offset_; }

  int SetOffset(int in_offset) { offset_ = in_offset; }

#endif

H
hjchen2 已提交
505
 public:
506 507 508 509
  GType *input_;
  GType *output_;
  GType *filter_;
  GType *transformed_filter_;
W
wangliu 已提交
510 511 512
  vector<int> strides_;
  vector<int> paddings_;
  vector<int> dilations_;
H
hjchen2 已提交
513
  mutable enum ExecMode exec_mode_;
514
  int groups;
515 516 517 518

#ifdef PADDLE_MOBILE_CL
  int offset_;
#endif
Z
zhangyang 已提交
519 520 521

#ifdef PADDLE_MOBILE_FPGA

H
hjchen2 已提交
522
 public:
Z
zhangyang 已提交
523 524 525 526 527
  fpga::SplitConvArgs fpga_conv_args;

 public:
  const fpga::SplitConvArgs &FpgaArgs() const { return fpga_conv_args; }
  void SetFpgaArgs(const fpga::SplitConvArgs &args) { fpga_conv_args = args; }
528 529 530 531 532 533 534

 public:
  fpga::DWconvArgs fpga_dwconv_args;

 public:
  const fpga::DWconvArgs &FpgaDwconvArgs() const { return fpga_dwconv_args; }
  void SetFpgaArgs(const fpga::DWconvArgs &args) { fpga_dwconv_args = args; }
Z
zhangyang 已提交
535
#endif
朔-望's avatar
朔-望 已提交
536
};
N
nhzlx 已提交
537 538
template <typename Dtype>
Print &operator<<(Print &printer, const ConvParam<Dtype> &conv_param);
朔-望's avatar
朔-望 已提交
539

N
nhzlx 已提交
540
template <typename Dtype>
541
class ElementwiseAddParam : public OpParam {
N
nhzlx 已提交
542 543 544
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

朔-望's avatar
朔-望 已提交
545
 public:
546
  ElementwiseAddParam(const VariableNameMap &inputs,
547
                      const VariableNameMap &outputs, const AttributeMap &attrs,
548 549 550 551 552
                      Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
553 554 555
    axis_ = GetAttr<int>("axis", attrs);
  }

xiebaiyuan's avatar
xiebaiyuan 已提交
556
  const GType *InputX() const { return input_x_; }
557

xiebaiyuan's avatar
xiebaiyuan 已提交
558
  const GType *InputY() const { return input_y_; }
559

xiebaiyuan's avatar
xiebaiyuan 已提交
560
  GType *Out() const { return out_; }
561 562 563

  const int &Axis() const { return axis_; }

朔-望's avatar
朔-望 已提交
564
 private:
xiebaiyuan's avatar
xiebaiyuan 已提交
565 566 567
  GType *input_x_;
  GType *input_y_;
  GType *out_;
568
  int axis_;
Z
zhangyang 已提交
569 570 571
#ifdef PADDLE_MOBILE_FPGA

 private:
H
hanbuhe 已提交
572
  fpga::EWAddArgs fpga_EW_add_args;
Z
zhangyang 已提交
573 574

 public:
H
hanbuhe 已提交
575 576
  const fpga::EWAddArgs &FpgaArgs() const { return fpga_EW_add_args; }
  void SetFpgaArgs(const fpga::EWAddArgs &args) { fpga_EW_add_args = args; }
qnqinan's avatar
qnqinan 已提交
577 578 579 580

 public:
  Tensor float_input_x, float_out;

Z
zhangyang 已提交
581
#endif
朔-望's avatar
朔-望 已提交
582 583
};

E
eclipsess 已提交
584
#ifdef ELEMENTWISEMUL_OP
E
eclipsess 已提交
585
template <typename Dtype>
586
class ElementwiseMulParam : public OpParam {
E
eclipsess 已提交
587 588 589 590 591 592
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ElementwiseMulParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
593 594 595 596 597
                      Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
E
eclipsess 已提交
598 599 600 601 602 603 604 605 606 607 608 609 610 611 612 613
    axis_ = GetAttr<int>("axis", attrs);
  }

  const GType *InputX() const { return input_x_; }

  const GType *InputY() const { return input_y_; }

  GType *Out() const { return out_; }

  const int &Axis() const { return axis_; }

 private:
  GType *input_x_;
  GType *input_y_;
  GType *out_;
  int axis_;
qnqinan's avatar
qnqinan 已提交
614 615 616 617 618 619
#ifdef PADDLE_MOBILE_FPGA

 public:
  Tensor float_input_x, float_out;

#endif
E
eclipsess 已提交
620
};
S
suiyang 已提交
621
#endif
E
eclipsess 已提交
622

623
#ifdef FUSION_ELEMENTWISEADDRELU_OP
N
nhzlx 已提交
624 625
// The fused add+relu operator takes exactly the same inputs/outputs/attrs as
// plain elementwise_add, so its param type is just an alias.
template <typename Dtype>
using ElementwiseAddReluParam = ElementwiseAddParam<Dtype>;
L
liuruilong 已提交
626 627
#endif

628
#ifdef ELEMENTWISESUB_OP
629
template <typename Dtype>
630
class ElementwiseSubParam : public OpParam {
631 632 633 634 635 636
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ElementwiseSubParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
637 638 639 640 641
                      Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
642 643 644 645 646 647 648 649 650 651 652 653 654 655 656 657 658
    axis_ = GetAttr<int>("axis", attrs);
  }

  const GType *InputX() const { return input_x_; }

  const GType *InputY() const { return input_y_; }

  GType *Out() const { return out_; }

  const int &Axis() const { return axis_; }

 private:
  GType *input_x_;
  GType *input_y_;
  GType *out_;
  int axis_;
};
659
#endif
660

L
liuruilong 已提交
661
#ifdef MUL_OP
N
nhzlx 已提交
662
template <typename Dtype>
663
class MulParam : public OpParam {
N
nhzlx 已提交
664 665 666
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

朔-望's avatar
朔-望 已提交
667
 public:
668
  MulParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
669 670 671 672 673
           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
674 675 676
    x_num_col_dims_ = GetAttr<int>("x_num_col_dims", attrs);
    y_num_col_dims_ = GetAttr<int>("y_num_col_dims", attrs);
  }
朔-望's avatar
朔-望 已提交
677

678
  GType *InputX() const { return input_x_; }
朔-望's avatar
朔-望 已提交
679

680
  GType *InputY() const { return input_y_; }
朔-望's avatar
朔-望 已提交
681

xiebaiyuan's avatar
xiebaiyuan 已提交
682
  GType *Out() const { return out_; }
朔-望's avatar
朔-望 已提交
683

684
  const int &XNumColDims() const { return x_num_col_dims_; }
朔-望's avatar
朔-望 已提交
685

686
  const int &YNumColDims() const { return y_num_col_dims_; }
朔-望's avatar
朔-望 已提交
687

朔-望's avatar
朔-望 已提交
688
 private:
xiebaiyuan's avatar
xiebaiyuan 已提交
689 690 691
  GType *input_x_;
  GType *input_y_;
  GType *out_;
692 693
  int x_num_col_dims_;
  int y_num_col_dims_;
朔-望's avatar
朔-望 已提交
694
};
L
liuruilong 已提交
695
#endif
朔-望's avatar
朔-望 已提交
696

L
liuruilong 已提交
697
#ifdef CONCAT_OP
N
nhzlx 已提交
698
template <typename Dtype>
朔-望's avatar
朔-望 已提交
699
class ConcatParam : public OpParam {
N
nhzlx 已提交
700 701 702
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

朔-望's avatar
朔-望 已提交
703
 public:
704
  ConcatParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
705 706 707 708
              const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    inputs_ = InputMultiFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
709
    axis_ = GetAttr<int>("axis", attrs);
710
    original_output_dims_size_ = out_->dims().size();
711
  }
朔-望's avatar
朔-望 已提交
712

N
nhzlx 已提交
713
  vector<GType *> Inputs() const { return inputs_; }
朔-望's avatar
朔-望 已提交
714

xiebaiyuan's avatar
xiebaiyuan 已提交
715
  GType *Out() const { return out_; }
朔-望's avatar
朔-望 已提交
716

717
  const int &Axis() const { return axis_; }
朔-望's avatar
朔-望 已提交
718

719
 public:
N
nhzlx 已提交
720
  vector<GType *> inputs_;
xiebaiyuan's avatar
xiebaiyuan 已提交
721
  GType *out_;
722
  int axis_;
723
  int original_output_dims_size_;
Z
zhangyang 已提交
724 725 726 727 728 729 730 731 732
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::ConcatArgs fpga_concat_args;

 public:
  const fpga::ConcatArgs &FpgaArgs() const { return fpga_concat_args; }
  void SetFpgaArgs(const fpga::ConcatArgs &args) { fpga_concat_args = args; }
#endif
朔-望's avatar
朔-望 已提交
733
};
L
liuruilong 已提交
734
#endif
朔-望's avatar
朔-望 已提交
735

E
eclipsess 已提交
736 737 738 739 740 741 742 743
#ifdef SUM_OP
template <typename Dtype>
class SumParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SumParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
744 745 746 747 748 749
           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    inputs_vars_ = InputMultiVarsFrom(inputs, *scope);
    out_var_ = OutVarFrom(outputs, *scope);
    inputs_ = InputMultiFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
E
eclipsess 已提交
750 751 752 753 754 755 756 757 758 759 760 761 762 763 764 765 766 767
  }

  vector<Variable *> InputsVars() const { return inputs_vars_; }

  Variable *OutVar() const { return out_var_; }

  vector<GType *> Inputs() const { return inputs_; }

  GType *Out() const { return out_; }

 private:
  vector<Variable *> inputs_vars_;
  Variable *out_var_;
  vector<GType *> inputs_;
  GType *out_;
};
#endif

L
liuruilong 已提交
768
#ifdef LRN_OP
N
nhzlx 已提交
769
template <typename Dtype>
E
eclipsess 已提交
770
class LrnParam : public OpParam {
N
nhzlx 已提交
771 772 773
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

朔-望's avatar
朔-望 已提交
774
 public:
775
  LrnParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
776 777 778 779 780
           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    mid_out_ = MidOutFrom<GType>(outputs, *scope);
781 782 783 784
    n_ = GetAttr<int>("n", attrs);
    alpha_ = GetAttr<float>("alpha", attrs);
    beta_ = GetAttr<float>("beta", attrs);
    k_ = GetAttr<float>("k", attrs);
785
    data_format_ = GetStringAttr("data_format", attrs);
786
  }
E
eclipsess 已提交
787

788
  const GType *InputX() const { return input_x_; }
E
eclipsess 已提交
789

790
  GType *Out() const { return out_; }
E
eclipsess 已提交
791

792
  GType *MidOut() const { return mid_out_; }
E
eclipsess 已提交
793

794
  const int &N() const { return n_; }
E
eclipsess 已提交
795

796
  const float &Alpha() const { return alpha_; }
E
eclipsess 已提交
797

798
  const float &Beta() const { return beta_; }
E
eclipsess 已提交
799

800
  const float &K() const { return k_; }
E
eclipsess 已提交
801

W
wangliu 已提交
802
  const string &DataFormat() const { return data_format_; }
E
eclipsess 已提交
803

朔-望's avatar
朔-望 已提交
804
 private:
805 806 807
  GType *input_x_;
  GType *out_;
  GType *mid_out_;
808 809 810 811
  int n_;
  float alpha_;
  float beta_;
  float k_;
W
wangliu 已提交
812
  string data_format_;
E
eclipsess 已提交
813
};
L
liuruilong 已提交
814 815
#endif

Z
zhaojiaying01 已提交
816 817
#ifdef NORM_OP
template <typename Dtype>
818
class NormParam : public OpParam {
Z
zhaojiaying01 已提交
819 820 821 822 823
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  NormParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
824 825 826 827 828
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    output_norm_ = OutputNormFrom<GType>(outputs, *scope);
Z
zhaojiaying01 已提交
829 830 831 832
    epsilon_ = GetAttr<float>("epsilon", attrs);
    axis_ = GetAttr<int>("axis", attrs);
  }

833
  const GType *InputX() const { return input_x_; }
Z
zhaojiaying01 已提交
834

835
  GType *Out() const { return out_; }
Z
zhaojiaying01 已提交
836

837
  GType *OutputNorm() const { return output_norm_; }
Z
zhaojiaying01 已提交
838 839 840 841 842 843

  const float &Epsilon() const { return epsilon_; }

  const int &Axis() const { return axis_; }

 private:
844 845 846
  GType *input_x_;
  GType *out_;
  GType *output_norm_;
Z
zhaojiaying01 已提交
847 848 849 850 851
  float epsilon_;
  int axis_;
};
#endif

L
liuruilong 已提交
852
#ifdef BATCHNORM_OP
// Parameters for the batch_norm operator. Holds the five batch-norm inputs
// (X, Bias, Mean, Scale, Variance), the output Y, and the folded
// "new scale"/"new bias" tensors that kernels may precompute and cache here.
template <typename Dtype>
class BatchNormParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  BatchNormParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                 const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    output_y_ = OutputYFrom<GType>(outputs, *scope);
    input_bias_ = InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = GetAttr<float>("epsilon", attrs);
    momentum_ = GetAttr<float>("momentum", attrs);
    // Fix: is_test_ was previously never assigned (the GetAttr call was
    // commented out), so IsTest() returned an uninitialized bool. Read the
    // attribute when the model provides it; otherwise keep the in-class
    // default (true — this is an inference framework).
    if (HasAttr("is_test", attrs)) {
      is_test_ = GetAttr<bool>("is_test", attrs);
    }
  }

  ~BatchNormParam() {}

  const GType *InputX() const { return input_x_; }

  GType *OutputY() const { return output_y_; }

  const GType *InputBias() const { return input_bias_; }

  const GType *InputMean() const { return input_mean_; }

  const GType *InputScale() const { return input_scale_; }

  const GType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  const bool &IsTest() const { return is_test_; }

  // NOTE(review): data_format_ is never assigned in this class; DataFormat()
  // returns an empty string unless a derived/therefore external writer sets
  // it — confirm against callers before relying on it.
  const string &DataFormat() const { return data_format_; }

  // Takes ownership of a kernel-precomputed folded scale tensor; released
  // through CLImageDeleter so GPU image memory is freed correctly.
  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  // Takes ownership of a kernel-precomputed folded bias tensor.
  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const GType *NewScale() const { return new_scale_.get(); }

  const GType *NewBias() const { return new_bias_.get(); }

 private:
  GType *input_x_;
  GType *output_y_;
  GType *input_bias_;
  GType *input_mean_;
  GType *input_scale_;
  GType *input_variance_;
  float epsilon_;
  float momentum_;
  bool is_test_ = true;  // inference default; overridden by "is_test" attr
  string data_format_;
  std::shared_ptr<GType> new_bias_;
  std::shared_ptr<GType> new_scale_;
};
#endif

923 924 925 926 927 928 929 930 931 932 933 934
#ifdef INSTANCENORM_OP
// Parameters for the instance_norm operator: X in, Y out, plus epsilon.
template <typename Dtype>
class InstanceNormParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  InstanceNormParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    output_y_ = OutputYFrom<GType>(outputs, *scope);
    epsilon_ = GetAttr<float>("epsilon", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *OutputY() const { return output_y_; }

  // Numerical-stability constant added to the variance.
  const float &Epsilon() const { return epsilon_; }

 private:
  GType *input_x_;
  GType *output_y_;
  float epsilon_;
};
#endif

#ifdef FUSION_INSTANCENORM_RELU_OP
// Parameters for the fused instance_norm + relu operator. Same inputs as
// InstanceNormParam, but the fused kernel writes directly to "Out".
template <typename Dtype>
class FusionInstanceNormReluParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionInstanceNormReluParam(const VariableNameMap &inputs,
                              const VariableNameMap &outputs,
                              const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    epsilon_ = GetAttr<float>("epsilon", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  // Numerical-stability constant added to the variance.
  const float &Epsilon() const { return epsilon_; }

 private:
  GType *input_x_;
  GType *out_;
  float epsilon_;
};
#endif

L
liuruilong 已提交
981
#ifdef POOL_OP
// Parameters for the pool2d operator: pooling type ("max"/"avg"), kernel
// size, strides, paddings and the ceil/global/exclusive pooling flags.
template <typename Dtype>
class PoolParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  PoolParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputXFrom<GType>(inputs, *scope);

    output_ = OutFrom<GType>(outputs, *scope);
    pooling_type_ = GetStringAttr("pooling_type", attrs);
    ksize_ = GetAttr<vector<int>>("ksize", attrs);
    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
    ceil_mode_ = GetAttr<bool>("ceil_mode", attrs);
    global_pooling_ = GetAttr<bool>("global_pooling", attrs);

    // Older fluid models may not carry "exclusive"; default to true then.
    if (HasAttr("exclusive", attrs)) {
      exclusive_ = GetAttr<bool>("exclusive", attrs);
    } else {
      exclusive_ = true;
    }
  }

  const GType *Input() const { return input_; }

  GType *Output() const { return output_; }

  const string &PoolingType() const { return pooling_type_; }

  const vector<int> &Ksize() const { return ksize_; }

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

  bool isCeilMode() const { return ceil_mode_; }

  bool isGlobalPooling() const { return global_pooling_; }

  bool isExclusive() const { return exclusive_; }

 private:
  GType *input_;
  GType *output_;
  string pooling_type_;
  vector<int> ksize_;
  vector<int> strides_;
  vector<int> paddings_;
  bool ceil_mode_;
  bool global_pooling_ = false;
  bool exclusive_ = true;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::PoolingArgs fpga_pool_args;

 public:
  const fpga::PoolingArgs &FpgaArgs() const { return fpga_pool_args; }
  void SetFpgaArgs(const fpga::PoolingArgs &args) { fpga_pool_args = args; }
#endif
};
#endif

#ifdef PRIORBOX_OP
// Parameters for the prior_box operator (SSD-style detection): generates
// candidate boxes and their variances from a feature map + input image.
template <typename Dtype>
class PriorBoxParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  PriorBoxParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputFrom<GType>(inputs, *scope);
    input_image_ = InputImageFrom<GType>(inputs, *scope);
    output_boxes_ = OutputBoxesFrom<GType>(outputs, *scope);
    output_variances_ = OutputVariancesFrom<GType>(outputs, *scope);
    min_sizes_ = GetAttr<vector<float>>("min_sizes", attrs);
    max_sizes_ = GetAttr<vector<float>>("max_sizes", attrs);
    aspect_ratios_ = GetAttr<vector<float>>("aspect_ratios", attrs);
    variances_ = GetAttr<vector<float>>("variances", attrs);

    // Attribute introduced later in fluid; absent in older models.
    if (HasAttr("min_max_aspect_ratios_order", attrs)) {
      min_max_aspect_ratios_order_ =
          GetAttr<bool>("min_max_aspect_ratios_order", attrs);
    } else {
      min_max_aspect_ratios_order_ = false;
    }
    flip_ = GetAttr<bool>("flip", attrs);
    clip_ = GetAttr<bool>("clip", attrs);
    step_w_ = GetAttr<float>("step_w", attrs);
    step_h_ = GetAttr<float>("step_h", attrs);
    offset_ = GetAttr<float>("offset", attrs);
  }

  // Tensor accessors.
  const GType *Input() const { return input_; }
  const GType *InputImage() const { return input_image_; }
  GType *OutputBoxes() const { return output_boxes_; }
  GType *OutputVariances() const { return output_variances_; }

  // Attribute accessors.
  const vector<float> &MinSizes() const { return min_sizes_; }
  const vector<float> &MaxSizes() const { return max_sizes_; }
  const vector<float> &AspectRatios() const { return aspect_ratios_; }
  const vector<float> &Variances() const { return variances_; }
  const bool &Flip() const { return flip_; }
  const bool &Clip() const { return clip_; }
  const float &StepW() const { return step_w_; }
  const float &StepH() const { return step_h_; }
  const float &Offset() const { return offset_; }
  const bool &MinMaxAspectRatiosOrder() const {
    return min_max_aspect_ratios_order_;
  }

 private:
  GType *input_;
  GType *input_image_;
  GType *output_boxes_;
  GType *output_variances_;
  vector<float> min_sizes_;
  vector<float> max_sizes_;
  vector<float> aspect_ratios_;
  vector<float> variances_;
  bool flip_;
  bool clip_;
  float step_w_;
  float step_h_;
  float offset_;
  bool min_max_aspect_ratios_order_;
};
#endif
E
eclipsess 已提交
1126

L
liuruilong 已提交
1127
#ifdef BOXCODER_OP
// Parameters for the box_coder operator: encodes/decodes target boxes
// against prior boxes according to "code_type".
template <typename Dtype>
class BoxCoderParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  BoxCoderParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_priorbox_ = InputPriorBoxFrom<GType>(inputs, *scope);
    input_priorboxvar_ = InputPriorBoxVarFrom<GType>(inputs, *scope);
    input_targetbox_ = InputTargetBoxFrom<GType>(inputs, *scope);
    output_box_ = OutputBoxFrom<GType>(outputs, *scope);
    code_type_ = GetStringAttr("code_type", attrs);
  }

  const GType *InputPriorBox() const { return input_priorbox_; }

  const GType *InputPriorBoxVar() const { return input_priorboxvar_; }

  const GType *InputTargetBox() const { return input_targetbox_; }

  GType *OutputBox() const { return output_box_; }

  // Encoding scheme, e.g. "encode_center_size" / "decode_center_size".
  const std::string &CodeType() const { return code_type_; }

 private:
  GType *input_priorbox_;
  GType *input_priorboxvar_;
  GType *input_targetbox_;
  GType *output_box_;
  std::string code_type_;
};
#endif
W
wangliu 已提交
1161

L
liuruilong 已提交
1162
#ifdef SOFTMAX_OP
// Parameters for the softmax operator. On FPGA builds it additionally keeps
// a float-typed staging tensor and the bypass arguments for the DMA engine.
template <typename Dtype>
class SoftmaxParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SoftmaxParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
  }
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return out_; }

 private:
  GType *input_x_;
  GType *out_;

#ifdef PADDLE_MOBILE_FPGA

#ifdef PADDLE_MOBILE_FPGA_V1

 private:
  // Optional float staging copy of the input; owned by this param.
  std::shared_ptr<GType> float_input_x_;
  fpga::BypassArgs fpga_bypass_args;

 public:
  // Returns the float staging input when one was set, otherwise the raw
  // input tensor.
  GType *FloatInput() const {
    return float_input_x_ == nullptr ? input_x_ : float_input_x_.get();
  }
  void SetFloatInput(LoDTensor *input) { float_input_x_.reset(input); }
  const fpga::BypassArgs &FpgaArgs() const { return fpga_bypass_args; }
  void SetFpgaArgs(const fpga::BypassArgs &args) { fpga_bypass_args = args; }
#else

 private:
  fpga::BypassArgs fpga_bypass_args;

 public:
  const fpga::BypassArgs &FpgaArgs() const { return fpga_bypass_args; }
  void SetFpgaArgs(const fpga::BypassArgs &args) { fpga_bypass_args = args; }

 public:
  std::shared_ptr<Tensor> float_input_x_, float_out;
#endif
#endif
};
#endif
W
wangliu 已提交
1212

L
liuruilong 已提交
1213
#ifdef SIGMOID_OP
// Parameters for the sigmoid operator: X in, Out out; FPGA builds carry the
// bypass arguments used by the hardware activation path.
template <typename Dtype>
class SigmoidParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SigmoidParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
  }
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return out_; }

 private:
  GType *input_x_;
  GType *out_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::BypassArgs fpga_bypass_args;

 public:
  const fpga::BypassArgs &FpgaArgs() const { return fpga_bypass_args; }
  void SetFpgaArgs(const fpga::BypassArgs &args) { fpga_bypass_args = args; }
#endif
};
#endif

#ifdef MULTICLASSNMS_OP
// Parameters for the multiclass_nms operator: filters detection boxes by
// score threshold, applies per-class NMS, then keeps the top-k results.
template <typename Dtype>
class MultiClassNMSParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  MultiClassNMSParam(const VariableNameMap &inputs,
                     const VariableNameMap &outputs, const AttributeMap &attrs,
                     Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_bboxes_ = InputBBoxesFrom<GType>(inputs, *scope);
    input_scores_ = InputScoresFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    background_label_ = GetAttr<int>("background_label", attrs);
    nms_top_k_ = GetAttr<int>("nms_top_k", attrs);
    keep_top_k_ = GetAttr<int>("keep_top_k", attrs);
    nms_threshold_ = GetAttr<float>("nms_threshold", attrs);
    nms_eta_ = GetAttr<float>("nms_eta", attrs);
    score_threshold_ = GetAttr<float>("score_threshold", attrs);
  }

  // Tensor accessors.
  GType *InputBBoxes() const { return input_bboxes_; }
  GType *InputScores() const { return input_scores_; }
  GType *Out() const { return out_; }

  // Attribute accessors.
  const int &BackGroundLabel() const { return background_label_; }
  const int &NMSTopK() const { return nms_top_k_; }
  const int &KeepTopK() const { return keep_top_k_; }
  const float &NMSThreshold() const { return nms_threshold_; }
  const float &NMSEta() const { return nms_eta_; }
  const float &ScoreThreshold() const { return score_threshold_; }

 private:
  GType *input_bboxes_;
  GType *input_scores_;
  GType *out_;
  int background_label_;
  int nms_top_k_;
  int keep_top_k_;
  float nms_threshold_;
  float nms_eta_;
  float score_threshold_;
};
#endif
W
wangliu 已提交
1296

L
lijiancheng0614 已提交
1297 1298 1299 1300 1301 1302 1303 1304 1305
#ifdef POLYGONBOXTRANSFORM_OP
// Parameters for the polygon_box_transform operator (EAST-style text
// detection): Input in, Output out, no attributes.
template <typename Dtype>
class PolygonBoxTransformParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  PolygonBoxTransformParam(const VariableNameMap &inputs,
                           const VariableNameMap &outputs,
                           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputFrom<GType>(inputs, *scope);
    output_ = OutputFrom<GType>(outputs, *scope);
  }
  const GType *Input() const { return input_; }
  GType *Output() const { return output_; }

 private:
  GType *input_;
  GType *output_;
};
#endif

N
nhzlx 已提交
1320
template <typename Dtype>
L
liuruilong 已提交
1321
class FeedParam : public OpParam {
N
nhzlx 已提交
1322 1323 1324
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

L
liuruilong 已提交
1325 1326
 public:
  FeedParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
H
update  
hjchen2 已提交
1327
            const AttributeMap &attrs, Scope *scope)
1328
      : OpParam(inputs, outputs, attrs, scope) {
H
hjchen2 已提交
1329
    input_x_ = InputXFrom<std::vector<LoDTensor>>(inputs, *scope);
H
update  
hjchen2 已提交
1330
    out_ = OutFrom<GType>(outputs, *scope);
H
update  
hjchen2 已提交
1331
    col_ = GetAttr<int>("col", attrs);
H
update  
hjchen2 已提交
1332
    auto var = scope->FindVar("batch_size");
W
wangliu 已提交
1333
    batch_size = var->GetValue<int>();
L
liuruilong 已提交
1334
  }
H
hjchen2 已提交
1335
  const std::vector<LoDTensor> *InputX() const { return input_x_; }
xiebaiyuan's avatar
xiebaiyuan 已提交
1336
  GType *Out() const { return out_; }
H
update  
hjchen2 已提交
1337
  const int Col() const { return col_; }
W
wangliu 已提交
1338
  const int BatchSize() const { return batch_size; }
L
liuruilong 已提交
1339

L
liuruilong 已提交
1340
 private:
H
hjchen2 已提交
1341
  std::vector<LoDTensor> *input_x_;
xiebaiyuan's avatar
xiebaiyuan 已提交
1342
  GType *out_;
H
update  
hjchen2 已提交
1343
  int col_;
W
wangliu 已提交
1344
  int batch_size;
L
liuruilong 已提交
1345 1346
};

N
nhzlx 已提交
1347
template <typename Dtype>
L
liuruilong 已提交
1348
class FetchParam : public OpParam {
N
nhzlx 已提交
1349 1350 1351
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

L
liuruilong 已提交
1352 1353
 public:
  FetchParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
H
update  
hjchen2 已提交
1354
             const AttributeMap &attrs, Scope *scope)
1355
      : OpParam(inputs, outputs, attrs, scope) {
H
hjchen2 已提交
1356 1357
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<std::vector<LoDTensor>>(outputs, *scope);
1358
    col_ = GetAttr<int>("col", attrs);
L
liuruilong 已提交
1359
  }
L
liuruilong 已提交
1360

H
hjchen2 已提交
1361 1362
  const GType *InputX() const { return input_x_; }
  std::vector<LoDTensor> *Out() const { return out_; }
1363
  const int Col() const { return col_; }
L
liuruilong 已提交
1364

L
liuruilong 已提交
1365
 private:
H
hjchen2 已提交
1366 1367
  GType *input_x_;
  std::vector<LoDTensor> *out_;
1368
  int col_;
qnqinan's avatar
qnqinan 已提交
1369
#ifdef PADDLE_MOBILE_FPGA
1370

qnqinan's avatar
qnqinan 已提交
1371
 public:
1372
#ifdef PADDLE_MOBILE_FPGA_V1
qnqinan's avatar
qnqinan 已提交
1373
  fpga::BypassArgs fpga_bypass_args;
1374
  Tensor aligned_out;
1375 1376 1377
#else
  std::shared_ptr<Tensor> aligned_out;
#endif
qnqinan's avatar
qnqinan 已提交
1378
#endif
L
liuruilong 已提交
1379 1380
};

L
lijiancheng0614 已提交
1381 1382 1383 1384 1385 1386 1387 1388 1389
#ifdef FILL_CONSTANT_OP
// Parameters for the fill_constant operator: creates a tensor of `shape_`
// filled with `value_`, with element type selected by `dtype_`.
template <typename Dtype>
class FillConstantParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FillConstantParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    out_var_ = OutVarFrom(outputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    dtype_ = GetAttr<int>("dtype", attrs);
    shape_ = GetAttr<vector<int>>("shape", attrs);
    value_ = GetAttr<float>("value", attrs);
  }

  // Raw output variable (needed to mutate the tensor type in place).
  Variable *OutVar() const { return out_var_; }

  GType *Out() const { return out_; }

  const int &DataDtype() const { return dtype_; }

  const vector<int> &Shape() const { return shape_; }

  const float &Value() const { return value_; }

 private:
  Variable *out_var_;
  GType *out_;
  int dtype_;
  vector<int> shape_;
  float value_;
};
#endif

1418 1419 1420 1421 1422 1423 1424 1425 1426 1427 1428 1429 1430 1431 1432 1433 1434 1435 1436 1437 1438 1439 1440 1441 1442 1443 1444 1445 1446 1447 1448 1449 1450 1451 1452 1453 1454 1455 1456 1457 1458 1459 1460 1461 1462 1463 1464 1465 1466
#ifdef FILL_CONSTANT_BATCH_SIZE_LIKE_OP
// Parameters for fill_constant_batch_size_like: like fill_constant, but one
// output dimension (output_dim_idx_) is copied from the input's
// input_dim_idx_ dimension instead of taken from `shape_`.
template <typename Dtype>
class FillConstantBatchSizeLikeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FillConstantBatchSizeLikeParam(const VariableNameMap &inputs,
                                 const VariableNameMap &outputs,
                                 const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputFrom<GType>(inputs, *scope);
    out_var_ = OutVarFrom(outputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    dtype_ = GetAttr<int>("dtype", attrs);
    shape_ = GetAttr<vector<int>>("shape", attrs);
    value_ = GetAttr<float>("value", attrs);
    input_dim_idx_ = GetAttr<int>("input_dim_idx", attrs);
    output_dim_idx_ = GetAttr<int>("output_dim_idx", attrs);
  }

  // Raw output variable (needed to mutate the tensor type in place).
  Variable *OutVar() const { return out_var_; }

  const GType *Input() const { return input_; }
  GType *Out() const { return out_; }

  const int &DataDtype() const { return dtype_; }
  const vector<int> &Shape() const { return shape_; }
  const float &Value() const { return value_; }
  int InputDimIdx() const { return input_dim_idx_; }
  int OutputDimIdx() const { return output_dim_idx_; }

 private:
  GType *input_;
  Variable *out_var_;
  GType *out_;
  int dtype_;
  vector<int> shape_;
  float value_;
  int input_dim_idx_;
  int output_dim_idx_;
};
#endif

L
liuruilong 已提交
1467
#ifdef TRANSPOSE_OP
// Parameters for the transpose operator: permutes X's dimensions by `axis_`.
template <typename Dtype>
class TransposeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  TransposeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                 const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    axis_ = GetAttr<vector<int>>("axis", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  // Dimension permutation to apply.
  const vector<int> &Axis() const { return axis_; }

 private:
  GType *input_x_;
  GType *out_;
  vector<int> axis_;
};
#endif
E
eclipsess 已提交
1494

L
lijiancheng0614 已提交
1495 1496 1497 1498 1499 1500 1501 1502
#ifdef TRANSPOSE2_OP
// Parameters for the transpose2 operator: like transpose, but also emits an
// XShape tensor recording the input shape (used by the backward pass).
template <typename Dtype>
class Transpose2Param : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  Transpose2Param(const VariableNameMap &inputs, const VariableNameMap &outputs,
                  const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    output_xshape_ = OutputXShapeFrom<GType>(outputs, *scope);
    axis_ = GetAttr<vector<int>>("axis", attrs);
  }

  GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  GType *OutputXShape() const { return output_xshape_; }

  // Dimension permutation to apply.
  const vector<int> &Axis() const { return axis_; }

 private:
  GType *input_x_;
  GType *out_;
  GType *output_xshape_;
  vector<int> axis_;
};
#endif

xiebaiyuan's avatar
xiebaiyuan 已提交
1527 1528 1529 1530 1531 1532 1533 1534
#ifdef LOOKUP_OP
// Parameters for the lookup_table operator: gathers rows of the embedding
// matrix W by Ids; `padding_idx_` rows are treated as padding by the kernel.
template <typename Dtype>
class LookupParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  LookupParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
              const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_w_ = InputWFrom<GType>(inputs, *scope);
    input_ids_ = InputIdsFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    padding_idx_ = GetAttr<int64_t>("padding_idx", attrs);
  }

  const GType *InputW() const { return input_w_; }
  const GType *InputIds() const { return input_ids_; }
  GType *Out() const { return out_; }
  int64_t PaddingIdx() const { return padding_idx_; }

 private:
  GType *input_w_;
  GType *input_ids_;
  GType *out_;
  int64_t padding_idx_;
};
#endif

#ifdef CRF_OP
// Parameters for the CRF decoding operator:
//   {G_OP_TYPE_CRF, {{"Emission", "Transition", "Label"}, {"ViterbiPath"}}}
template <typename Dtype>
class CrfParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  CrfParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    // TODO: wire up remaining CRF attributes when they are needed
    // (e.g. padding_idx, kept commented out below).
    input_emission_ = InputEmissionFrom<GType>(inputs, *scope);
    input_transition_ = InputTransitionFrom<GType>(inputs, *scope);
    input_label_ = InputLabelFrom<GType>(inputs, *scope);
    output_viterbipath_ = OutputViterbiPathFrom<GType>(outputs, *scope);
    //    padding_idx_ = GetAttr<int64_t>("padding_idx", attrs);
  }
  const GType *InputEmission() const { return input_emission_; }
  const GType *InputTransition() const { return input_transition_; }
  const GType *InputLabel() const { return input_label_; }
  GType *outputVBP() const { return output_viterbipath_; }

 private:
  GType *input_emission_;
  GType *input_transition_;
  GType *input_label_;
  GType *output_viterbipath_;
  //  int64_t padding_idx_;
};
#endif

L
liuruilong 已提交
1595
#ifdef RESHAPE_OP
// Parameters for the reshape operator. The target shape may come from either
// the "shape" attribute or an optional Shape input tensor.
template <typename Dtype>
class ReshapeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ReshapeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_shape_ = InputShapeFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    shape_ = GetAttr<vector<int>>("shape", attrs);

    // Newer fluid versions dropped "inplace"; default to false then.
    if (HasAttr("inplace", attrs)) {
      inplace_ = GetAttr<bool>("inplace", attrs);
    } else {
      inplace_ = false;
      DLOG << "ReshapeParam lost inplace params. maybe fluid updated";
    }
  }

  const GType *InputX() const { return input_x_; }

  const GType *InputShape() const { return input_shape_; }

  GType *Out() const { return out_; }

  const vector<int> &Shape() const { return shape_; }

  const bool &Inplace() const { return inplace_; }

 private:
  GType *input_x_;
  GType *input_shape_;
  GType *out_;
  vector<int> shape_;
  bool inplace_;
};
#endif
E
eclipsess 已提交
1636

L
lijiancheng0614 已提交
1637 1638 1639 1640 1641 1642 1643 1644
#ifdef RESHAPE2_OP
// Parameters for the reshape2 operator: like reshape, but also emits an
// XShape tensor recording the input shape (used by the backward pass).
template <typename Dtype>
class Reshape2Param : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  Reshape2Param(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_shape_ = InputShapeFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    output_xshape_ = OutputXShapeFrom<GType>(outputs, *scope);
    shape_ = GetAttr<vector<int>>("shape", attrs);
    // Optional attribute; absent in newer fluid models.
    if (HasAttr("inplace", attrs)) {
      inplace_ = GetAttr<bool>("inplace", attrs);
    } else {
      inplace_ = false;
    }
  }

  GType *InputX() const { return input_x_; }

  const GType *InputShape() const { return input_shape_; }

  GType *Out() const { return out_; }

  GType *OutputXShape() const { return output_xshape_; }

  const vector<int> &Shape() const { return shape_; }

  const bool &Inplace() const { return inplace_; }

 private:
  GType *input_x_;
  GType *input_shape_;
  GType *out_;
  GType *output_xshape_;
  vector<int> shape_;
  bool inplace_;
};
#endif

T
Tian 已提交
1681
#ifdef SCALE_OP
N
nhzlx 已提交
1682
template <typename Dtype>
I
itminner 已提交
1683
class ScaleParam : public OpParam {
N
nhzlx 已提交
1684 1685 1686
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

I
itminner 已提交
1687 1688
 public:
  ScaleParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
1689 1690 1691 1692
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
1693 1694
    scale_ = GetAttr<float>("scale", attrs);
    bias_ = GetAttr<float>("bias", attrs);
I
itminner 已提交
1695 1696
  }

1697
  const GType *InputX() const { return input_x_; }
I
itminner 已提交
1698

1699
  GType *Out() const { return out_; }
I
itminner 已提交
1700

1701
  const float Scale() const { return scale_; }
I
itminner 已提交
1702

1703
  const float Bias() const { return bias_; }
I
itminner 已提交
1704 1705

 private:
1706 1707
  GType *input_x_;
  GType *out_;
1708 1709
  float scale_;
  float bias_;
I
itminner 已提交
1710
};
T
Tian 已提交
1711 1712 1713
#endif

#ifdef SLICE_OP
N
nhzlx 已提交
1714
template <typename Dtype>
I
itminner 已提交
1715
class SliceParam : public OpParam {
N
nhzlx 已提交
1716 1717 1718
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

I
itminner 已提交
1719 1720
 public:
  SliceParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
1721 1722 1723 1724
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
I
itminner 已提交
1725

1726 1727 1728
    axes_ = GetAttr<std::vector<int>>("axes", attrs);
    starts_ = GetAttr<std::vector<int>>("starts", attrs);
    ends_ = GetAttr<std::vector<int>>("ends", attrs);
1729 1730

    original_output_dims_size_ = output_->dims().size();
1731
  }
I
itminner 已提交
1732

1733 1734 1735 1736 1737 1738
 public:
  GType *input_;
  GType *output_;
  std::vector<int> axes_;
  std::vector<int> starts_;
  std::vector<int> ends_;
1739
  int original_output_dims_size_;
I
itminner 已提交
1740
};
T
Tian 已提交
1741 1742 1743
#endif

#ifdef RESIZE_OP
N
nhzlx 已提交
1744
template <typename Dtype>
T
Tian 已提交
1745
class ResizeParam : public OpParam {
N
nhzlx 已提交
1746 1747 1748
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

I
itminner 已提交
1749 1750
 public:
  ResizeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
1751 1752 1753 1754 1755
              const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_shape_ = InputShapeFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
I
itminner 已提交
1756 1757 1758 1759 1760 1761
    is_pyramid_test_ = GetAttr<bool>("is_pyramid_test", attrs);
    height_ = GetAttr<int>("height", attrs);
    width_ = GetAttr<int>("width", attrs);
    out_height_scale_ = GetAttr<float>("out_height_scale", attrs);
    out_width_scale_ = GetAttr<float>("out_width_scale", attrs);
  }
T
Tian 已提交
1762

1763
  const GType *InputX() const { return input_x_; }
T
Tian 已提交
1764

1765
  const GType *InputShape() const { return input_shape_; }
T
Tian 已提交
1766

1767
  GType *Out() const { return out_; }
T
Tian 已提交
1768

I
itminner 已提交
1769
  const bool &IsPyramidTest() const { return is_pyramid_test_; }
T
Tian 已提交
1770

I
itminner 已提交
1771
  const int &Height() const { return height_; }
T
Tian 已提交
1772

I
itminner 已提交
1773
  const int &Width() const { return width_; }
T
Tian 已提交
1774

I
itminner 已提交
1775
  const float &OutHeightScale() const { return out_height_scale_; }
T
Tian 已提交
1776

I
itminner 已提交
1777
  const float &OutWidthScale() const { return out_width_scale_; }
T
Tian 已提交
1778

I
itminner 已提交
1779
 private:
1780 1781 1782
  GType *input_x_;
  GType *input_shape_;
  GType *out_;
I
itminner 已提交
1783 1784 1785 1786 1787
  bool is_pyramid_test_;
  int height_;
  int width_;
  float out_height_scale_;
  float out_width_scale_;
T
Tian 已提交
1788 1789 1790
};
#endif

L
liuruilong 已提交
1791
#ifdef RELU_OP
L
liuruilong 已提交
1792 1793 1794
/*
 * @b op 层实例化好这个 param 传递给 kernel 层使用
 * */
N
nhzlx 已提交
1795
template <typename Dtype>
D
relu  
dolphin8 已提交
1796
class ReluParamBase : public OpParam {
N
nhzlx 已提交
1797 1798 1799
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

E
eclipsess 已提交
1800
 public:
D
relu  
dolphin8 已提交
1801
  ReluParamBase(const VariableNameMap &inputs, const VariableNameMap &outputs,
1802 1803 1804 1805
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
E
eclipsess 已提交
1806 1807
  }

1808
  const GType *InputX() const { return input_x_; }
E
eclipsess 已提交
1809

1810
  GType *Out() const { return out_; }
E
eclipsess 已提交
1811 1812

 private:
1813 1814
  GType *input_x_;
  GType *out_;
E
eclipsess 已提交
1815
};
D
relu  
dolphin8 已提交
1816 1817 1818

// Default relu parameter: nothing beyond the shared base.
template <typename Dtype>
class ReluParam : public ReluParamBase<Dtype> {
 public:
  using ReluParamBase<Dtype>::ReluParamBase;
};

Z
zp7 已提交
1823 1824 1825 1826 1827 1828 1829 1830 1831 1832 1833 1834 1835 1836
template <typename Dtype>
class Relu6Param : public ReluParamBase<Dtype> {
 public:
  Relu6Param(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : ReluParamBase<Dtype>(inputs, outputs, attrs, scope) {
    threshold = OpParam::GetAttr<float>("threshold", attrs);
  }
  float getThreshold() const { return threshold; }

 private:
  float threshold;
};

Y
yangfei 已提交
1837
#ifdef PADDLE_MOBILE_CL
D
relu  
dolphin8 已提交
1838 1839
// GPU specialization: additionally owns an intermediate CLImage that the
// OpenCL kernel can use as scratch storage.
template <>
class ReluParam<GPU_CL> : public ReluParamBase<GPU_CL> {
 public:
  using ReluParamBase<GPU_CL>::ReluParamBase;

  framework::CLImage &getMidImage() { return midImage; }

 private:
  framework::CLImage midImage;
};
Y
yangfei 已提交
1847
#endif
D
relu  
dolphin8 已提交
1848

L
liuruilong 已提交
1849
#endif
E
eclipsess 已提交
1850

Z
zhangyang 已提交
1851 1852 1853 1854 1855 1856 1857 1858
#ifdef TANH_OP
template <typename Dtype>
class TanhParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  TanhParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
1859 1860 1861 1862
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
Z
zhangyang 已提交
1863
  }
1864 1865
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return out_; }
Z
zhangyang 已提交
1866 1867

 private:
1868 1869
  GType *input_x_;
  GType *out_;
qnqinan's avatar
qnqinan 已提交
1870 1871 1872
#ifdef PADDLE_MOBILE_FPGA

 private:
1873
  std::shared_ptr<GType> float_input_x_;
qnqinan's avatar
qnqinan 已提交
1874 1875 1876
  fpga::BypassArgs fpga_bypass_args;

 public:
1877
  GType *FloatInput() const {
qnqinan's avatar
qnqinan 已提交
1878 1879
    return float_input_x_ == nullptr ? input_x_ : float_input_x_.get();
  }
H
hjchen2 已提交
1880
  void SetFloatInput(LoDTensor *input) { float_input_x_.reset(input); }
qnqinan's avatar
qnqinan 已提交
1881 1882 1883
  const fpga::BypassArgs &FpgaArgs() const { return fpga_bypass_args; }
  void SetFpgaArgs(const fpga::BypassArgs &args) { fpga_bypass_args = args; }
#endif
Z
zhangyang 已提交
1884
};
L
liuruilong 已提交
1885
#endif
E
eclipsess 已提交
1886

T
Tian 已提交
1887
#ifdef PRELU_OP
N
nhzlx 已提交
1888
template <typename Dtype>
T
Tian 已提交
1889
class PReluParam : public OpParam {
N
nhzlx 已提交
1890 1891 1892
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

I
itminner 已提交
1893 1894
 public:
  PReluParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
1895 1896
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
1897
    DLOG << "PReluParam inputs before";
1898 1899
    input_x_ = InputXFrom<GType>(inputs, *scope);
    alpha_ = InputAlphaFrom<GType>(inputs, *scope);
1900
    framework::DDim dims = alpha_->dims();
1901
    out_ = OutFrom<GType>(outputs, *scope);
1902
    mode_ = GetStringAttr("mode", attrs);
1903
    DLOG << "PReluParam mode after" << mode_;
I
itminner 已提交
1904
  }
1905 1906 1907
  const GType *InputX() const { return input_x_; }
  const GType *InputAlpha() const { return alpha_; }
  GType *Out() const { return out_; }
1908
  const std::string &Mode() const { return mode_; }
T
Tian 已提交
1909

I
itminner 已提交
1910
 private:
1911 1912 1913
  GType *input_x_;
  GType *out_;
  GType *alpha_;
1914
  std::string mode_;
T
Tian 已提交
1915 1916 1917
};
#endif

1918 1919 1920 1921 1922 1923 1924 1925 1926 1927 1928 1929 1930 1931 1932 1933 1934 1935 1936 1937 1938 1939 1940 1941 1942
#ifdef LEAKY_RELU_OP
template <typename Dtype>
class LeakyReluParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  LeakyReluParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                 const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    alpha_ = GetAttr<float>("alpha", attrs);
  }
  const GType *InputX() const { return input_x_; }
  const float Alpha() const { return alpha_; }
  GType *Out() const { return out_; }

 private:
  GType *input_x_;
  GType *out_;
  float alpha_;
};
#endif

N
nhzlx 已提交
1943
template <typename Dtype>
L
liuruilong 已提交
1944
class FusionFcParam : public OpParam {
N
nhzlx 已提交
1945 1946 1947
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

E
eclipsess 已提交
1948
 public:
L
liuruilong 已提交
1949
  FusionFcParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
1950 1951 1952 1953 1954 1955
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    input_z_ = InputZFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
E
eclipsess 已提交
1956 1957 1958 1959
    x_num_col_dims_ = GetAttr<int>("x_num_col_dims", attrs);
    y_num_col_dims_ = GetAttr<int>("y_num_col_dims", attrs);
    axis_ = GetAttr<int>("axis", attrs);
  }
Y
yangfei 已提交
1960
  GType *InputX() const { return input_x_; }
E
eclipsess 已提交
1961

1962
  GType *InputY() const { return input_y_; }
E
eclipsess 已提交
1963

1964
  GType *InputZ() const { return input_z_; }
E
eclipsess 已提交
1965

xiebaiyuan's avatar
xiebaiyuan 已提交
1966
  GType *Out() const { return out_; }
E
eclipsess 已提交
1967 1968 1969 1970 1971 1972 1973 1974

  const int &XNumColDims() const { return x_num_col_dims_; }

  const int &YNumColDims() const { return y_num_col_dims_; }

  const int &Axis() const { return axis_; }

 private:
xiebaiyuan's avatar
xiebaiyuan 已提交
1975
  GType *input_x_;
1976 1977
  GType *input_y_;
  GType *input_z_;
xiebaiyuan's avatar
xiebaiyuan 已提交
1978
  GType *out_;
E
eclipsess 已提交
1979 1980 1981
  int x_num_col_dims_;
  int y_num_col_dims_;
  int axis_;
Z
zhangyang 已提交
1982

Z
ZhenWang 已提交
1983
#ifdef PADDLE_MOBILE_FPGA
1984
 private:  // NOLINT
Z
zhangyang 已提交
1985
  fpga::SplitConvArgs fpga_conv_args;
Z
zhangyang 已提交
1986 1987

 public:
Z
zhangyang 已提交
1988 1989
  const fpga::SplitConvArgs &FpgaArgs() const { return fpga_conv_args; }
  void SetFpgaArgs(const fpga::SplitConvArgs &args) { fpga_conv_args = args; }
Z
zhangyang 已提交
1990
#endif
E
eclipsess 已提交
1991
};
1992 1993

#ifdef FUSION_FCRELU_OP
N
nhzlx 已提交
1994 1995
// FC+ReLU fusion needs no extra attributes — it reuses the plain FC
// parameter set; the kernel applies the activation.
template <typename DeviceType>
using FusionFcReluParam = FusionFcParam<DeviceType>;
L
liuruilong 已提交
1996
#endif
E
eclipsess 已提交
1997

N
nhzlx 已提交
1998
template <typename Dtype>
1999
class FusionConvAddParam : public ConvParam<Dtype> {
N
nhzlx 已提交
2000 2001 2002
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

W
wangliu 已提交
2003
 public:
L
liuruilong 已提交
2004
  FusionConvAddParam(const VariableNameMap &inputs,
L
liuruilong 已提交
2005
                     const VariableNameMap &outputs, const AttributeMap &attrs,
2006
                     Scope *scope)
2007
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
2008
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
2009
    axis_ = OpParam::GetAttr<int>("axis", attrs);
H
update  
hjchen2 已提交
2010
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
W
wangliu 已提交
2011
  }
2012
  GType *Bias() const { return bias_; }
W
wangliu 已提交
2013 2014 2015

  const int &Axis() const { return axis_; }

L
liuruilong 已提交
2016
 protected:
2017
  GType *bias_;
W
wangliu 已提交
2018 2019 2020
  int axis_;
};

N
nhzlx 已提交
2021 2022
// Pretty-printer for FusionConvAddParam (declaration only; the definition
// lives outside this header).
template <typename Dtype>
Print &operator<<(Print &printer, const FusionConvAddParam<Dtype> &conv_param);
W
wangliu 已提交
2023

Z
zhangyang 已提交
2024
#ifdef FUSION_CONVADDRELU_OP
N
nhzlx 已提交
2025 2026
// conv + add + relu: identical parameter set to conv + add; the trailing
// relu contributes no attributes of its own.
template <typename DeviceType>
class FusionConvAddReluParam : public FusionConvAddParam<DeviceType> {
 public:
  FusionConvAddReluParam(const VariableNameMap &inputs,
                         const VariableNameMap &outputs,
                         const AttributeMap &attrs, Scope *scope)
      : FusionConvAddParam<DeviceType>(inputs, outputs, attrs, scope) {}
};
#endif

2035
#ifdef FUSION_CONVADDPRELU_OP
2036 2037 2038 2039
// conv + elementwise_add + prelu fusion: bias (Y) added along `axis`, then
// prelu with the Alpha tensor and broadcast "mode".
template <typename Dtype>
class FusionConvAddPReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvAddPReluParam(const VariableNameMap &inputs,
                          const VariableNameMap &outputs,
                          const AttributeMap &attrs, Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    alpha_ = OpParam::InputAlphaFrom<GType>(inputs, *scope);
    mode_ = OpParam::GetStringAttr("mode", attrs);
    // NOTE: a dead local `framework::DDim dims = alpha_->dims();` was
    // removed here — the value was never used.
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }
  const GType *InputAlpha() const { return alpha_; }
  const std::string &Mode() const { return mode_; }
  GType *Bias() const { return bias_; }
  const int &Axis() const { return axis_; }

 protected:
  GType *bias_;
  int axis_;
  GType *alpha_;
  std::string mode_;
};
#endif

#ifdef FUSION_CONVADDADDPRELU_OP
2067 2068 2069 2070
// conv + add + add + prelu fusion. The second elementwise_add's operand
// (bias1) may appear under either the "addX" or "Y" slot depending on how
// the fuser wired the graph; it is resolved by comparing variable keys.
template <typename Dtype>
class FusionConvAddAddPReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvAddAddPReluParam(const VariableNameMap &inputs,
                             const VariableNameMap &outputs,
                             const AttributeMap &attrs, Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    bias1_ = OpParam::InputYFrom1<GType>(inputs, *scope);
    alpha_ = OpParam::InputAlphaFrom<GType>(inputs, *scope);
    mode_ = OpParam::GetStringAttr("mode", attrs);
    // NOTE: a dead local `framework::DDim dims = alpha_->dims();` was
    // removed here — the value was never used.
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    keyOutput_ = OpParam::Getkey("addOut", inputs, 0);
    keyX1_ = OpParam::Getkey("addX", inputs, 1);
    keyY1_ = OpParam::Getkey("Y", inputs, 1);
    // Pick whichever second-add input aliases the first add's output.
    if (keyX1_ == keyOutput_) {
      bias1_ = OpParam::InputYFrom1<GType>(inputs, *scope);
    } else if (keyY1_ == keyOutput_) {
      bias1_ = OpParam::InputXFrom1<GType>(inputs, *scope);
    }
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }
  const GType *InputAlpha() const { return alpha_; }
  const std::string &Mode() const { return mode_; }
  const GType *Bias1() const { return bias1_; }

  GType *Bias() const { return bias_; }

  const int &Axis() const { return axis_; }

 protected:
  GType *bias_;
  int axis_;
  GType *alpha_;
  std::string mode_;
  GType *bias1_;
  std::string keyOutput_;
  std::string keyX1_;
  std::string keyY1_;
};
#endif

E
eclipsess 已提交
2113
#ifdef FUSION_CONVADDBNRELU_OP
N
nhzlx 已提交
2114
template <typename Dtype>
2115
class FusionConvAddBNReluParam : public ConvParam<Dtype> {
N
nhzlx 已提交
2116 2117 2118
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

E
eclipsess 已提交
2119 2120 2121
 public:
  FusionConvAddBNReluParam(const VariableNameMap &inputs,
                           const VariableNameMap &outputs,
2122
                           const AttributeMap &attrs, Scope *scope)
2123
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
2124
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
2125
    axis_ = OpParam::GetAttr<int>("axis", attrs);
H
update  
hjchen2 已提交
2126 2127 2128 2129
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
2130 2131
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
H
update  
hjchen2 已提交
2132
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
E
eclipsess 已提交
2133
  }
2134

2135
  ~FusionConvAddBNReluParam() {}
2136

2137
  GType *Bias() const { return bias_; }
E
eclipsess 已提交
2138 2139 2140

  const int &Axis() const { return axis_; }

2141
  const GType *InputBias() const { return input_bias_; }
E
eclipsess 已提交
2142

2143
  const GType *InputMean() const { return input_mean_; }
E
eclipsess 已提交
2144

2145
  const GType *InputScale() const { return input_scale_; }
E
eclipsess 已提交
2146

2147
  const GType *InputVariance() const { return input_variance_; }
E
eclipsess 已提交
2148 2149 2150 2151 2152

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

2153 2154 2155
  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }
E
eclipsess 已提交
2156

2157 2158 2159
  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }
E
eclipsess 已提交
2160

2161
  const GType *NewScale() const { return new_scale_.get(); }
E
eclipsess 已提交
2162

2163
  const GType *NewBias() const { return new_bias_.get(); }
E
eclipsess 已提交
2164 2165

 protected:
2166
  GType *bias_;
E
eclipsess 已提交
2167
  int axis_;
2168 2169 2170 2171
  GType *input_bias_;
  GType *input_mean_;
  GType *input_scale_;
  GType *input_variance_;
E
eclipsess 已提交
2172 2173
  float epsilon_;
  float momentum_;
2174 2175
  std::shared_ptr<GType> new_bias_;
  std::shared_ptr<GType> new_scale_;
2176 2177 2178 2179 2180
};
#endif

#ifdef FUSION_CONVBNADDRELU_OP
template <typename Dtype>
2181
class FusionConvBNAddReluParam : public ConvParam<Dtype> {
2182 2183 2184 2185 2186 2187
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvBNAddReluParam(const VariableNameMap &inputs,
                           const VariableNameMap &outputs,
2188
                           const AttributeMap &attrs, Scope *scope)
2189
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
2190
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
2191
    axis_ = OpParam::GetAttr<int>("axis", attrs);
H
update  
hjchen2 已提交
2192 2193 2194 2195
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
2196 2197
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
2198 2199 2200
    keyBNY_ = OpParam::Getkey("BNY", inputs, 0);
    keyX_ = OpParam::Getkey("X", inputs, 0);
    keyY_ = OpParam::Getkey("Y", inputs, 0);
2201
    if (keyX_ == keyBNY_) {
2202
      bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
2203
    } else if (keyY_ == keyBNY_) {
2204
      bias_ = OpParam::InputXFrom<GType>(inputs, *scope);
2205
    }
H
update  
hjchen2 已提交
2206
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
2207
  }
2208

2209
  ~FusionConvBNAddReluParam() {}
2210
  GType *Bias() const { return bias_; }
2211 2212 2213

  const int &Axis() const { return axis_; }

2214
  const GType *InputBias() const { return input_bias_; }
2215

2216
  const GType *InputMean() const { return input_mean_; }
2217

2218
  const GType *InputScale() const { return input_scale_; }
2219

2220
  const GType *InputVariance() const { return input_variance_; }
2221 2222 2223 2224 2225

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

2226 2227 2228
  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }
2229

2230 2231 2232
  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }
2233

2234
  const GType *NewScale() const { return new_scale_.get(); }
2235

2236
  const GType *NewBias() const { return new_bias_.get(); }
2237 2238

 protected:
2239
  GType *bias_;
2240
  int axis_;
2241 2242 2243 2244
  GType *input_bias_;
  GType *input_mean_;
  GType *input_scale_;
  GType *input_variance_;
2245 2246
  float epsilon_;
  float momentum_;
2247 2248
  std::shared_ptr<GType> new_bias_;
  std::shared_ptr<GType> new_scale_;
2249 2250 2251
  std::string keyBNY_;
  std::string keyX_;
  std::string keyY_;
E
eclipsess 已提交
2252
};
2253
#endif
E
eclipsess 已提交
2254

Z
zhangyang 已提交
2255
#ifdef FUSION_CONVBN_OP
N
nhzlx 已提交
2256
template <typename Dtype>
2257
class FusionConvBNParam : public ConvParam<Dtype> {
N
nhzlx 已提交
2258 2259 2260
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

Z
zhangyang 已提交
2261 2262 2263
 public:
  FusionConvBNParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
2264
                    Scope *scope)
2265
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
H
update  
hjchen2 已提交
2266 2267 2268 2269
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
2270 2271
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
H
update  
hjchen2 已提交
2272
    this->output_ = OpParam::OutputYFrom<GType>(outputs, *scope);
Z
zhangyang 已提交
2273 2274
  }

2275
  const GType *InputBias() const { return input_bias_; }
Z
zhangyang 已提交
2276

2277
  const GType *InputMean() const { return input_mean_; }
Z
zhangyang 已提交
2278

2279
  const GType *InputScale() const { return input_scale_; }
Z
zhangyang 已提交
2280

2281
  const GType *InputVariance() const { return input_variance_; }
Z
zhangyang 已提交
2282 2283 2284 2285 2286

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

2287 2288 2289
  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }
Z
zhangyang 已提交
2290

2291 2292 2293
  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }
Z
zhangyang 已提交
2294

2295
  const GType *NewScale() const { return new_scale_.get(); }
Z
zhangyang 已提交
2296

2297
  const GType *NewBias() const { return new_bias_.get(); }
Z
zhangyang 已提交
2298 2299

 protected:
2300 2301 2302 2303
  GType *input_bias_;
  GType *input_mean_;
  GType *input_scale_;
  GType *input_variance_;
Z
zhangyang 已提交
2304 2305
  float epsilon_;
  float momentum_;
2306 2307
  std::shared_ptr<GType> new_bias_;
  std::shared_ptr<GType> new_scale_;
Z
zhangyang 已提交
2308 2309 2310
};
#endif

2311
#ifdef FUSION_CONVADDBN_OP
N
nhzlx 已提交
2312
template <typename Dtype>
2313
class FusionConvAddBNParam : public ConvParam<Dtype> {
N
nhzlx 已提交
2314 2315 2316
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

2317 2318 2319
 public:
  FusionConvAddBNParam(const VariableNameMap &inputs,
                       const VariableNameMap &outputs,
2320
                       const AttributeMap &attrs, Scope *scope)
2321
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
2322
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
2323
    axis_ = OpParam::GetAttr<int>("axis", attrs);
H
update  
hjchen2 已提交
2324 2325 2326 2327
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
2328 2329
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
H
update  
hjchen2 已提交
2330
    this->output_ = OpParam::OutputYFrom<GType>(outputs, *scope);
2331
  }
2332
  GType *Bias() const { return bias_; }
2333 2334 2335

  const int &Axis() const { return axis_; }

2336
  const GType *InputBias() const { return input_bias_; }
2337

2338
  const GType *InputMean() const { return input_mean_; }
2339

2340
  const GType *InputScale() const { return input_scale_; }
2341

2342
  const GType *InputVariance() const { return input_variance_; }
2343 2344 2345 2346 2347

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

2348 2349 2350
  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }
2351

2352 2353 2354
  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }
2355

2356
  const GType *NewScale() const { return new_scale_.get(); }
2357

2358
  const GType *NewBias() const { return new_bias_.get(); }
2359 2360

 protected:
2361
  GType *bias_;
2362
  int axis_;
2363 2364 2365 2366
  GType *input_bias_;
  GType *input_mean_;
  GType *input_scale_;
  GType *input_variance_;
2367 2368
  float epsilon_;
  float momentum_;
2369 2370
  std::shared_ptr<GType> new_bias_;
  std::shared_ptr<GType> new_scale_;
2371
};
E
eclipsess 已提交
2372
#endif
Y
Yao,kun 已提交
2373

E
eclipsess 已提交
2374
#ifdef FUSION_DWCONVBNRELU_OP
N
nhzlx 已提交
2375
template <typename Dtype>
2376
class FusionDWConvBNReluParam : public ConvParam<Dtype> {
N
nhzlx 已提交
2377 2378 2379
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

E
eclipsess 已提交
2380 2381 2382
 public:
  FusionDWConvBNReluParam(const VariableNameMap &inputs,
                          const VariableNameMap &outputs,
2383
                          const AttributeMap &attrs, Scope *scope)
2384
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
H
update  
hjchen2 已提交
2385 2386 2387 2388
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
2389 2390
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
H
update  
hjchen2 已提交
2391
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
E
eclipsess 已提交
2392 2393
  }

2394
  ~FusionDWConvBNReluParam() {}
2395

2396
  const GType *InputBias() const { return input_bias_; }
E
eclipsess 已提交
2397

2398
  const GType *InputMean() const { return input_mean_; }
E
eclipsess 已提交
2399

2400
  const GType *InputScale() const { return input_scale_; }
E
eclipsess 已提交
2401

2402
  const GType *InputVariance() const { return input_variance_; }
E
eclipsess 已提交
2403 2404 2405 2406 2407

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

2408 2409 2410
  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }
E
eclipsess 已提交
2411

2412 2413 2414
  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }
E
eclipsess 已提交
2415

2416
  const GType *NewScale() const { return new_scale_.get(); }
E
eclipsess 已提交
2417

2418
  const GType *NewBias() const { return new_bias_.get(); }
E
eclipsess 已提交
2419 2420

 protected:
2421 2422 2423 2424
  GType *input_bias_;
  GType *input_mean_;
  GType *input_scale_;
  GType *input_variance_;
E
eclipsess 已提交
2425 2426
  float epsilon_;
  float momentum_;
2427 2428
  std::shared_ptr<GType> new_bias_;
  std::shared_ptr<GType> new_scale_;
E
eclipsess 已提交
2429 2430 2431 2432
};

#endif

2433 2434 2435 2436 2437 2438 2439 2440 2441 2442 2443 2444 2445 2446 2447 2448
#ifdef FUSION_CONVRELU_OP
// conv2d fused with a trailing relu; adds no attributes beyond ConvParam.
template <typename Dtype>
class FusionConvReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvReluParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
                      Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    // Override the output resolved by the ConvParam base with this fused
    // op's "Out" variable.
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }
};
#endif

2449
#ifdef FUSION_CONVBNRELU_OP
// Parameters for the fused conv2d + batch_norm + relu operator.
// Holds the raw BN statistics (bias/mean/scale/variance) plus the folded
// scale/bias tensors that kernels compute from them at init time.
template <typename Dtype>
class FusionConvBNReluParam : public ConvParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionConvBNReluParam(const VariableNameMap &inputs,
                        const VariableNameMap &outputs,
                        const AttributeMap &attrs, Scope *scope)
      : ConvParam<Dtype>(inputs, outputs, attrs, scope) {
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    // The fused op's output var is named "Out"; override the base's output_.
    this->output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }

  ~FusionConvBNReluParam() {}

  const GType *InputBias() const { return input_bias_; }

  const GType *InputMean() const { return input_mean_; }

  const GType *InputScale() const { return input_scale_; }

  const GType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  // Takes ownership of the folded BN scale; CLImageDeleter releases
  // GPU-image-backed tensors correctly on OpenCL builds.
  void SetNewScale(GType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  // Takes ownership of the folded BN bias (see SetNewScale).
  void SetNewBias(GType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const GType *NewScale() const { return new_scale_.get(); }

  const GType *NewBias() const { return new_bias_.get(); }

 protected:
  GType *input_bias_;
  GType *input_mean_;
  GType *input_scale_;
  GType *input_variance_;
  float epsilon_;
  float momentum_;
  std::shared_ptr<GType> new_bias_;
  std::shared_ptr<GType> new_scale_;
};
#endif

Y
Yao,kun 已提交
2507
#ifdef IM2SEQUENCE_OP
// Parameters for the im2sequence operator: unfolds image patches
// (kernel/stride/padding windows) into sequence rows.
template <typename Dtype>
class Im2SequenceParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  Im2SequenceParam(const VariableNameMap &inputs,
                   const VariableNameMap &outputs, const AttributeMap &attrs,
                   Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    kernels_ = GetAttr<vector<int>>("kernels", attrs);
    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
  }

  const GType *Input() const { return input_x_; }

  GType *Output() const { return out_; }

  const vector<int> &Kernels() const { return kernels_; }

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

 private:
  GType *input_x_;
  GType *out_;
  vector<int> kernels_;
  vector<int> strides_;
  vector<int> paddings_;
};
#endif
Y
Yao,kun 已提交
2543

2544
#ifdef DROPOUT_OP
// Parameters for the dropout operator (inference path: input, output and
// the "dropout_prob" attribute only).
template <typename Dtype>
class DropoutParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  DropoutParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);

    dropout_prob_ = GetAttr<float>("dropout_prob", attrs);
  }

  const GType *InputX() const { return input_x_; }

  GType *Out() const { return out_; }

  float DropoutProb() const { return dropout_prob_; }

 private:
  GType *input_x_;
  GType *out_;
  float dropout_prob_;
};
#endif
Y
Yao,kun 已提交
2572

N
nhzlx 已提交
2573
template <typename Dtype>
L
liuruilong 已提交
2574
class ConvTransposeParam : public OpParam {
N
nhzlx 已提交
2575 2576 2577
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

L
liuruilong 已提交
2578 2579 2580
 public:
  ConvTransposeParam(const VariableNameMap &inputs,
                     const VariableNameMap &outputs, const AttributeMap &attrs,
2581 2582
                     Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
2583 2584
    filter_ = OpParam::FilterFrom<GType>(inputs, *scope);
    input_ = OpParam::InputFrom<GType>(inputs, *scope);
2585
    // output_ = OutputFrom<GType>(outputs, scope);
qnqinan's avatar
qnqinan 已提交
2586
    if (outputs.count("Output")) {
2587
      output_ = OpParam::OutputFrom<GType>(outputs, *scope);
qnqinan's avatar
qnqinan 已提交
2588
    }
L
liuruilong 已提交
2589 2590 2591
    strides_ = GetAttr<vector<int>>("strides", attrs);
    paddings_ = GetAttr<vector<int>>("paddings", attrs);
    dilations_ = GetAttr<vector<int>>("dilations", attrs);
2592 2593 2594 2595
    if (HasAttr("output_size", attrs)) {
      output_size_ = GetAttr<vector<int>>("output_size", attrs);
      DLOG << "conv transpose output size: " << output_size_;
    }
L
liuruilong 已提交
2596 2597 2598
    groups = GetAttr<int>("groups", attrs);
  }

2599
  const GType *Input() const { return input_; }
L
liuruilong 已提交
2600

2601
  GType *Filter() const { return filter_; }
L
liuruilong 已提交
2602

2603
  GType *Output() const { return output_; }
L
liuruilong 已提交
2604 2605 2606 2607 2608

  const vector<int> &Strides() const { return strides_; }

  const vector<int> &Paddings() const { return paddings_; }

2609 2610 2611 2612
  const vector<int> &Filters() const { return filter_; }

  const vector<int> &TransFilters() const { return transformed_filter_; }

L
liuruilong 已提交
2613 2614
  const vector<int> &Dilations() const { return dilations_; }

2615 2616
  const vector<int> &OutputSize() const { return output_size_; }

L
liuruilong 已提交
2617 2618
  const int &Groups() const { return groups; }

H
hjchen2 已提交
2619 2620 2621 2622 2623
  enum ExecMode {
    EXEC_INVALID = 0,
    EXEC_GEMM_FLOAT,
    EXEC_DECONV3X3_FLOAT,
    EXEC_DECONV4X4_FLOAT,
2624 2625
    EXEC_DEPTHWISETRANS_FLOAT,
    EXEC_CONVTRANS3x3s2_FLOAT,
H
hjchen2 已提交
2626 2627 2628 2629
  };

  ExecMode &ExecMode() const { return exec_mode_; }

L
liuruilong 已提交
2630
 private:
2631 2632 2633
  GType *input_;
  GType *output_;
  GType *filter_;
2634
  GType *transformed_filter_;
L
liuruilong 已提交
2635 2636 2637
  vector<int> strides_;
  vector<int> paddings_;
  vector<int> dilations_;
2638
  vector<int> output_size_;
L
liuruilong 已提交
2639
  int groups;
H
hjchen2 已提交
2640
  mutable enum ExecMode exec_mode_;
Z
zhangyang 已提交
2641 2642 2643 2644 2645

#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::DeconvArgs fpga_conv_args;
qnqinan's avatar
qnqinan 已提交
2646
  fpga::DWDeconvArgs fpga_DWDeconv_args;
Z
zhangyang 已提交
2647 2648 2649

 public:
  const fpga::DeconvArgs &FpgaArgs() const { return fpga_conv_args; }
qnqinan's avatar
qnqinan 已提交
2650 2651 2652
  const fpga::DWDeconvArgs &FpgaDWDconvArgs() const {
    return fpga_DWDeconv_args;
  }
Z
zhangyang 已提交
2653
  void SetFpgaArgs(const fpga::DeconvArgs &args) { fpga_conv_args = args; }
qnqinan's avatar
qnqinan 已提交
2654 2655 2656
  void SetFpgaArgs(const fpga::DWDeconvArgs &args) {
    fpga_DWDeconv_args = args;
  }
Z
zhangyang 已提交
2657
#endif
L
liuruilong 已提交
2658
};
Z
zhangyang 已提交
2659

qnqinan's avatar
qnqinan 已提交
2660 2661 2662 2663 2664
#ifdef FUSION_DECONVADD_OP
// Parameters for the fused conv2d_transpose + elementwise_add operator.
template <typename Dtype>
class FusionDeconvAddParam : public ConvTransposeParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDeconvAddParam(const VariableNameMap &inputs,
                       const VariableNameMap &outputs,
                       const AttributeMap &attrs, Scope *scope)
      : ConvTransposeParam<Dtype>(inputs, outputs, attrs, scope) {
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    // The fused op writes to "Out" (the base looked for "Output").
    output_ = OpParam::OutFrom<GType>(outputs, *scope);
  }

  GType *Bias() const { return bias_; }

  // Broadcast axis of the elementwise add.
  const int &Axis() const { return axis_; }

  GType *Output() const { return output_; }

 protected:
  GType *bias_;
  int axis_;
  GType *output_;
};
#endif

#ifdef FUSION_DECONVADDRELU_OP
// deconv+add+relu needs no extra attributes beyond deconv+add, so reuse it.
template <typename Dtype>
using FusionDeconvAddReluParam = FusionDeconvAddParam<Dtype>;
#endif
2692 2693 2694 2695 2696 2697 2698 2699 2700
#ifdef FUSION_DECONVADDBN_OP
// Parameters for the fused conv2d_transpose + elementwise_add + batch_norm op.
template <typename Dtype>
class FusionDeconvAddBNParam : public ConvTransposeParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDeconvAddBNParam(const VariableNameMap &inputs,
                         const VariableNameMap &outputs,
                         const AttributeMap &attrs, Scope *scope)
      : ConvTransposeParam<Dtype>(inputs, outputs, attrs, scope) {
    output_ = OpParam::OutFrom<GType>(outputs, *scope);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    //    is_test_ = OpParam::GetAttr<bool>("is_test", attrs);
  }
  RType *Output() const { return output_; }

  const RType *InputBias() const { return input_bias_; }

  const RType *InputMean() const { return input_mean_; }

  const RType *InputScale() const { return input_scale_; }

  const RType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  // NOTE(review): is_test_ is never assigned (the attr read above is
  // commented out), so this returns an indeterminate value — confirm intent.
  const bool &IsTest() const { return is_test_; }

  // Takes ownership of the folded BN scale; CLImageDeleter handles
  // GPU-image-backed tensors on OpenCL builds.
  void SetNewScale(RType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  // Takes ownership of the folded BN bias (see SetNewScale).
  void SetNewBias(RType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const RType *NewScale() const { return new_scale_.get(); }

  const RType *NewBias() const { return new_bias_.get(); }

 protected:
  RType *output_;
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
  float epsilon_;
  float momentum_;
  bool is_test_;
  std::shared_ptr<RType> new_bias_;
  std::shared_ptr<RType> new_scale_;
};
#endif
#ifdef FUSION_DECONVBNRELU_OP
// Parameters for the fused conv2d_transpose + batch_norm + relu operator.
template <typename Dtype>
class FusionDeconvBNReluParam : public ConvTransposeParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDeconvBNReluParam(const VariableNameMap &inputs,
                          const VariableNameMap &outputs,
                          const AttributeMap &attrs, Scope *scope)
      : ConvTransposeParam<Dtype>(inputs, outputs, attrs, scope) {
    output_ = OpParam::OutFrom<GType>(outputs, *scope);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
  }
  RType *Output() const { return output_; }

  const RType *InputBias() const { return input_bias_; }

  const RType *InputMean() const { return input_mean_; }

  const RType *InputScale() const { return input_scale_; }

  const RType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  // NOTE(review): is_test_ is never assigned in this class, so this returns
  // an indeterminate value — confirm intent.
  const bool &IsTest() const { return is_test_; }

  // Takes ownership of the folded BN scale; CLImageDeleter handles
  // GPU-image-backed tensors on OpenCL builds.
  void SetNewScale(RType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  // Takes ownership of the folded BN bias (see SetNewScale).
  void SetNewBias(RType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const RType *NewScale() const { return new_scale_.get(); }

  const RType *NewBias() const { return new_bias_.get(); }

 protected:
  RType *output_;
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
  float epsilon_;
  float momentum_;
  bool is_test_;
  std::shared_ptr<RType> new_bias_;
  std::shared_ptr<RType> new_scale_;
};
#endif
#ifdef FUSION_DECONVADDBNRELU_OP
// Parameters for the fused conv2d_transpose + add + batch_norm + relu op.
template <typename Dtype>
class FusionDeconvAddBNReluParam : public ConvTransposeParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDeconvAddBNReluParam(const VariableNameMap &inputs,
                             const VariableNameMap &outputs,
                             const AttributeMap &attrs, Scope *scope)
      : ConvTransposeParam<Dtype>(inputs, outputs, attrs, scope) {
    output_ = OpParam::OutFrom<GType>(outputs, *scope);
    input_bias_ = OpParam::InputBiasFrom<GType>(inputs, *scope);
    input_mean_ = OpParam::InputMeanFrom<GType>(inputs, *scope);
    input_scale_ = OpParam::InputScaleFrom<GType>(inputs, *scope);
    input_variance_ = OpParam::InputVarianceFrom<GType>(inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
    momentum_ = OpParam::GetAttr<float>("momentum", attrs);
    //    is_test_ = OpParam::GetAttr<bool>("is_test", attrs);
  }
  RType *Output() const { return output_; }

  const RType *InputBias() const { return input_bias_; }

  const RType *InputMean() const { return input_mean_; }

  const RType *InputScale() const { return input_scale_; }

  const RType *InputVariance() const { return input_variance_; }

  const float &Epsilon() const { return epsilon_; }

  const float &Momentum() const { return momentum_; }

  // NOTE(review): is_test_ is never assigned (the attr read above is
  // commented out), so this returns an indeterminate value — confirm intent.
  const bool &IsTest() const { return is_test_; }

  // Takes ownership of the folded BN scale; CLImageDeleter handles
  // GPU-image-backed tensors on OpenCL builds.
  void SetNewScale(RType *new_scale) {
    new_scale_.reset(new_scale, CLImageDeleter<Dtype>());
  }

  // Takes ownership of the folded BN bias (see SetNewScale).
  void SetNewBias(RType *new_bias) {
    new_bias_.reset(new_bias, CLImageDeleter<Dtype>());
  }

  const RType *NewScale() const { return new_scale_.get(); }

  const RType *NewBias() const { return new_bias_.get(); }

 protected:
  RType *output_;
  RType *input_bias_;
  RType *input_mean_;
  RType *input_scale_;
  RType *input_variance_;
  float epsilon_;
  float momentum_;
  bool is_test_;
  std::shared_ptr<RType> new_bias_;
  std::shared_ptr<RType> new_scale_;
};
#endif
L
liuruilong 已提交
2874

Z
zhangyang 已提交
2875 2876 2877 2878 2879
#ifdef FUSION_DECONVRELU_OP
// deconv+relu needs no extra attributes beyond plain deconv, so reuse it.
template <typename Dtype>
using FusionDeconvReluParam = ConvTransposeParam<Dtype>;
#endif

xiebaiyuan's avatar
xiebaiyuan 已提交
2880 2881 2882 2883 2884 2885 2886 2887 2888 2889 2890 2891 2892 2893
#ifdef GRU_OP
// Parameters for the GRU (gated recurrent unit) sequence operator.
template <typename Dtype>
class GruParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;

 public:
  /**
   * Binds GRU inputs (Input/H0/Bias/Weight), the four batch-major outputs,
   * and the activation / direction attributes.
   *
   * @param inputs  input variable name map
   * @param outputs output variable name map
   * @param attrs   op attributes
   * @param scope   scope holding the variables
   */
  GruParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_input_ = InputFrom<GType>(inputs, *scope);
    input_h0_ = InputH0From<GType>(inputs, *scope);
    input_bias_ = InputBiasFrom<GType>(inputs, *scope);
    input_weight_ = InputWeightFrom<GType>(inputs, *scope);

    output_batch_gate_ = OutputBatchGateFrom<GType>(outputs, *scope);
    output_batch_reset_hidden_prev_ =
        OutputBatchResetHiddenPrevFrom<GType>(outputs, *scope);
    output_batch_hidden_ = OutputBatchHiddenFrom<GType>(outputs, *scope);
    output_hidden_ = OutputHiddenFrom<GType>(outputs, *scope);
    activation_ = GetStringAttr("activation", attrs);
    gate_activation_ = GetStringAttr("gate_activation", attrs);
    is_reverse_ = GetAttr<bool>("is_reverse", attrs);
  }
  const GType *InputInput() const { return input_input_; }
  const GType *InputWeight() const { return input_weight_; }
  const GType *InputH0() const { return input_h0_; }
  const GType *InputBias() const { return input_bias_; }
  const std::string &Activation() const { return activation_; }
  const std::string &GateActivation() const { return gate_activation_; }
  const bool &IsReverse() const { return is_reverse_; }

  GType *OutBatchGate() const { return output_batch_gate_; }
  GType *OutBatchResetHiddenPrev() const {
    return output_batch_reset_hidden_prev_;
  }
  GType *OutBatchHidden() const { return output_batch_hidden_; }
  GType *OutHidden() const { return output_hidden_; }

 private:
  GType *input_input_;
  GType *input_h0_;
  GType *input_bias_;
  GType *input_weight_;

  GType *output_batch_gate_;
  GType *output_batch_reset_hidden_prev_;
  GType *output_batch_hidden_;
  GType *output_hidden_;
  std::string activation_;       // candidate activation name
  std::string gate_activation_;  // update/reset gate activation name
  bool is_reverse_;              // process the sequence back-to-front
};
#endif

Z
zhaojiaying01 已提交
2941 2942 2943 2944 2945 2946 2947
#ifdef GRU_UNIT_OP
// Parameters for a single GRU step (gru_unit): one time-step of the GRU cell.
// Unlike GruParam, activations are integer enum codes, not names.
template <typename Dtype>
class GruUnitParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;

 public:
  GruUnitParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_input_ = InputFrom<GType>(inputs, *scope);
    input_hidden_prev_ = InputHiddenPrevFrom<GType>(inputs, *scope);
    input_bias_ = InputBiasFrom<GType>(inputs, *scope);
    input_weight_ = InputWeightFrom<GType>(inputs, *scope);

    output_gate_ = OutputGateFrom<GType>(outputs, *scope);
    output_reset_hidden_prev_ =
        OutputResetHiddenPrevFrom<GType>(outputs, *scope);
    output_hidden_ = OutputHiddenFrom<GType>(outputs, *scope);
    activation_ = GetAttr<int>("activation", attrs);
    gate_activation_ = GetAttr<int>("gate_activation", attrs);
  }
  const GType *InputInput() const { return input_input_; }
  const GType *InputWeight() const { return input_weight_; }
  const GType *InputHiddenPrev() const { return input_hidden_prev_; }
  const GType *InputBias() const { return input_bias_; }
  const int &Activation() const { return activation_; }
  const int &GateActivation() const { return gate_activation_; }

  GType *OutGate() const { return output_gate_; }
  GType *OutResetHiddenPrev() const { return output_reset_hidden_prev_; }
  GType *OutHidden() const { return output_hidden_; }

 private:
  GType *input_input_;
  GType *input_hidden_prev_;
  GType *input_bias_;
  GType *input_weight_;

  GType *output_gate_;
  GType *output_reset_hidden_prev_;
  GType *output_hidden_;
  int activation_;       // candidate activation enum code
  int gate_activation_;  // gate activation enum code
};
#endif

2987 2988 2989 2990 2991 2992 2993 2994
#ifdef FLATTEN_OP
// Parameters for the flatten operator: collapses input dims around `axis`
// into a 2-D output.
template <typename Dtype>
class FlattenParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FlattenParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    axis = GetAttr<int>("axis", attrs);
  }
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return out_; }
  const int &Axis() const { return axis; }

 private:
  GType *input_x_;
  GType *out_;
  int axis;  // split point between the flattened row and column dims
};
#endif

#ifdef SPLIT_OP
// Parameters for the split operator: slices the input along `axis` into
// `num` equal parts or into the explicit `sections` sizes.
template <typename Dtype>
class SplitParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SplitParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    outs_ = OutMultiFrom<GType>(outputs, *scope);
    axis = GetAttr<int>("axis", attrs);
    num = GetAttr<int>("num", attrs);
    sections = GetAttr<std::vector<int>>("sections", attrs);

    //    for (int i = 0; i < outs_.size(); ++i) {
    //      out_ts_.push_back(*scope.FindVar(outs_[i])->GetMutable());
    //    }
  }
  GType *InputX() const { return input_x_; }
  std::vector<GType *> Outs() const { return outs_; }
  int Axis() const { return axis; }
  int Num() const { return num; }
  std::vector<int> Sections() const { return sections; }
  //  std::vector<GType> OutTs() const { return out_ts_; }

 private:
  GType *input_x_;
  std::vector<GType *> outs_;
  int axis;
  int num;                    // equal-split count (used when sections empty)
  std::vector<int> sections;  // explicit per-output sizes along axis
  //  std::vector<GType> out_ts_;
#ifdef PADDLE_MOBILE_FPGA

 private:
  fpga::SplitArgs fpga_split_args;

 public:
  const fpga::SplitArgs &FpgaArgs() const { return fpga_split_args; }
  void SetFpgaArgs(const fpga::SplitArgs &args) { fpga_split_args = args; }
#endif
};
#endif

#ifdef BILINEAR_INTERP_OP
// Parameters for the bilinear_interp operator: resizes the input to
// (out_h, out_w), optionally overridden by the OutSize input tensor.
template <typename Dtype>
class BilinearInterpParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  BilinearInterpParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
                      Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_outsize_ = InputOutSizeFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    out_h_ = GetAttr<int>("out_h", attrs);
    out_w_ = GetAttr<int>("out_w", attrs);
  }
  const GType *InputX() const { return input_x_; }
  const GType *InputOutPutSize() const { return input_outsize_; }
  GType *Out() const { return out_; }
  int OutH() const { return out_h_; }
  int OutW() const { return out_w_; }

 private:
  GType *input_x_;
  GType *input_outsize_;
  GType *out_;
  int out_h_;
  int out_w_;
};
#endif

#ifdef NEAREST_INTERP_OP
// Parameters for the nearest_interp operator; same shape contract as
// BilinearInterpParam but with nearest-neighbor sampling.
template <typename Dtype>
class NearestInterpolationParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  NearestInterpolationParam(const VariableNameMap &inputs,
                            const VariableNameMap &outputs,
                            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_outsize_ = InputOutSizeFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    out_h_ = GetAttr<int>("out_h", attrs);
    out_w_ = GetAttr<int>("out_w", attrs);
  }
  const GType *InputX() const { return input_x_; }
  const GType *InputOutPutSize() const { return input_outsize_; }
  GType *Out() const { return out_; }
  int OutH() const { return out_h_; }
  int OutW() const { return out_w_; }

 private:
  GType *input_x_;
  GType *input_outsize_;
  GType *out_;
  int out_h_;
  int out_w_;
};
#endif

#ifdef SHAPE_OP
// Parameters for the shape operator: writes the input's dimensions to Out.
template <typename Dtype>
class ShapeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ShapeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
  }
  const GType *Input() const { return input_; }
  GType *Out() const { return out_; }

 private:
  GType *input_;
  GType *out_;
};
#endif

H
hjchen2 已提交
3144 3145 3146 3147 3148 3149 3150 3151
#ifdef TOP_K_OP
// Parameters for the top_k operator: top-k values and their indices.
template <typename Dtype>
class TopKParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  TopKParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = OpParam::GetVarValue<GType>("X", inputs, *scope);
    output_ = OpParam::GetVarValue<GType>("Out", outputs, *scope);
    indices_ = OpParam::GetVarValue<GType>("Indices", outputs, *scope);
    k_ = OpParam::GetAttr<int>("k", attrs);
  }

 public:
  GType *input_;
  GType *output_;
  GType *indices_;
  int k_;  // number of largest elements to keep
};
#endif  // TOP_K_OP

#ifdef CAST_OP
// Parameters for the cast operator: converts X from in_dtype to out_dtype.
template <typename Dtype>
class CastParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  CastParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
            const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = OpParam::GetVarValue<GType>("X", inputs, *scope);
    output_ = OpParam::GetVarValue<GType>("Out", outputs, *scope);
    input_type_ = OpParam::GetAttr<int>("in_dtype", attrs);
    output_type_ = OpParam::GetAttr<int>("out_dtype", attrs);
  }

 public:
  GType *input_;
  GType *output_;
  int input_type_;   // framework dtype code of X
  int output_type_;  // framework dtype code of Out
};
#endif  // CAST_OP

3192
#ifdef QUANT_OP
// Parameters for the quantize operator. The scale is either computed online
// (max(abs(x)), written to OutScale) or supplied offline via InScale.
template <typename Dtype>
class QuantizeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  QuantizeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    // online
    // scale = max(abs(x))
    online_scale_ = OpParam::GetVarValue<GType>("OutScale", outputs, *scope);
    // offline
    if (inputs.count("InScale")) {
      offline_ = true;
      offline_scale_ = OpParam::GetVarValue<GType>("InScale", inputs, *scope);
    }
    // x = round(scale * x)
    if (OpParam::HasAttr("round_type", attrs)) {
      round_type_ = OpParam::GetAttr<RoundType>("round_type", attrs);
    }
  }

 public:
  // op input
  GType *input_;
  // op output
  GType *output_;
  GType *online_scale_;
  // quantize offline scale; null unless "InScale" was provided
  GType *offline_scale_ = nullptr;
  // if offine scale or not
  bool offline_ = false;
  // round method type
  // RoundType round_type_ = ROUND_NEAREST_AWAY_ZERO;
  RoundType round_type_ = ROUND_NEAREST_TOWARDS_ZERO;
};
#endif
3233

3234
#ifdef DEQUANT_OP
// Parameters for the dequantize operator.
template <typename Dtype>
class DequantizeParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  DequantizeParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                  const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    activation_scale_ = OpParam::GetVarValue<GType>("Scale", inputs, *scope);
    // dequantization is performed as x = x / static_scale / online_scale
    if (OpParam::HasAttr("weight_scale", attrs)) {
      weight_scale_ = OpParam::GetAttr<float>("weight_scale", attrs);
    } else {
      // older models store the static scale under "max_range"
      weight_scale_ = OpParam::GetAttr<float>("max_range", attrs);
    }
  }

 public:
  // op input
  GType *input_;
  // op output
  GType *output_;
  GType *activation_scale_;
  float weight_scale_;
};
#endif
3264

3265 3266 3267 3268
#if defined(FUSION_DEQUANT_BN_OP) || defined(FUSION_DEQUANT_ADD_BN_OP) || \
    defined(FUSION_DEQUANT_ADD_BN_RELU_OP) ||                             \
    defined(FUSION_DEQUANT_BN_RELU_OP) ||                                 \
    defined(FUSION_DEQUANT_ADD_BN_QUANT_OP) ||                            \
    defined(FUSION_DEQUANT_ADD_BN_RELU_QUANT_OP)
// Parameters for fused dequantize + batch-norm ops; extends DequantizeParam
// with the batch-norm inputs and epsilon.
template <typename Dtype>
class FusionDequantBNParam : public DequantizeParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDequantBNParam(const VariableNameMap &inputs,
                       const VariableNameMap &outputs,
                       const AttributeMap &attrs, Scope *scope)
      : DequantizeParam<Dtype>(inputs, outputs, attrs, scope) {
    // batch norm params
    bn_mean_ = OpParam::GetVarValue<GType>("BNMean", inputs, *scope);
    bn_variance_ = OpParam::GetVarValue<GType>("BNVariance", inputs, *scope);
    bn_scale_ = OpParam::GetVarValue<GType>("BNScale", inputs, *scope);
    bn_bias_ = OpParam::GetVarValue<GType>("BNBias", inputs, *scope);
    epsilon_ = OpParam::GetAttr<float>("epsilon", attrs);
  }

 public:
  // batch norm inputs
  GType *bn_mean_;
  GType *bn_variance_;
  GType *bn_scale_;
  GType *bn_bias_;
  float epsilon_;
};
#endif

3298 3299 3300 3301
#if defined(FUSION_DEQUANT_ADD_BN_RELU_OP) ||  \
    defined(FUSION_DEQUANT_ADD_BN_OP) ||       \
    defined(FUSION_DEQUANT_ADD_BN_QUANT_OP) || \
    defined(FUSION_DEQUANT_ADD_BN_RELU_QUANT_OP)
// Parameters for fused dequantize + elementwise-add + batch-norm ops;
// adds the elementwise-add bias input and broadcast axis.
template <typename Dtype>
class FusionDequantAddBNParam : public FusionDequantBNParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDequantAddBNParam(const VariableNameMap &inputs,
                          const VariableNameMap &outputs,
                          const AttributeMap &attrs, Scope *scope)
      : FusionDequantBNParam<Dtype>(inputs, outputs, attrs, scope) {
    // element wise add params
    axis_ = OpParam::GetAttr<int>("axis", attrs);
    bias_ = OpParam::InputYFrom<GType>(inputs, *scope);
  }

 public:
  // elementwise add
  int axis_;
  GType *bias_;
};
#endif

3324 3325 3326 3327 3328 3329 3330 3331 3332
#ifdef FUSION_DEQUANT_ADD_BN_QUANT_OP
// Parameters for the fused dequantize + add + batch-norm + re-quantize op.
// Adds the quantize-side state (output scale, optional offline input scale,
// rounding mode) on top of FusionDequantAddBNParam.
template <typename Dtype>
class FusionDequantAddBNQuantParam : public FusionDequantAddBNParam<Dtype> {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  FusionDequantAddBNQuantParam(const VariableNameMap &inputs,
                               const VariableNameMap &outputs,
                               const AttributeMap &attrs, Scope *scope)
      : FusionDequantAddBNParam<Dtype>(inputs, outputs, attrs, scope) {
    // scale output
    online_scale_ = OpParam::GetVarValue<GType>("OutScale", outputs, *scope);
    // offline scale is optional; only present when "InScale" is provided
    if (inputs.count("InScale")) {
      offline_ = true;
      offline_scale_ = OpParam::GetVarValue<GType>("InScale", inputs, *scope);
    }
    // x = round(scale * x)
    if (OpParam::HasAttr("round_type", attrs)) {
      round_type_ = OpParam::GetAttr<RoundType>("round_type", attrs);
    }
  }

 public:
  GType *online_scale_;
  // quantize offline scale; nullptr unless offline_ is true
  // (default-initialized to avoid an indeterminate pointer when the
  // "InScale" input is absent)
  GType *offline_scale_ = nullptr;
  // if offline scale or not
  bool offline_ = false;
  // round method type
  // RoundType round_type_ = ROUND_NEAREST_AWAY_ZERO;
  RoundType round_type_ = ROUND_NEAREST_TOWARDS_ZERO;
};
#endif  // FUSION_DEQUANT_ADD_BN_QUANT_OP

3360 3361 3362 3363 3364 3365 3366 3367 3368
#ifdef SEQUENCE_EXPAND_OP
// Parameters for the sequence_expand op.
template <typename Dtype>
class SequenceExpandParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SequenceExpandParam(const VariableNameMap &inputs,
                      const VariableNameMap &outputs, const AttributeMap &attrs,
                      Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    // -1 means "use the last LoD level of Y" when the attr is not given
    ref_level_ = -1;
    if (OpParam::HasAttr("ref_level", attrs)) {
      ref_level_ = OpParam::GetAttr<int>("ref_level", attrs);
    }
  }

 public:
  GType *input_x_;
  GType *input_y_;
  GType *output_;
  int ref_level_;
};
#endif  // SEQUENCE_EXPAND_OP

#ifdef SEQUENCE_POOL_OP
// Parameters for the sequence_pool op; pooling type defaults to "MAX".
template <typename Dtype>
class SequencePoolParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  SequencePoolParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    pool_type_ = "MAX";
    if (OpParam::HasAttr("pooltype", attrs)) {
      pool_type_ = OpParam::GetStringAttr("pooltype", attrs);
    }
  }

 public:
  GType *input_;
  GType *output_;
  std::string pool_type_;
};
#endif  // SEQUENCE_POOL_OP

3414 3415 3416 3417 3418 3419 3420 3421
#ifdef LOD_RESET_OP
// Parameters for the lod_reset op. The new LoD comes either from the
// optional "Y" input tensor or from the "target_lod" attribute.
template <typename Dtype>
class LodResetParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  LodResetParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    input_y_ = nullptr;
    if (inputs.count("Y")) {
      input_y_ = InputYFrom<GType>(inputs, *scope);
    } else {
      target_lod_ = OpParam::GetAttr<vector<int>>("target_lod", attrs);
    }
    if (HasAttr("append", attrs)) {
      append = OpParam::GetAttr<bool>("append", attrs);
    }
  }

 public:
  GType *input_x_;
  GType *input_y_;  // nullptr when the "Y" input is absent
  GType *output_;
  std::vector<int> target_lod_;
  // whether to append to the existing LoD; default-initialized so the member
  // is well-defined when the "append" attribute is missing (previously read
  // uninitialized in that case)
  bool append = false;
};
#endif  // LOD_RESET_OP

3446 3447 3448 3449 3450 3451 3452 3453
#ifdef LESS_THAN_OP
// Parameters for elementwise comparison ops (e.g. less_than).
template <typename Dtype>
class CompareParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  CompareParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    // broadcast axis for Y against X
    axis_ = OpParam::GetAttr<int>("axis", attrs);
  }

 public:
  GType *input_x_;
  GType *input_y_;
  GType *output_;
  int axis_;
};
#endif  // LESS_THAN_OP

Z
zhaojiaying01 已提交
3470
#if defined(LOGICAL_AND_OP) || defined(LOGICAL_OR_OP) || defined(LOGICAL_XOR_OP)
// Parameters shared by the binary logical ops (and / or / xor).
template <typename Dtype>
class LogicalBinaryParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  LogicalBinaryParam(const VariableNameMap &inputs,
                     const VariableNameMap &outputs, const AttributeMap &attrs,
                     Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    input_y_ = InputYFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
  }

  // accessors
  const GType *InputX() const { return input_x_; }
  const GType *InputY() const { return input_y_; }
  GType *Out() const { return output_; }

 public:
  GType *input_x_;
  GType *input_y_;
  GType *output_;
};
#endif  // LOGICAL_AND_OP LOGICAL_OR_OP LOGICAL_XOR_OP
3496 3497 3498

#ifdef LOGICAL_NOT_OP
// Parameters for the unary logical op (not).
template <typename Dtype>
class LogicalUnaryParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  LogicalUnaryParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
  }

  // accessors
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return output_; }

 public:
  GType *input_x_;
  GType *output_;
};
#endif  // LOGICAL_NOT_OP

3521 3522 3523
#ifdef WRITE_TO_ARRAY_OP
// Parameters for write_to_array: writes input "X" into the tensor array
// "Out" at index "I".
template <typename Dtype>
class WriteToArrayParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  WriteToArrayParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = OpParam::GetVarValue<GType>("X", inputs, *scope);
    index_ = OpParam::GetVarValue<GType>("I", inputs, *scope);
    output_ = OpParam::GetVarValue<std::vector<GType>>("Out", outputs, *scope);
  }

 public:
  GType *input_;
  GType *index_;
  std::vector<GType> *output_;  // destination tensor array
};
#endif  // WRITE_TO_ARRAY_OP

#ifdef READ_FROM_ARRAY_OP
// Parameters for read_from_array: reads element "I" of the tensor array
// "X" into "Out".
template <typename Dtype>
class ReadFromArrayParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ReadFromArrayParam(const VariableNameMap &inputs,
                     const VariableNameMap &outputs, const AttributeMap &attrs,
                     Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_ = OpParam::GetVarValue<std::vector<GType>>("X", inputs, *scope);
    index_ = OpParam::GetVarValue<GType>("I", inputs, *scope);
    output_ = OpParam::GetVarValue<GType>("Out", outputs, *scope);
  }

 public:
  std::vector<GType> *input_;  // source tensor array
  GType *index_;
  GType *output_;
};
#endif  // READ_FROM_ARRAY_OP

Z
zhaojiaying01 已提交
3567 3568 3569 3570 3571 3572 3573 3574
#ifdef IS_EMPTY_OP
// Parameters for the is_empty op.
template <typename Dtype>
class IsEmptyParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  IsEmptyParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
               const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
  }

  // accessors
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return output_; }

 public:
  GType *input_x_;
  GType *output_;
};
#endif  // IS_EMPTY_OP

#ifdef INCREMENT_OP
// Parameters for the increment op: Out = X + step.
template <typename Dtype>
class IncrementParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  IncrementParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                 const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    output_ = OutFrom<GType>(outputs, *scope);
    step_ = OpParam::GetAttr<float>("step", attrs);
  }

  // accessors
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return output_; }
  float Step() const { return step_; }

 public:
  GType *input_x_;
  GType *output_;
  float step_;
};
#endif  // INCREMENT_OP
3615 3616
#ifdef PAD2D_OP
// Parameters for the pad2d op.
template <typename Dtype>
class Pad2DParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  Pad2DParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    paddings_ = OpParam::GetAttr<std::vector<int>>("paddings", attrs);
    pad_value_ = OpParam::GetAttr<float>("pad_value", attrs);
    mode_ = OpParam::GetStringAttr("mode", attrs);
    DLOG << "mode" << mode_;
  }

  // accessors
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return out_; }

  std::vector<int> paddings_;
  float pad_value_;
  std::string mode_;

 private:
  GType *input_x_;
  GType *out_;
};
#endif  // PAD2D_OP
H
Huie 已提交
3644 3645 3646 3647 3648
#ifdef EXP_OP
// Parameters for the exp op.
template <typename Dtype>
class EXPParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  EXPParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
           const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
  }

  // accessors
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return out_; }

 private:
  GType *input_x_;
  GType *out_;
};
#endif  // EXP_OP
3665 3666 3667 3668 3669 3670 3671 3672 3673 3674 3675 3676 3677 3678 3679 3680 3681 3682 3683 3684 3685 3686 3687 3688 3689 3690 3691 3692 3693 3694

#ifdef PIXEL_SHUFFLE_OP
// Parameters for the pixel_shuffle op.
template <typename Dtype>
class PixelShuffleParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  PixelShuffleParam(const VariableNameMap &inputs,
                    const VariableNameMap &outputs, const AttributeMap &attrs,
                    Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    upscale_factor_ = GetAttr<int>("upscale_factor", attrs);
  }

  // accessors
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return out_; }
  const int &upscale_factor() const { return upscale_factor_; }

 private:
  GType *input_x_;
  GType *out_;
  int upscale_factor_;
};
#endif  // PIXEL_SHUFFLE_OP

3695
#ifdef GRID_SAMPLER_OP
// Parameters for the grid_sampler op: samples InputX at the coordinates
// given by the Grid input.
template <typename Dtype>
class GridSamplerParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  GridSamplerParam(const VariableNameMap &inputs,
                   const VariableNameMap &outputs, const AttributeMap &attrs,
                   Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    grid_ = GridFrom<GType>(inputs, *scope);
    output_ = OutputFrom<GType>(outputs, *scope);
  }

  // accessors
  const GType *InputX() const { return input_x_; }
  const GType *Grid() const { return grid_; }
  GType *Output() const { return output_; }

 private:
  GType *input_x_;
  GType *grid_;
  GType *output_;
};
#endif  // GRID_SAMPLER_OP

3723
#ifdef EXPAND_OP
// Parameters for the expand op: tiles InputX along each dimension by the
// corresponding entry of "expand_times".
template <typename Dtype>
class ExpandParam : public OpParam {
  typedef typename DtypeTensorTrait<Dtype>::gtype GType;
  typedef typename DtypeTensorTrait<Dtype>::rtype RType;

 public:
  ExpandParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
              const AttributeMap &attrs, Scope *scope)
      : OpParam(inputs, outputs, attrs, scope) {
    input_x_ = InputXFrom<GType>(inputs, *scope);
    out_ = OutFrom<GType>(outputs, *scope);
    expand_times = OpParam::GetAttr<std::vector<int>>("expand_times", attrs);
  }

  // accessors
  const GType *InputX() const { return input_x_; }
  GType *Out() const { return out_; }

  std::vector<int> expand_times;  // per-dimension tile counts

 private:
  GType *input_x_;
  GType *out_;
};
#endif  // EXPAND_OP
朔-望's avatar
朔-望 已提交
3750 3751
}  // namespace operators
}  // namespace paddle_mobile