/* Copyright (c) 2016 Baidu, Inc. All Rights Reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
==============================================================================*/

#pragma once

#include <cassert>
#include <string>
#include <vector>

#include "common/log.h"
#include "common/type_define.h"
#include "framework/lod_tensor.h"
#include "framework/scope.h"
#include "framework/tensor.h"
#include "framework/variable.h"

namespace paddle_mobile {
namespace operators {

using namespace framework;

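// Base class for operator parameter wrappers. The protected helpers look up
// the variables an operator references through its input/output name maps in
// the enclosing Scope, and read typed attributes from an AttributeMap.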
class OpParam : PaddleMobileObject {
  public:
  protected:
    template <typename T>
    static T *InputFrom(const VariableNameMap &inputs, const Scope &scope) {
        return GetVarValue<T>("Input", inputs, scope);
    }

    template <typename T>
    static T *InputXFrom(const VariableNameMap &inputs, const Scope &scope) {
        return GetVarValue<T>("X", inputs, scope);
    }

    template <typename T>
    static T *InputYFrom(const VariableNameMap &inputs, const Scope &scope) {
        return GetVarValue<T>("Y", inputs, scope);
    }

    template <typename T>
    static T *InputBiasFrom(const VariableNameMap &inputs, const Scope &scope) {
        return GetVarValue<T>("Bias", inputs, scope);
    }
    template <typename T>
    static T *InputVarianceFrom(const VariableNameMap &inputs,
                                const Scope &scope) {
        return GetVarValue<T>("Variance", inputs, scope);
    }
    template <typename T>
    static T *InputMeanFrom(const VariableNameMap &inputs, const Scope &scope) {
        return GetVarValue<T>("Mean", inputs, scope);
    }
    template <typename T>
    static T *InputScaleFrom(const VariableNameMap &inputs,
                             const Scope &scope) {
        return GetVarValue<T>("Scale", inputs, scope);
    }

    template <typename T>
    static std::vector<T *> InputMultiFrom(const VariableNameMap &inputs,
                                           const Scope &scope) {
        return GetMultiVarValue<T>("X", inputs, scope);
    }

    template <typename T>
    static T *OutputFrom(const VariableNameMap &outputs, const Scope &scope) {
        return GetVarValue<T>("Output", outputs, scope);
    }

    template <typename T>
    static T *OutFrom(const VariableNameMap &outputs, const Scope &scope) {
        return GetVarValue<T>("Out", outputs, scope);
    }

    template <typename T>
    static T *OutputYFrom(const VariableNameMap &outputs, const Scope &scope) {
        return GetVarValue<T>("Y", outputs, scope);
    }

    template <typename T>
    static T *MidOutFrom(const VariableNameMap &outputs, const Scope &scope) {
        return GetVarValue<T>("MidOut", outputs, scope);
    }

    template <typename T>
    static T *FilterFrom(const VariableNameMap &inputs, const Scope &scope) {
        return GetVarValue<T>("Filter", inputs, scope);
    }

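    // Reads the attribute stored under `key` and converts it to T.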
    template <typename T>
    static const T GetAttr(const std::string &key, const AttributeMap &map) {
        return ((Attribute)map.at(key)).Get<T>();
    }

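    // Returns a mutable pointer to the first variable registered under `key`
    // in var_map, or nullptr if no variable name is listed for that key.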
    template <typename T>
    static T *GetVarValue(const std::string &key,
                          const VariableNameMap &var_map, const Scope &scope) {
        auto var_vec = var_map.at(key);
        if (!var_vec.empty()) {
            //      std::cout << " get var value -- " << var_vec[0] <<
            //      std::endl;
            auto var = scope.FindVar(var_vec[0]);
            return var->GetMutable<T>();
        } else {
            return nullptr;
        }
    }

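    // Returns mutable pointers to every variable registered under `key`.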
    template <typename T>
    static std::vector<T *> GetMultiVarValue(const std::string &key,
                                             const VariableNameMap &var_map,
                                             const Scope &scope) {
        auto var_vecs = var_map.at(key);
        assert(var_vecs.size() > 1);
        std::vector<T *> var_res;
        for (auto &var_vec : var_vecs) {
            auto var = scope.FindVar(var_vec);
            var_res.push_back(var->GetMutable<T>());
        }
        return var_res;
    }
};

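// Parameters for the convolution operator: input, filter and output tensors
// plus the strides/paddings/dilations/groups attributes.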
class ConvParam : OpParam {
  public:
    ConvParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
              const framework::AttributeMap &attrs,
              const framework::Scope &scope) {
        filter_ = FilterFrom<framework::LoDTensor>(inputs, scope);
        input_ = InputFrom<framework::Tensor>(inputs, scope);
        output_ = OutputFrom<framework::Tensor>(outputs, scope);
        strides_ = GetAttr<std::vector<int>>("strides", attrs);
        paddings_ = GetAttr<std::vector<int>>("paddings", attrs);
        dilations_ = GetAttr<std::vector<int>>("dilations", attrs);
        groups = GetAttr<int>("groups", attrs);
    }

    const Tensor *Input() const { return input_; }

    const LoDTensor *Filter() const { return filter_; }

    Tensor *Output() const { return output_; }

    const std::vector<int> &Strides() const { return strides_; }

    const std::vector<int> &Paddings() const { return paddings_; }

    const std::vector<int> &Dilations() const { return dilations_; }

    const int &Groups() const { return groups; }

  private:
    Tensor *input_;
    Tensor *output_;
    LoDTensor *filter_;
    std::vector<int> strides_;
    std::vector<int> paddings_;
    std::vector<int> dilations_;
    int groups;
};

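// Debug printing support for ConvParam.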
Print &operator<<(Print &printer, const ConvParam &conv_param);

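// Parameters for the elementwise add operator: inputs X and Y, output Out,
// and the broadcast axis.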
class ElementwiseAddParam : OpParam {
  public:
    ElementwiseAddParam(const VariableNameMap &inputs,
                        const VariableNameMap &outputs,
                        const framework::AttributeMap &attrs,
                        const framework::Scope &scope) {
        input_x_ = InputXFrom<framework::Tensor>(inputs, scope);
        input_y_ = InputYFrom<framework::Tensor>(inputs, scope);
        out_ = OutFrom<framework::Tensor>(outputs, scope);
        axis_ = GetAttr<int>("axis", attrs);
    }

    const Tensor *InputX() const { return input_x_; }

    const Tensor *InputY() const { return input_y_; }

    Tensor *Out() const { return out_; }

    const int &Axis() const { return axis_; }

  private:
    Tensor *input_x_;
    Tensor *input_y_;
    Tensor *out_;
    int axis_;
};

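// Parameters for the mul (matrix multiply) operator: inputs X and Y, output
// Out, and the x/y_num_col_dims attributes that control how the inputs are
// flattened into 2-D matrices.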
class MulParam : OpParam {
  public:
    MulParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const framework::AttributeMap &attrs,
             const framework::Scope &scope) {
        input_x_ = InputXFrom<framework::Tensor>(inputs, scope);
        input_y_ = InputYFrom<framework::Tensor>(inputs, scope);
        out_ = OutFrom<framework::Tensor>(outputs, scope);
        x_num_col_dims_ = GetAttr<int>("x_num_col_dims", attrs);
        y_num_col_dims_ = GetAttr<int>("y_num_col_dims", attrs);
    }

    const Tensor *InputX() const { return input_x_; }

    const Tensor *InputY() const { return input_y_; }

    Tensor *Out() const { return out_; }

    const int &XNumColDims() const { return x_num_col_dims_; }

    const int &YNumColDims() const { return y_num_col_dims_; }

  private:
    Tensor *input_x_;
    Tensor *input_y_;
    Tensor *out_;
    int x_num_col_dims_;
    int y_num_col_dims_;
};

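// Parameters for the concat operator: the list of input tensors, the output
// tensor, and the axis along which the inputs are concatenated.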
class ConcatParam : public OpParam {
  public:
    ConcatParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
                const framework::AttributeMap &attrs,
                const framework::Scope &scope) {
        inputs_ = InputMultiFrom<framework::Tensor>(inputs, scope);
        out_ = OutFrom<framework::Tensor>(outputs, scope);
        axis_ = GetAttr<int>("axis", attrs);
    }

    std::vector<Tensor *> Inputs() const { return inputs_; }

    Tensor *Out() const { return out_; }

    const int &Axis() const { return axis_; }

  private:
    std::vector<Tensor *> inputs_;
    Tensor *out_;
    int axis_;
};

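// Parameters for the local response normalization (lrn) operator: input X,
// outputs Out and MidOut, and the n/alpha/beta/k/data_format attributes.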
class LrnParam : public OpParam {
  public:
    LrnParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
             const framework::AttributeMap &attrs,
             const framework::Scope &scope) {
        input_x_ = InputXFrom<framework::Tensor>(inputs, scope);
        out_ = OutFrom<framework::Tensor>(outputs, scope);
        mid_out_ = MidOutFrom<framework::Tensor>(outputs, scope);
        n_ = GetAttr<int>("n", attrs);
        alpha_ = GetAttr<float>("alpha", attrs);
        beta_ = GetAttr<float>("beta", attrs);
        k_ = GetAttr<float>("k", attrs);
        data_format_ = GetAttr<std::string>("data_format", attrs);
    }

    const Tensor *InputX() const { return input_x_; }

    Tensor *Out() const { return out_; }

    Tensor *MidOut() const { return mid_out_; }

    const int &N() const { return n_; }

    const float &Alpha() const { return alpha_; }

    const float &Beta() const { return beta_; }

    const float &K() const { return k_; }

    const std::string &DataFormat() const { return data_format_; }

  private:
    Tensor *input_x_;
    Tensor *out_;
    Tensor *mid_out_;
    int n_;
    float alpha_;
    float beta_;
    float k_;
    std::string data_format_;
};
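
// Parameters for the batch normalization operator: input X, output Y, the
// scale/bias/mean/variance tensors, and the epsilon/momentum/is_test
// attributes.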
class BatchNormParam : OpParam {
  public:
    BatchNormParam(const VariableNameMap &inputs,
                   const VariableNameMap &outputs,
                   const framework::AttributeMap &attrs,
                   const framework::Scope &scope) {
        input_x_ = InputXFrom<framework::Tensor>(inputs, scope);
        output_y_ = OutputYFrom<framework::Tensor>(outputs, scope);
        input_bias_ = InputBiasFrom<framework::Tensor>(inputs, scope);
        input_mean_ = InputMeanFrom<framework::Tensor>(inputs, scope);
        input_scale_ = InputScaleFrom<framework::Tensor>(inputs, scope);
        input_variance_ = InputVarianceFrom<framework::Tensor>(inputs, scope);
        epsilon_ = GetAttr<float>("epsilon", attrs);
        momentum_ = GetAttr<float>("momentum", attrs);
        is_test_ = GetAttr<bool>("is_test", attrs);
    }

    const Tensor *InputX() const { return input_x_; }

    Tensor *OutputY() const { return output_y_; }

    const Tensor *InputBias() const { return input_bias_; }

    const Tensor *InputMean() const { return input_mean_; }

    const Tensor *InputScale() const { return input_scale_; }

    const Tensor *InputVariance() const { return input_variance_; }

    const float &Epsilon() const { return epsilon_; }

    const float &Momentum() const { return momentum_; }

    const bool &IsTest() const { return is_test_; }

    const std::string &DataFormat() const { return data_format_; }

  private:
    Tensor *input_x_;
    Tensor *output_y_;
    Tensor *input_bias_;
    Tensor *input_mean_;
    Tensor *input_scale_;
    Tensor *input_variance_;
    float epsilon_;
    float momentum_;
    bool is_test_;
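    // Note: data_format_ is not assigned in the constructor above, so
    // DataFormat() returns an empty string unless it is set elsewhere.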
    std::string data_format_;
};
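
// Parameters for the 2-D pooling operator: input and output tensors, the
// pooling type string, kernel size, strides, paddings, and the
// ceil_mode/global_pooling flags.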
class PoolParam : public OpParam {
  public:
    PoolParam(const VariableNameMap &inputs, const VariableNameMap &outputs,
              const framework::AttributeMap &attrs,
              const framework::Scope &scope) {
        input_ = InputXFrom<framework::Tensor>(inputs, scope);

        output_ = OutFrom<framework::Tensor>(outputs, scope);
        pooling_type_ = GetAttr<std::string>("pooling_type", attrs);
        ksize_ = GetAttr<std::vector<int>>("ksize", attrs);
        strides_ = GetAttr<std::vector<int>>("strides", attrs);
        paddings_ = GetAttr<std::vector<int>>("paddings", attrs);
        ceil_mode_ = GetAttr<bool>("ceil_mode", attrs);
        global_pooling_ = GetAttr<bool>("global_pooling", attrs);
    }

    const Tensor *Input() const { return input_; }

    Tensor *Output() const { return output_; }

    const std::string &PoolingType() const { return pooling_type_; }

    const std::vector<int> &Ksize() const { return ksize_; }

    const std::vector<int> &Strides() const { return strides_; }

    const std::vector<int> &Paddings() const { return paddings_; }

    bool isCeilMode() const { return ceil_mode_; }

    bool isGlobalPooling() const { return global_pooling_; }

  private:
    Tensor *input_;
    Tensor *output_;
    std::string pooling_type_;
    std::vector<int> ksize_;
    std::vector<int> strides_;
    std::vector<int> paddings_;
    bool ceil_mode_;
    bool global_pooling_ = false;
};

} // namespace operators
} // namespace paddle_mobile