/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <unordered_set>
#include <vector>
#include "paddle/fluid/framework/executor.h"
#include "paddle/fluid/framework/lod_tensor_array.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/operators/detail/safe_ref.h"
Y
Yang Yang(Tony) 已提交
21 22 23 24 25 26 27

namespace paddle {
namespace operators {

using StepScopeVar = std::vector<framework::Scope *>;
using LoDTensor = framework::LoDTensor;

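// Names of the step-block attribute and of the inputs/outputs shared by
// WhileOp and WhileGradOp.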
static constexpr char kStepBlock[] = "sub_block";
static constexpr char kCondition[] = "Condition";
static constexpr char kStepScopes[] = "StepScopes";
static constexpr char kX[] = "X";
static constexpr char kXGRAD[] = "X@GRAD";
static constexpr char kOutputs[] = "Out";

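// WhileOp repeatedly executes the step block while the boolean tensor given
// by Condition stays true. Each iteration runs in a fresh scope, and every
// scope is recorded in StepScopes so that WhileGradOp can replay the steps.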
class WhileOp : public framework::OperatorBase {
 public:
  WhileOp(const std::string &type, const framework::VariableNameMap &inputs,
          const framework::VariableNameMap &outputs,
          const framework::AttributeMap &attrs)
      : framework::OperatorBase(type, inputs, outputs, attrs) {}

 private:
  void RunImpl(const framework::Scope &scope,
               const platform::Place &dev_place) const override {
    PADDLE_ENFORCE_NOT_NULL(scope.FindVar(Input(kCondition)));
    auto &cond = scope.FindVar(Input(kCondition))->Get<LoDTensor>();
    PADDLE_ENFORCE_EQ(cond.dims(), paddle::framework::make_ddim({1}));

    framework::Executor executor(dev_place);
    auto *block = Attr<framework::BlockDesc *>(kStepBlock);

    auto *program = block->Program();

    auto step_scopes =
        scope.FindVar(Output(kStepScopes))->GetMutable<StepScopeVar>();

    PADDLE_ENFORCE(platform::is_cpu_place(cond.place()),
                   "Condition of while op must be in CPU memory.");
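    // Each step runs in its own scope. The scopes are intentionally kept
    // alive after the loop: the backward pass reads the intermediate values
    // from them and releases them afterwards.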
    while (cond.data<bool>()[0]) {
      auto &current_scope = scope.NewScope();
      step_scopes->push_back(&current_scope);

      executor.Run(*program, &current_scope, block->ID(),
                   false /*create_local_scope*/);
    }
  }
};

class WhileOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  WhileOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput(kX,
             "A set of variables, which are required by operators inside the "
             "block of While Op.")
        .AsDuplicable();
    AddInput(
        kCondition,
        "(Bool) A scalar. When it is False, the While Op will be terminated.")
        .AsDuplicable();
    AddOutput(kOutputs,
              "A set of variables, which will be assigned values generated "
              "by the operators inside the block of the While Op.")
        .AsDuplicable();
    AddOutput(kStepScopes,
              "(StepScopeVar) A vector of local scopes, whose size equals "
              "the step count of the While Op. The i'th scope stores the "
              "temporary variables generated in the i'th step.");
    AddAttr<framework::BlockDesc *>(kStepBlock,
                                    "The step block inside WhileOp");
    AddComment(R"DOC(
While operator

Repeatedly executes the step block while the scalar boolean tensor given by
Condition is true. The local scope of every step is recorded in StepScopes
so that the backward pass can replay the steps in reverse.
)DOC");
  }
};

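// WhileGradOp computes the gradients of WhileOp: it walks the recorded step
// scopes in reverse order, runs the gradient block once per step, and
// accumulates the parameter gradients across steps.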
class WhileGradOp : public framework::OperatorBase {
 public:
  WhileGradOp(const std::string &type, const framework::VariableNameMap &inputs,
              const framework::VariableNameMap &outputs,
              const framework::AttributeMap &attrs)
      : framework::OperatorBase(type, inputs, outputs, attrs) {}

 private:
  void RunImpl(const framework::Scope &scope,
               const platform::Place &dev_place) const override {
    // get device context from pool
    platform::DeviceContextPool &pool = platform::DeviceContextPool::Instance();
    auto &dev_ctx = *pool.Get(dev_place);
    framework::Executor executor(dev_place);
    auto *block = Attr<framework::BlockDesc *>(kStepBlock);
    auto *program = block->Program();

    auto *step_scopes =
        scope.FindVar(Input(kStepScopes))->GetMutable<StepScopeVar>();

    auto outside_og_names = Inputs(framework::GradVarName(kOutputs));
    auto inside_og_names =
        Attr<std::vector<std::string>>("original_output_grad");

    PADDLE_ENFORCE_EQ(outside_og_names.size(), inside_og_names.size());

    for (auto cur_scope_iter = step_scopes->rbegin();
         cur_scope_iter != step_scopes->rend(); ++cur_scope_iter) {
      VLOG(3) << "Start backward at time_step "
              << cur_scope_iter - step_scopes->rbegin();
      framework::Scope &cur_scope = **cur_scope_iter;
      // Link OG from outside to inside
      for (size_t i = 0; i < outside_og_names.size(); ++i) {
        auto outside_og_name = outside_og_names[i];
        auto inside_og_name = inside_og_names[i];
        VLOG(8) << "Linking outside " << outside_og_name << " --> inside "
                << inside_og_name;
        auto &og_outside =
            detail::Ref(scope.FindVar(outside_og_name),
                        "Cannot find outside gradient %s", outside_og_name);
        auto &og_inside =
            detail::Ref(cur_scope.Var(inside_og_name),
                        "Cannot find inside gradient %s", inside_og_name);
        if (og_outside.Type().hash_code() ==
            typeid(framework::LoDTensor).hash_code()) {
          auto &outside_tensor = og_outside.Get<framework::LoDTensor>();
          auto &inside_tensor =
              detail::Ref(og_inside.GetMutable<framework::LoDTensor>());
          inside_tensor.set_lod(outside_tensor.lod());
          inside_tensor.ShareDataWith(outside_tensor);
        } else if (og_outside.Type().hash_code() ==
                   typeid(framework::LoDTensorArray).hash_code()) {
          auto &outside_array = og_outside.Get<framework::LoDTensorArray>();
          auto &inside_array =
              detail::Ref(og_inside.GetMutable<framework::LoDTensorArray>());
          VLOG(8) << outside_og_name << " size = " << outside_array.size();
          inside_array.resize(outside_array.size());

          for (size_t j = 0; j < inside_array.size(); ++j) {
            VLOG(8) << j << " " << outside_array[j].numel();
            if (outside_array[j].numel() != 0) {
              inside_array[j].set_lod(outside_array[j].lod());
              inside_array[j].ShareDataWith(outside_array[j]);
            } else {
              PADDLE_ENFORCE_EQ(inside_array[j].numel(), 0);
            }
          }
        }
      }

      executor.Run(*program, *cur_scope_iter, block->ID(), false);

      auto &pg_names = Outputs(kXGRAD);
      auto &p_names = Inputs(kX);
      PADDLE_ENFORCE_EQ(pg_names.size(), p_names.size());
      for (size_t param_id = 0; param_id < pg_names.size(); ++param_id) {
        if (pg_names[param_id] == framework::kEmptyVarName) {
          continue;  // parameter doesn't have gradient
        }
        auto inside_grad_name = framework::GradVarName(p_names[param_id]);

        // TODO(tonyyang-svail): Not sure we need the following.
        // If the gradient of that variable is not computed inside the RNN,
        // just continue:
        //   if (local_var_names.find(inside_grad_name) ==
        //       local_var_names.end()) {
        //     continue;
        //   }

        // zero gradient variable in step 0
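        // (At the first backward step the outer gradient variable has not
        // been written yet, so it is filled with zeros before being used as
        // an operand of the sum below.)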
        if (cur_scope_iter == step_scopes->rbegin()) {
          auto *var = (*cur_scope_iter)->FindVar(inside_grad_name);
          PADDLE_ENFORCE_NOT_NULL(var, "Cannot find var %s", inside_grad_name);
          if (var->IsType<LoDTensor>()) {
            auto &inside_tensor = var->Get<framework::LoDTensor>();
            framework::AttributeMap attrs;
            attrs["dtype"] = framework::ToDataType(inside_tensor.type());
            attrs["shape"] = framework::vectorize2int(inside_tensor.dims());
            attrs["value"] = 0.0f;

            auto var_name = pg_names[param_id];
            auto zero_op = framework::OpRegistry::CreateOp(
                "fill_constant", framework::VariableNameMap{},
                {{"Out", {var_name}}}, attrs);
            zero_op->Run(scope, dev_place);
            scope.FindVar(var_name)
                ->GetMutable<framework::LoDTensor>()
                ->set_lod(inside_tensor.lod());
          }
        }

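        // Accumulate this step's gradient into the outer gradient variable:
        // temporarily rename the inner gradient so both operands are visible
        // to the sum op, then restore the name for the next iteration.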
        auto new_inside_name = cur_scope.Rename(inside_grad_name);
        auto sum_op = framework::OpRegistry::CreateOp(
            "sum", {{"X", {pg_names[param_id], new_inside_name}}},
            {{"Out", {pg_names[param_id]}}}, framework::AttributeMap{});
        sum_op->Run(cur_scope, dev_place);
        cur_scope.Rename(new_inside_name, inside_grad_name);
      }
      dev_ctx.Wait();
      const_cast<framework::Scope &>(scope).DeleteScope(&cur_scope);
    }
  }
};

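// WhileGradOpDescMaker builds the while_grad op description: it forwards X,
// Out and StepScopes, drops input gradients that the gradient block never
// produces, and collects the output gradients that the gradient block reads.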
class WhileGradOpDescMaker : public framework::SingleGradOpDescMaker {
 public:
  using framework::SingleGradOpDescMaker::SingleGradOpDescMaker;

 protected:
  std::unique_ptr<framework::OpDesc> Apply() const override {
    auto *while_grad = new framework::OpDesc();
    while_grad->SetType("while_grad");
    while_grad->SetInput(kX, Input(kX));
    while_grad->SetInput(kOutputs, Output(kOutputs));
    while_grad->SetInput(kStepScopes, Output(kStepScopes));

    auto *grad_block = this->grad_block_[0];
    auto *fwd_block = grad_block->ForwardBlock();
    auto *parent_block = grad_block->ParentBlock();

    // Not all of the input gradients (IGs) will be generated by the gradient
    // operators inside the while op. Ignore the IGs that the inner block does
    // not generate.
    std::unordered_set<std::string> inner_op_outputs;
    for (const auto *op : grad_block->AllOps()) {
      for (auto &oname : op->OutputArgumentNames()) {
        inner_op_outputs.insert(oname);
      }
    }
    auto igs = InputGrad(kX, /*do not drop empty gradient*/ false);
    for (auto &each_ig : igs) {
      if (inner_op_outputs.find(each_ig) == inner_op_outputs.end()) {
        VLOG(8) << "Ignore " << each_ig;
        each_ig = framework::kEmptyVarName;
      }
    }
    while_grad->SetOutput(framework::GradVarName(kX), igs);

    // The output gradients (OGs) should be recomputed by the step block, since
    // many outputs of the while op do not need gradients.
    std::unordered_set<std::string> block_ins;
    block_ins.reserve(Input(kX).size() + Output(kOutputs).size());
    for (auto &p : Input(kX)) {
      block_ins.insert(p);
    }
    for (auto &o : Output(kOutputs)) {
      block_ins.insert(o);
    }
    std::unordered_set<std::string> output_grads;
    for (const auto *op : grad_block->AllOps()) {
      for (auto &input_name : op->InputArgumentNames()) {
        // If an input of the op has already been recorded or is generated by
        // the forward block, do not add it as an input again.

        // The input appears in the op's I/O or in another op's outputs, or
        // the variable is defined in one of grad_block's parent blocks.
        if (block_ins.find(input_name) != block_ins.end() ||
            (fwd_block->FindVarRecursive(input_name) != nullptr ||
             parent_block->FindVarRecursive(input_name) != nullptr)) {
          continue;
        }
        output_grads.insert(input_name);
      }
      for (auto &output_name : op->OutputArgumentNames()) {
        block_ins.insert(output_name);
      }
    }

    std::vector<std::string> output_grads_list;
    output_grads_list.resize(output_grads.size());
    std::copy(output_grads.begin(), output_grads.end(),
              output_grads_list.begin());
    while_grad->SetInput(framework::GradVarName(kOutputs), output_grads_list);

    while_grad->SetAttrMap(this->Attrs());
    while_grad->SetBlockAttr(kStepBlock, *grad_block);
    // Record the original output gradient names, since the gradient names of
    // the while operator could be renamed.
    while_grad->SetAttr("original_output_grad", output_grads_list);

    return std::unique_ptr<framework::OpDesc>(while_grad);
  }
};

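// Propagates the variable type and data type of each parameter in X to its
// gradient; a gradient variable must have the same type as its forward
// counterpart.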
class WhileGradOpVarTypeInference : public framework::VarTypeInference {
 public:
  void operator()(const framework::OpDesc &op_desc,
                  framework::BlockDesc *block) const override {
    auto p_names = op_desc.Input(kX);
    auto pg_names = op_desc.Output(framework::GradVarName(kX));

    for (size_t i = 0; i < p_names.size(); ++i) {
      auto &p_var = detail::Ref(block->FindVarRecursive(p_names[i]));
      auto *g_var = block->FindVarRecursive(pg_names[i]);
      if (g_var != nullptr) {  // Gradient could be @EMPTY@
        VLOG(5) << "Setting " << pg_names[i] << " following " << p_names[i]
                << " type: " << p_var.GetType();
        g_var->SetType(p_var.GetType());
        g_var->SetDataType(p_var.GetDataType());
      }
    }
  }
};

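// Infers the shapes of the X@GRAD outputs from the corresponding X inputs.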
class WhileGradOpShapeInference : public framework::InferShapeBase {
 public:
  void operator()(framework::InferShapeContext *ctx) const override {
    ctx->HasInputs(kX);
    ctx->HasOutputs(framework::GradVarName(kX));
    ctx->HasInputs(kOutputs);
    ctx->HasInputs(framework::GradVarName(kOutputs));

    auto p_names = ctx->Inputs(kX);
    auto pg_names = ctx->Outputs(kXGRAD);
    auto var_types = ctx->GetInputsVarType(kX);
    std::vector<std::string> names_to_set;
    std::vector<framework::DDim> dims_to_set;
    for (size_t i = 0; i < p_names.size(); ++i) {
      if (pg_names[i] == framework::kEmptyVarName) {
        continue;
      }
      auto dims = ctx->GetInputsElementDim(kX, i);
      if (var_types[i] == framework::proto::VarType::LOD_TENSOR) {
        names_to_set.push_back(pg_names[i]);
        dims_to_set.push_back(dims);
      } else if (var_types[i] == framework::proto::VarType::LOD_TENSOR_ARRAY) {
        // not sure how to set the dim of LOD_TENSOR_ARRAY
        names_to_set.push_back(pg_names[i]);
        dims_to_set.push_back(dims);
      }
    }
      }
    }
    ctx->SetDims(names_to_set, dims_to_set);
  }
};

}  // namespace operators
}  // namespace paddle

REGISTER_OPERATOR(while, paddle::operators::WhileOp,
                  paddle::operators::WhileOpMaker,
                  paddle::operators::WhileGradOpDescMaker);
REGISTER_OPERATOR(while_grad, paddle::operators::WhileGradOp,
                  paddle::operators::WhileGradOpShapeInference,
                  paddle::operators::WhileGradOpVarTypeInference);