/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <vector>
#include "paddle/framework/executor.h"
#include "paddle/framework/lod_tensor_array.h"
#include "paddle/framework/op_registry.h"
#include "paddle/framework/operator.h"
#include "paddle/operators/detail/safe_ref.h"

namespace paddle {
namespace operators {

using StepScopeVar = std::vector<framework::Scope *>;
using LoDTensor = framework::LoDTensor;

static constexpr char kStepBlock[] = "sub_block";
static constexpr char kCondition[] = "Condition";
static constexpr char kStepScopes[] = "StepScopes";
static constexpr char kX[] = "X";
static constexpr char kXGRAD[] = "X@GRAD";
static constexpr char kOutputs[] = "Out";

class WhileOp : public framework::OperatorBase {
 public:
  WhileOp(const std::string &type, const framework::VariableNameMap &inputs,
          const framework::VariableNameMap &outputs,
          const framework::AttributeMap &attrs)
      : framework::OperatorBase(type, inputs, outputs, attrs) {}

  void Run(const framework::Scope &scope,
           const platform::Place &dev_place) const override {
    PADDLE_ENFORCE_NOT_NULL(scope.FindVar(Input(kCondition)));
    auto &cond = scope.FindVar(Input(kCondition))->Get<LoDTensor>();
    PADDLE_ENFORCE_EQ(cond.dims(), paddle::framework::make_ddim({1}));

    framework::Executor executor(dev_place);
    auto *block = Attr<framework::BlockDesc *>(kStepBlock);

    auto *program = block->Program();

    auto step_scopes =
        scope.FindVar(Output(kStepScopes))->GetMutable<StepScopeVar>();

    PADDLE_ENFORCE(platform::is_cpu_place(cond.place()),
                   "Condition of while op must be in CPU memory.");
    while (cond.data<bool>()[0]) {
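      // Each iteration runs the step block in a fresh scope; the scope is
      // kept in StepScopes so that the backward pass can replay the steps.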
      auto &current_scope = scope.NewScope();
      step_scopes->push_back(&current_scope);

      executor.Run(*program, &current_scope, block->ID(),
                   false /*create_local_scope*/);
    }
  }
};

class WhileOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  WhileOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput(kX,
             "A set of variables, which are required by operators inside the "
             "block of While Op.")
        .AsDuplicable();
    AddInput(
        kCondition,
        "(Bool) A scalar. When it's False, the While Op will be terminated.")
        .AsDuplicable();
    AddOutput(kOutputs,
              "A set of variables, which will be assigned with values "
              "generated by the operators inside the block of While Op.")
        .AsDuplicable();
    AddOutput(kStepScopes,
              "(StepScopeVar) A vector of local scopes, whose size equals the "
              "step number of While Op. The i'th scope stores the temporary "
              "variables generated in the i'th step.");
    AddAttr<framework::BlockDesc *>(kStepBlock,
                                    "The step block inside WhileOp");
    AddComment(R"DOC(
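While operator

The While operator holds a step block (sub_block) and executes it repeatedly
while its Condition input (a scalar bool tensor kept in CPU memory) is true.
Each step runs in its own local scope, and the scopes are recorded in
StepScopes for use by the backward pass.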
)DOC");
  }
};

class WhileGradOp : public framework::OperatorBase {
 public:
  WhileGradOp(const std::string &type, const framework::VariableNameMap &inputs,
              const framework::VariableNameMap &outputs,
              const framework::AttributeMap &attrs)
      : framework::OperatorBase(type, inputs, outputs, attrs) {}

  void Run(const framework::Scope &scope,
           const platform::Place &dev_place) const override {
    framework::Executor executor(dev_place);
    auto *block = Attr<framework::BlockDesc *>(kStepBlock);
    auto *program = block->Program();

    auto *step_scopes =
        scope.FindVar(Input(kStepScopes))->GetMutable<StepScopeVar>();

    auto outside_og_names = Inputs(framework::GradVarName(kOutputs));
    auto inside_og_names =
        Attr<std::vector<std::string>>("original_output_grad");

    PADDLE_ENFORCE_EQ(outside_og_names.size(), inside_og_names.size());
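    // Traverse the saved step scopes in reverse order (last forward step
    // first), running the gradient block once per step.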

    for (auto cur_scope_iter = step_scopes->rbegin();
         cur_scope_iter != step_scopes->rend(); ++cur_scope_iter) {
      VLOG(3) << "Start backward at time_step "
              << cur_scope_iter - step_scopes->rbegin();
      framework::Scope &cur_scope = **cur_scope_iter;
      // Link OG from outside to inside
      for (size_t i = 0; i < outside_og_names.size(); ++i) {
        auto outside_og_name = outside_og_names[i];
        auto inside_og_name = inside_og_names[i];
        VLOG(8) << "Linking outside " << outside_og_name << " --> inside "
                << inside_og_name;
        auto &og_outside =
            detail::Ref(scope.FindVar(outside_og_name),
                        "Cannot find Outside Gradient %s", outside_og_name);
        auto &og_inside =
            detail::Ref(cur_scope.Var(inside_og_name),
                        "Cannot find inside gradient %s", inside_og_name);
        if (og_outside.Type().hash_code() ==
            typeid(framework::LoDTensor).hash_code()) {
          auto &outside_tensor = og_outside.Get<framework::LoDTensor>();
          auto &inside_tensor =
              detail::Ref(og_inside.GetMutable<framework::LoDTensor>());
          inside_tensor.set_lod(outside_tensor.lod());
          inside_tensor.ShareDataWith(outside_tensor);
        } else if (og_outside.Type().hash_code() ==
                   typeid(framework::LoDTensorArray).hash_code()) {
          auto &outside_array = og_outside.Get<framework::LoDTensorArray>();
          auto &inside_array =
              detail::Ref(og_inside.GetMutable<framework::LoDTensorArray>());
          VLOG(8) << outside_og_name << " size = " << outside_array.size();
          inside_array.resize(outside_array.size());

          for (size_t j = 0; j < inside_array.size(); ++j) {
            VLOG(8) << j << " " << outside_array[j].numel();
            if (outside_array[j].numel() != 0) {
              inside_array[j].set_lod(outside_array[j].lod());
              inside_array[j].ShareDataWith(outside_array[j]);
            } else {
              PADDLE_ENFORCE_EQ(inside_array[j].numel(), 0);
            }
          }
        }
      }

      executor.Run(*program, *cur_scope_iter, block->ID(), false);

      auto &pg_names = Outputs(kXGRAD);
      auto &p_names = Inputs(kX);
      PADDLE_ENFORCE_EQ(pg_names.size(), p_names.size());
      for (size_t param_id = 0; param_id < pg_names.size(); ++param_id) {
        if (pg_names[param_id] == framework::kEmptyVarName) {
          continue;  // parameter doesn't have gradient
        }
        auto inside_grad_name = framework::GradVarName(p_names[param_id]);

        //  // TODO(tonyyang-svail): Not sure we need the following
        //  // If the gradient of that variable is not computed inside the
        //  // rnn, just continue
        //  if (local_var_names.find(inside_grad_name) ==
        //  local_var_names.end()) {
        //    continue;
        //  }

        // Zero-initialize the parameter gradient in the outer scope at the
        // first backward step, so that the sum below starts from zero.
        if (cur_scope_iter == step_scopes->rbegin()) {
          auto *var = (*cur_scope_iter)->FindVar(inside_grad_name);
          PADDLE_ENFORCE_NOT_NULL(var, "Cannot find var %s", inside_grad_name);
          if (var->IsType<LoDTensor>()) {
            auto &inside_tensor = var->Get<framework::LoDTensor>();
            framework::AttributeMap attrs;
            attrs["dtype"] = framework::ToDataType(inside_tensor.type());
            attrs["shape"] = framework::vectorize2int(inside_tensor.dims());
            attrs["value"] = 0.0f;

            auto var_name = pg_names[param_id];
            auto zero_op = framework::OpRegistry::CreateOp(
                "fill_constant", framework::VariableNameMap{},
                {{"Out", {var_name}}}, attrs);
            zero_op->Run(scope, dev_place);
            scope.FindVar(var_name)
                ->GetMutable<framework::LoDTensor>()
                ->set_lod(inside_tensor.lod());
          }
        }
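        // Accumulate this step's parameter gradient into the outer gradient:
        // temporarily rename the inside gradient, sum it with the accumulated
        // value, then restore the original name for the next iteration.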

        auto new_inside_name = cur_scope.Rename(inside_grad_name);
        auto sum_op = framework::OpRegistry::CreateOp(
            "sum", {{"X", {pg_names[param_id], new_inside_name}}},
            {{"Out", {pg_names[param_id]}}}, framework::AttributeMap{});
        sum_op->Run(cur_scope, dev_place);
        cur_scope.Rename(new_inside_name, inside_grad_name);
      }
    }
  }
};

class WhileGradOpDescMaker : public framework::SingleGradOpDescMaker {
 public:
  using framework::SingleGradOpDescMaker::SingleGradOpDescMaker;

 protected:
  std::unique_ptr<framework::OpDesc> Apply() const override {
    auto *while_grad = new framework::OpDesc();
    while_grad->SetType("while_grad");
    while_grad->SetInput(kX, Input(kX));
    while_grad->SetInput(kOutputs, Output(kOutputs));
    while_grad->SetInput(kStepScopes, Output(kStepScopes));

    auto *grad_block = this->grad_block_[0];
    auto *fwd_block = grad_block->ParentBlock();

    // Not all of the IGs will be generated by the inner gradient operators of
    // the while op. Ignore IGs that are not generated by the inside block.
    std::unordered_set<std::string> inner_op_outputs;
    for (const auto *op : grad_block->AllOps()) {
      for (auto &oname : op->OutputArgumentNames()) {
        inner_op_outputs.insert(oname);
      }
    }
    auto igs = InputGrad(kX, /*do not drop empty gradient*/ false);
    for (auto &each_ig : igs) {
      if (inner_op_outputs.find(each_ig) == inner_op_outputs.end()) {
        VLOG(8) << "Ignore " << each_ig;
        each_ig = framework::kEmptyVarName;
      }
    }
    while_grad->SetOutput(framework::GradVarName(kX), igs);

    // OG should be re-calculated by the step blocks, since many outputs of the
    // while op do not need gradients.
    std::unordered_set<std::string> block_ins;
    block_ins.reserve(Input(kX).size() + Output(kOutputs).size());
    for (auto &p : Input(kX)) {
      block_ins.insert(p);
    }
    for (auto &o : Output(kOutputs)) {
      block_ins.insert(o);
    }
    std::unordered_set<std::string> extra_inputs;
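    // Collect inputs of the gradient block that are neither recorded above
    // nor defined in the forward block; they become the output-gradient
    // inputs of while_grad.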
    for (const auto *op : grad_block->AllOps()) {
      for (auto &input_name : op->InputArgumentNames()) {
        // If the input of the op has already been recorded or is generated by
        // the forward block, do not add it as an input again.
        if (block_ins.find(input_name) != block_ins.end() ||
            fwd_block->FindVar(input_name) != nullptr) {
          continue;
        }
        extra_inputs.insert(input_name);
      }
      for (auto &output_name : op->OutputArgumentNames()) {
        block_ins.insert(output_name);
      }
    }

    std::vector<std::string> extra_inputs_list;
    extra_inputs_list.resize(extra_inputs.size());
    std::copy(extra_inputs.begin(), extra_inputs.end(),
              extra_inputs_list.begin());
    while_grad->SetInput(framework::GradVarName(kOutputs), extra_inputs_list);

    while_grad->SetAttrMap(this->Attrs());
    while_grad->SetBlockAttr(kStepBlock, *grad_block);
    // Record the original output gradient names, since the gradient names
    // could be renamed inside the while operator.
    while_grad->SetAttr("original_output_grad", extra_inputs_list);

    return std::unique_ptr<framework::OpDesc>(while_grad);
  }
};

class WhileGradOpVarTypeInference : public framework::VarTypeInference {
 public:
  void operator()(const framework::OpDesc &op_desc,
                  framework::BlockDesc *block) const override {
    auto p_names = op_desc.Input(kX);
    auto pg_names = op_desc.Output(framework::GradVarName(kX));
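    // Each parameter gradient inherits the variable type and data type of its
    // corresponding forward parameter; empty gradients are skipped.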

    for (size_t i = 0; i < p_names.size(); ++i) {
      auto &p_var = detail::Ref(block->FindVarRecursive(p_names[i]));
      auto *g_var = block->FindVarRecursive(pg_names[i]);
      if (g_var != nullptr) {  // Gradient could be @EMPTY@
        VLOG(5) << "Setting " << pg_names[i] << " following " << p_names[i]
                << " type: " << p_var.GetType();
        g_var->SetType(p_var.GetType());
        g_var->SetDataType(p_var.GetDataType());
      }
    }
  }
};

class WhileGradOpShapeInference : public framework::InferShapeBase {
 public:
  void operator()(framework::InferShapeContext *ctx) const override {
    ctx->HasInputs(kX);
    ctx->HasOutputs(framework::GradVarName(kX));
    ctx->HasInputs(kOutputs);
    ctx->HasInputs(framework::GradVarName(kOutputs));

    auto p_names = ctx->Inputs(kX);
    auto pg_names = ctx->Outputs(kXGRAD);
    auto var_types = ctx->GetInputsVarType(kX);
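    // Propagate each input's dims to its corresponding gradient output,
    // skipping gradients that are kEmptyVarName.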
    std::vector<std::string> names_to_set;
    std::vector<framework::DDim> dims_to_set;
    for (size_t i = 0; i < p_names.size(); ++i) {
      if (pg_names[i] == framework::kEmptyVarName) {
        continue;
      }
      auto dims = ctx->GetInputsElementDim(kX, i);
      if (var_types[i] == framework::proto::VarDesc::LOD_TENSOR) {
        names_to_set.push_back(pg_names[i]);
        dims_to_set.push_back(dims);
      } else if (var_types[i] == framework::proto::VarDesc::LOD_TENSOR_ARRAY) {
        // not sure how to set the dim of LOD_TENSOR_ARRAY
        names_to_set.push_back(pg_names[i]);
        dims_to_set.push_back(dims);
      }
    }
    ctx->SetDims(names_to_set, dims_to_set);
  }
};

}  // namespace operators
}  // namespace paddle

REGISTER_OPERATOR(while, paddle::operators::WhileOp,
                  paddle::operators::WhileOpMaker,
                  paddle::operators::WhileGradOpDescMaker);
REGISTER_OPERATOR(while_grad, paddle::operators::WhileGradOp,
                  paddle::operators::WhileGradOpShapeInference,
                  paddle::operators::WhileGradOpVarTypeInference);