/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/operators/controlflow/conditional_block_op.h"
namespace paddle {
namespace operators {

class ConditionalBlockOp : public ConditionalOp {
 public:
  ConditionalBlockOp(const std::string &type,
                     const framework::VariableNameMap &inputs,
                     const framework::VariableNameMap &outputs,
                     const framework::AttributeMap &attrs)
      : ConditionalOp(type, inputs, outputs, attrs) {}
27 28 29 30

 private:
  void RunImpl(const framework::Scope &scope,
               const platform::Place &dev_place) const override {
31 32
    bool need_run;
    if (Attr<bool>("is_scalar_condition")) {
33 34 35 36
      // When is_scalar_condition is True, the conditional variable is a scalar,
      // whether need to execute the operators in sub-block depends on the
      // conditional variable (Cond).
      auto xs = InputTensors(scope, "Cond");
37 38
      need_run = ScalarCondition(xs);
    } else {
39 40 41 42
      // When is_scalar_condition is False, the conditional variable maybe a
      // vector or tensor, whether need to execute the operators in sub-block
      // depends on the input variables (Input).
      auto xs = InputTensors(scope, "Input");
43 44 45 46
      need_run = std::all_of(
          xs.begin(), xs.end(),
          [](const framework::LoDTensor *t) { return t->numel() != 0; });
    }
Y
Yu Yang 已提交
47 48 49 50 51 52 53 54 55

    if (need_run) {
      auto *scope_var = scope.FindVar(Output("Scope"));
      PADDLE_ENFORCE(scope_var != nullptr, "Must set scope");
      auto *scopes = scope_var->GetMutable<std::vector<framework::Scope *>>();
      scopes->resize(1);
      scopes->front() = &scope.NewScope();
      auto &cur_scope = *scopes->front();

D
dzhwinter 已提交
56
      framework::Executor exec(dev_place);
Y
Yu Yang 已提交
57
      auto *block = Attr<framework::BlockDesc *>("sub_block");
Y
Yu Yang 已提交
58 59 60 61 62 63 64 65 66 67 68 69
      exec.Run(*block->Program(), &cur_scope, block->ID(), false);
    }
  }
};

class ConditionalBlockGradOp : public ConditionalOp {
 public:
  ConditionalBlockGradOp(const std::string &type,
                         const framework::VariableNameMap &inputs,
                         const framework::VariableNameMap &outputs,
                         const framework::AttributeMap &attrs)
      : ConditionalOp(type, inputs, outputs, attrs) {}
70 71 72 73

 private:
  void RunImpl(const framework::Scope &scope,
               const platform::Place &dev_place) const override {
74 75
    bool need_run;
    if (Attr<bool>("is_scalar_condition")) {
76
      auto xs = this->InputTensors(scope, "Cond");
77 78
      need_run = ScalarCondition(xs);
    } else {
79
      auto xs = this->InputTensors(scope, "Input");
80 81 82 83
      need_run = std::all_of(
          xs.begin(), xs.end(),
          [](const framework::LoDTensor *t) { return t->numel() != 0; });
    }
Y
Yu Yang 已提交
84 85 86 87 88 89 90

    if (need_run) {
      auto *scope_var = scope.FindVar(Input("Scope"));
      PADDLE_ENFORCE(scope_var != nullptr, "Must set scope");
      auto &scopes = scope_var->Get<std::vector<framework::Scope *>>();
      framework::Scope &cur_scope = *scopes[0];

D
dzhwinter 已提交
91
      framework::Executor exec(dev_place);
Y
Yu Yang 已提交
92
      auto *block = Attr<framework::BlockDesc *>("sub_block");
Y
Yu Yang 已提交
93

94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112
      const auto &ins = Inputs("Input");
      const auto &d_ins = Outputs(framework::GradVarName("Input"));
      const auto &conds = Inputs("Cond");
      const auto &d_conds = Outputs(framework::GradVarName("Cond"));

      std::vector<std::string> ins_conds_grads;
      ins_conds_grads.reserve(ins.size() + conds.size());
      for (auto &in : ins) {
        ins_conds_grads.emplace_back(framework::GradVarName(in));
      }
      for (auto &cond : conds) {
        ins_conds_grads.emplace_back(framework::GradVarName(cond));
      }

      exec.Run(*block->Program(), &cur_scope, block->ID(), false, true,
               ins_conds_grads);

      AssignLocalGradientToGlobal(dev_place, cur_scope, ins_conds_grads.data(),
                                  ins.size(), d_ins);
Y
Yu Yang 已提交
113

114 115 116
      AssignLocalGradientToGlobal(dev_place, cur_scope,
                                  ins_conds_grads.data() + ins.size(),
                                  conds.size(), d_conds);
Y
Yu Yang 已提交
117 118 119 120 121
    }
  }

 private:
  void AssignLocalGradientToGlobal(
D
dzhwinter 已提交
122
      const platform::Place &place, const framework::Scope &cur_scope,
123
      const std::string *p_grad_names, size_t p_grad_names_num,
Y
Yu Yang 已提交
124
      const std::vector<std::string> &pg_names) const {
125
    for (size_t i = 0; i < p_grad_names_num; ++i) {
Y
Yu Yang 已提交
126
      auto out_grad_name = pg_names[i];
127
      const auto &in_grad_name = p_grad_names[i];
Y
Yu Yang 已提交
128 129 130 131 132
      auto *in_var = cur_scope.FindVar(in_grad_name);
      if (in_var == nullptr) {
        continue;
      }
      auto new_in_grad_name = cur_scope.Rename(in_grad_name);
Y
Yiqun Liu 已提交
133 134 135
      auto assign = framework::OpRegistry::CreateOp(
          "assign", {{"X", {new_in_grad_name}}}, {{"Out", {out_grad_name}}},
          framework::AttributeMap{});
D
dzhwinter 已提交
136
      assign->Run(cur_scope, place);
Y
Yu Yang 已提交
137 138 139 140 141 142 143 144
      cur_scope.Rename(new_in_grad_name, in_grad_name);
    }
  }
};

class ConditionalBlockGradInferShape : public framework::InferShapeBase {
 public:
  // Gradient outputs take the shapes of their corresponding forward inputs.
  void operator()(framework::InferShapeContext *context) const override {
    PADDLE_ENFORCE(context->HasInputs("Cond"));
    const auto input_grad_name = framework::GradVarName("Input");
    const auto cond_grad_name = framework::GradVarName("Cond");
    // d(Input) mirrors Input's dims whenever Input is present.
    if (context->HasInputs("Input")) {
      PADDLE_ENFORCE(context->HasOutputs(input_grad_name));
      context->SetOutputsDim(input_grad_name, context->GetInputsDim("Input"));
    }
    // d(Cond) is optional; mirror Cond's dims only when it is requested.
    if (context->HasOutputs(cond_grad_name)) {
      context->SetOutputsDim(cond_grad_name, context->GetInputsDim("Cond"));
    }
  }
};

class ConditionalBlockGradMaker : public framework::SingleGradOpDescMaker {
 public:
  using framework::SingleGradOpDescMaker::SingleGradOpDescMaker;

 protected:
  // Builds the conditional_block_grad op description that backpropagates
  // through the forward conditional_block op.
  std::unique_ptr<framework::OpDesc> Apply() const override {
    // Own the desc from the start: the original kept a raw `new` pointer
    // across all the Set* calls, which would leak if any of them threw.
    std::unique_ptr<framework::OpDesc> grad_op(new framework::OpDesc());
    grad_op->SetType("conditional_block_grad");
    // Forward inputs/outputs needed to re-evaluate the condition, plus the
    // child scope in which the forward sub-block executed.
    grad_op->SetInput("Cond", Input("Cond"));
    grad_op->SetInput("Input", Input("Input"));
    grad_op->SetInput("Out", Output("Out"));
    grad_op->SetInput(framework::GradVarName("Out"), OutputGrad("Out"));
    grad_op->SetInput("Scope", Output("Scope"));
    // drop_empty_grad=false: keep placeholders for unused gradients.
    grad_op->SetOutput(framework::GradVarName("Cond"),
                       InputGrad("Cond", false));
    grad_op->SetOutput(framework::GradVarName("Input"),
                       InputGrad("Input", false));
    grad_op->SetBlockAttr("sub_block", this->grad_block_[0]);
    grad_op->SetAttr("is_scalar_condition", GetAttr("is_scalar_condition"));
    return grad_op;
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;
// Register the forward op (with its proto maker and grad-desc maker) and the
// backward op (with its shape-inference functor).
REGISTER_OPERATOR(conditional_block, ops::ConditionalBlockOp,
                  ops::ConditionalBlockOpProtoMaker,
                  ops::ConditionalBlockGradMaker);
REGISTER_OPERATOR(conditional_block_grad, ops::ConditionalBlockGradOp,
                  ops::ConditionalBlockGradInferShape);