/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <algorithm>
#include <memory>
#include <string>
#include <vector>

#include "paddle/fluid/framework/executor.h"
#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/framework/var_type.h"

namespace paddle {
namespace operators {

// Base class for conditional block operators. It provides helpers that
// resolve the operator's input variables to LoDTensors and evaluate the
// boolean condition that decides whether the attached sub-block runs.
class ConditionalOp : public framework::OperatorBase {
 public:
  ConditionalOp(const std::string &type,
                const framework::VariableNameMap &inputs,
                const framework::VariableNameMap &outputs,
                const framework::AttributeMap &attrs)
      : OperatorBase(type, inputs, outputs, attrs) {}

  // Canonical names of the operator's inputs, outputs and attributes.
  static const char kInputs[];
  static const char kOutputs[];
  static const char kCondition[];
  static const char kScope[];
  static const char kSkipEagerDeletionVars[];

 protected:
  // Looks up every variable listed under `in_name` in `scope` and returns
  // their LoDTensor pointers, enforcing that each variable exists.
  std::vector<const framework::LoDTensor *> InputTensors(
      const framework::Scope &scope, const std::string &in_name) const {
    std::vector<const framework::LoDTensor *> retv;
    auto xs = Inputs(in_name);
    retv.resize(xs.size(), nullptr);
    std::transform(
        xs.begin(),
        xs.end(),
        retv.begin(),
        [&scope](const std::string &var_name) -> const framework::LoDTensor * {
          auto *var = scope.FindVar(var_name);
          PADDLE_ENFORCE_NOT_NULL(var,
                                  platform::errors::InvalidArgument(
                                      "Cannot find variable %s", var_name));
          return &var->Get<framework::LoDTensor>();
        });
    return retv;
  }

  // Interprets the single condition tensor as a boolean scalar. If the tensor
  // lives on a GPU or NPU device, it is first copied to CPU before reading.
  bool ScalarCondition(
      const std::vector<const framework::LoDTensor *> &ips) const {
    PADDLE_ENFORCE_EQ(
        ips.size() == 1UL && ips[0]->IsInitialized(),
        true,
        platform::errors::InvalidArgument(
            "The condition should have exactly one initialized input."));

    PADDLE_ENFORCE_EQ(framework::TransToProtoVarType(ips[0]->dtype()) ==
                              framework::proto::VarType::BOOL &&
                          ips[0]->numel() == 1,
                      true,
                      platform::errors::InvalidArgument(
                          "condition input's data type should be bool, "
                          "numel should be 1, actual numel is %d",
                          ips[0]->numel()));
    bool res = false;
    if (platform::is_gpu_place(ips[0]->place())) {
#if defined(PADDLE_WITH_CUDA) || defined(PADDLE_WITH_HIP)
      framework::LoDTensor cpu_tensor;
      framework::TensorCopy(*ips[0], platform::CPUPlace(), &cpu_tensor);
      platform::DeviceContextPool::Instance().Get(ips[0]->place())->Wait();
      res = cpu_tensor.data<bool>()[0];
#endif
    } else if (platform::is_npu_place(ips[0]->place())) {
#ifdef PADDLE_WITH_ASCEND_CL
      framework::LoDTensor cpu_tensor;
      framework::TensorCopy(*ips[0], platform::CPUPlace(), &cpu_tensor);
      platform::DeviceContextPool::Instance().Get(ips[0]->place())->Wait();
      res = cpu_tensor.data<bool>()[0];
#endif
    } else {
      res = ips[0]->data<bool>()[0];
    }
    return res;
  }
};
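
// A minimal sketch of how a derived operator's RunImpl might combine
// InputTensors() and ScalarCondition() to decide whether to execute the
// sub-block; `need_run` and the non-scalar branch below are illustrative
// assumptions, not part of this header's API:
//
//   bool need_run = false;
//   if (Attr<bool>("is_scalar_condition")) {
//     auto conds = InputTensors(scope, ConditionalOp::kCondition);
//     need_run = ScalarCondition(conds);
//   } else {
//     auto inputs = InputTensors(scope, ConditionalOp::kInputs);
//     need_run = std::all_of(
//         inputs.begin(), inputs.end(),
//         [](const framework::LoDTensor *t) { return t->numel() != 0; });
//   }
//   if (need_run) { /* run the ops in the attached sub_block */ }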

// Declares the inputs, outputs, attributes and documentation of the
// conditional block operator for the op registry.
class ConditionalBlockOpProtoMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput(ConditionalOp::kCondition,
             "The conditional variable of this operator. If Cond is empty, the "
             "whole sub-block will not be executed.")
        .AsDuplicable();
    AddInput(ConditionalOp::kInputs, "The input variables of the sub-block.")
        .AsDuplicable();
    AddOutput(ConditionalOp::kOutputs, "The output variables of the sub-block.")
        .AsDuplicable();
    AddOutput(ConditionalOp::kScope,
              "(std::vector<Scope*>) The step scope of conditional block. To "
              "unify the conditional block, rnn and while op, the type of "
              "scope is std::vector<Scope*>");
    AddAttr<framework::BlockDesc *>(
        "sub_block", "The step block of conditional block operator");
    AddAttr<bool>("is_scalar_condition",
                  "The conditional variable (Cond) is used as scalar "
                  "condition.")
        .SetDefault(false);
    AddAttr<std::vector<std::string>>(ConditionalOp::kSkipEagerDeletionVars,
                                      "Vars that would not be deleted when "
                                      "garbage collection strategy enables")
        .SetDefault(std::vector<std::string>())
        .AsExtra();
    AddComment(R"DOC(Conditional block operator

If `is_scalar_condition` is True, the conditional variable (Cond) is a scalar,
run the operators in sub-block if Cond is True.

If `is_scalar_condition` is False, the conditional variable (Cond) is a vector or
tensor, run the operators in sub-block if all of input variables are not empty.


)DOC");
  }
};
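
// A minimal sketch of the scalar-condition contract described in the
// documentation above; the variable name `cond` and the CPU place are
// assumptions for illustration:
//
//   auto *cond = scope.Var("cond")->GetMutable<framework::LoDTensor>();
//   cond->Resize({1});
//   cond->mutable_data<bool>(platform::CPUPlace())[0] = true;  // run sub-block
//
// With is_scalar_condition set to false, the sub-block runs only when every
// tensor fed through ConditionalOp::kInputs is non-empty.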

}  // namespace operators
}  // namespace paddle