/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
    http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include <algorithm>
#include <string>
#include <vector>

#include "paddle/fluid/framework/op_registry.h"
#include "paddle/fluid/operators/elementwise/elementwise_op_function.h"

namespace paddle {
namespace operators {
// Proto maker for binary logical operators (e.g. logical_and, logical_or).
// The concrete op is injected through the OpComment trait, which supplies
// the op name (`type`) and its element-wise formula (`equation`).
template <typename OpComment>
class BinaryLogicalOpProtoMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    OpComment comment;
    AddInput("X",
             string::Sprintf("Left hand operand of %s operator. Must be "
                             "a Variable of type being one of bool, int8, "
                             "int16, int32, int64, float32, float64.",
                             comment.type));
    AddInput("Y",
             string::Sprintf("Right hand operand of %s operator. Must be "
                             "a Variable of type being one of bool, int8, "
                             "int16, int32, int64, float32, float64.",
                             comment.type));
    // NOTE(review): the unary maker describes Out as "phi::DenseTensor or
    // Tensor"; this one says "Variable" — wording is inconsistent but kept
    // byte-identical since it is a runtime doc string.
    AddOutput("Out", string::Sprintf("n-dim bool Variable"));
    AddComment(string::Sprintf(R"DOC(%s Operator

It operates element-wise on X and Y, and returns the Out. X, Y and Out are N-dim phi::DenseTensor or Tensor.
Each element of Out is calculated by %s
)DOC",
                               comment.type,
                               comment.equation));
  }
};

// Proto maker for unary logical operators (e.g. logical_not). OpComment
// supplies the op name (`type`) and its element-wise formula (`equation`).
template <typename OpComment>
class UnaryLogicalOpProtoMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    OpComment comment;
    AddInput("X",
             string::Sprintf(
                 "Operand of %s operator. Must be "
                 "a phi::DenseTensor or Tensor of type being one of bool, "
                 "int8, int16, int32, int64, float32, float64.",
                 comment.type));
    AddOutput("Out", string::Sprintf("n-dim bool phi::DenseTensor or Tensor."));
    AddComment(string::Sprintf(R"DOC(%s Operator

It operates element-wise on X, and returns the Out. X and Out are N-dim phi::DenseTensor or Tensor.
Each element of Out is calculated by %s
)DOC",
                               comment.type,
                               comment.equation));
  }
};

Z
Zeng Jinle 已提交
67 68 69 70 71
class LogicalOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

 protected:
72
  phi::KernelKey GetExpectedKernelType(
Z
Zeng Jinle 已提交
73
      const framework::ExecutionContext &ctx) const override {
74
    phi::KernelKey kt = OperatorWithKernel::GetExpectedKernelType(ctx);
Z
Zeng Jinle 已提交
75
    // LogicalOp kernel's device type is decided by input tensor place
76 77
    kt.set_backend(
        phi::TransToPhiBackend(ctx.Input<phi::DenseTensor>("X")->place()));
Z
Zeng Jinle 已提交
78 79 80 81 82 83 84 85 86 87 88 89
    return kt;
  }
};

// Unary logical operator: Out has exactly the shape (and LoD) of X.
template <typename OpComment>
class UnaryLogicalOp : public LogicalOp {
 public:
  using LogicalOp::LogicalOp;

 protected:
  void InferShape(framework::InferShapeContext *context) const override {
    OpComment comment;
    // Validate that the required input is present; comment.type names the
    // op in the error message.
    OP_INOUT_CHECK(context->HasInput("X"), "Input", "X", comment.type);
    context->SetOutputDim("Out", context->GetInputDim("X"));
    context->ShareLoD("X", "Out");
  }
};

96
template <typename OpComment>
Z
Zeng Jinle 已提交
97
class BinaryLogicalOp : public LogicalOp {
98
 public:
Z
Zeng Jinle 已提交
99 100 101 102
  using LogicalOp::LogicalOp;

 protected:
  void InferShape(framework::InferShapeContext *context) const override {
103
    OpComment comment;
104 105
    OP_INOUT_CHECK(context->HasInput("X"), "Input", "X", comment.type);
    OP_INOUT_CHECK(context->HasInput("Y"), "Input", "Y", comment.type);
106 107
    auto dim_x = context->GetInputDim("X");
    auto dim_y = context->GetInputDim("Y");
108 109 110 111 112 113 114 115
    if (dim_x == dim_y) {
      context->SetOutputDim("Out", dim_x);
    } else {
      int max_dim = std::max(dim_x.size(), dim_y.size());
      int axis = std::abs(dim_x.size() - dim_y.size());
      std::vector<int> x_dims_array(max_dim);
      std::vector<int> y_dims_array(max_dim);
      std::vector<int> out_dims_array(max_dim);
116 117 118 119 120 121 122
      GetBroadcastDimsArrays(dim_x,
                             dim_y,
                             x_dims_array.data(),
                             y_dims_array.data(),
                             out_dims_array.data(),
                             max_dim,
                             axis);
123
      context->SetOutputDim("Out", phi::make_ddim(out_dims_array));
S
superjomn 已提交
124
    }
125 126 127 128 129 130 131 132 133 134 135 136 137 138 139
    context->ShareLoD("X", "Out");
  }
};

}  // namespace operators
}  // namespace paddle

// Declares the OpComment trait for `op_type` and registers the binary
// logical operator built from it. Logical ops have no gradient, so empty
// grad-op makers are registered for both static and imperative modes.
#define REGISTER_BINARY_LOGICAL_OP(op_type, _equation)                     \
  struct _##op_type##Comment {                                             \
    static char type[];                                                    \
    static char equation[];                                                \
  };                                                                       \
  char _##op_type##Comment::type[]{#op_type};                              \
  char _##op_type##Comment::equation[]{_equation};                         \
  REGISTER_OPERATOR(                                                       \
      op_type,                                                             \
      ::paddle::operators::BinaryLogicalOp<_##op_type##Comment>,           \
      ::paddle::operators::BinaryLogicalOpProtoMaker<_##op_type##Comment>, \
      ::paddle::framework::EmptyGradOpMaker<paddle::framework::OpDesc>,    \
      ::paddle::framework::EmptyGradOpMaker<paddle::imperative::OpBase>);

// Declares the OpComment trait for `op_type` and registers the unary
// logical operator built from it. Logical ops have no gradient, so empty
// grad-op makers are registered for both static and imperative modes.
#define REGISTER_UNARY_LOGICAL_OP(op_type, _equation)                     \
  struct _##op_type##Comment {                                            \
    static char type[];                                                   \
    static char equation[];                                               \
  };                                                                      \
  char _##op_type##Comment::type[]{#op_type};                             \
  char _##op_type##Comment::equation[]{_equation};                        \
  REGISTER_OPERATOR(                                                      \
      op_type,                                                            \
      ::paddle::operators::UnaryLogicalOp<_##op_type##Comment>,           \
      ::paddle::operators::UnaryLogicalOpProtoMaker<_##op_type##Comment>, \
      ::paddle::framework::EmptyGradOpMaker<paddle::framework::OpDesc>,   \
      ::paddle::framework::EmptyGradOpMaker<paddle::imperative::OpBase>);

// Instantiate and register the concrete logical operators. The second
// argument is the LaTeX equation shown in the generated op documentation.
REGISTER_BINARY_LOGICAL_OP(logical_and, "$$Out = X \\&\\& Y$$");
REGISTER_BINARY_LOGICAL_OP(logical_or, "$$Out = X || Y$$");
REGISTER_UNARY_LOGICAL_OP(logical_not, "$$Out = !X$$");
REGISTER_BINARY_LOGICAL_OP(logical_xor,
                           "$$Out = (X || Y) \\&\\& !(X \\&\\& Y)$$");