/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.
   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at
   http://www.apache.org/licenses/LICENSE-2.0
   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License. */

#include "paddle/framework/op_registry.h"
#include "paddle/operators/nccl/nccl_gpu_common.h"

namespace paddle {
namespace operators {

// NCCLInitOp
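// It reads the GPU id list from Attr("gpus") and initializes the
// platform::Communicator held by the output variable "Communicator".
// Deriving from OperatorBase (not OperatorWithKernel), its Run() executes
// entirely on the host.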
class NCCLInitOp : public framework::OperatorBase {
 public:
  NCCLInitOp(const std::string &type, const framework::VariableNameMap &inputs,
             const framework::VariableNameMap &outputs,
             const framework::AttributeMap &attrs)
      : OperatorBase(type, inputs, outputs, attrs) {}

  void Run(const framework::Scope &scope,
           const platform::DeviceContext &dev_ctx) const override {
    const auto &name = Output("Communicator");
    PADDLE_ENFORCE_NOT_NULL(scope.FindVar(name),
                            "Cannot find variable '%s' in the scope.", name);
    std::vector<int> gpus = Attr<std::vector<int>>("gpus");
    PADDLE_ENFORCE(!gpus.empty(), "Attr(gpus) should not be empty.");
    platform::Communicator *comm =
        scope.FindVar(name)->GetMutable<platform::Communicator>();
    comm->InitAll(gpus);
  }
};

class NCCLInitOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  NCCLInitOpMaker(framework::OpProto *proto,
                  framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddOutput("Communicator",
              "Create Communicator for communicating between gpus");
    AddAttr<std::vector<int>>("gpus", "list of GPU ids");
    AddAttr<int>("data_type", "output data type")
        .SetDefault(framework::DataType::FP32);
    AddComment(R"DOC(
               create communicator.
        )DOC");
  }
};

// AllReduceOp
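// NCCLAllReduceOp only performs shape inference: Out takes the shapes and
// LoD of X. The ncclAllReduce call itself is issued by the GPU kernel,
// which is registered separately (presumably in the companion .cu file).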
class NCCLAllReduceOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

 protected:
  void InferShape(framework::InferShapeContext *ctx) const override {
    PADDLE_ENFORCE(ctx->HasInput("X"),
                   "Input(X) of AllReduce op should not be NULL");
    PADDLE_ENFORCE(
        ctx->HasInput("Communicator"),
        "Input(Communicator) of AllReduce op should not be NULL");
    PADDLE_ENFORCE(ctx->HasOutput("Out"),
                   "Output(Out) of AllReduce op should not be NULL");

    auto x_dims = ctx->GetInputsDim("X");

    // std::string reduction = ctx->Attrs().Get<std::string>("reduction");
    // PADDLE_ENFORCE((reduction == "ncclSum" || reduction == "ncclProd" ||
    //                 reduction == "ncclMin" || reduction == "ncclMax"),
    //                "invalid reduction.");

    ctx->SetOutputsDim("Out", x_dims);
    ctx->ShareLoD("X", /*->*/ "Out");
  }
};

// ReduceOp
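// NCCLReduceOp mirrors AllReduce for shape inference; which GPU ends up
// with the reduced result is chosen by the maker's "root" attribute
// (-1 means the root is picked by hashing the parameter name).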
class NCCLReduceOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

 protected:
  void InferShape(framework::InferShapeContext *ctx) const override {
    PADDLE_ENFORCE(ctx->HasInput("X"),
                   "Input(X) of Reduce op should not be NULL");
    PADDLE_ENFORCE(
        ctx->HasInput("Communicator"),
        "Input(Communicator) of Reduce op should not be NULL");
    PADDLE_ENFORCE(ctx->HasOutput("Out"),
                   "Output(Out) of Reduce op should not be NULL");

    auto x_dims = ctx->GetInputsDim("X");
    ctx->SetOutputsDim("Out", x_dims);
    ctx->ShareLoD("X", /*->*/ "Out");
  }
};

// BcastOp
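// NCCLBcastOp broadcasts the root GPU's tensor to the other GPUs.
// Unlike Reduce, it requires an explicit root: root == -1 is rejected
// during shape inference.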
class NCCLBcastOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

 protected:
  void InferShape(framework::InferShapeContext *ctx) const override {
    PADDLE_ENFORCE(ctx->HasInput("X"),
                   "Input(X) of Bcast op should not be NULL");
    PADDLE_ENFORCE(ctx->HasInput("Communicator"),
                   "Input(Communicator) of Bcast op should not be NULL");
    PADDLE_ENFORCE(ctx->HasOutput("Out"),
                   "Output(Out) of Bcast op should not be NULL");

    int root = ctx->Attrs().Get<int>("root");
    PADDLE_ENFORCE(root != -1, "Bcast root must be set.");

    auto x_dims = ctx->GetInputsDim("X");
    ctx->SetOutputsDim("Out", x_dims);
    ctx->ShareLoD("X", /*->*/ "Out");
  }
};

// AllReduceOpMaker
class NCCLAllReduceOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  NCCLAllReduceOpMaker(framework::OpProto *proto,
                       framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "The input of AllReduce op");
    AddInput("Communicator", "Communicator for communicating between gpus");
    AddOutput("Out", "The output of AllReduce op");
    // AddAttr<std::string>("reduction",
    //                      "{'ncclmin', 'ncclmax', 'ncclprod', 'ncclsum'}.");
    // AddAttr<std::vector<int>>("gpus", "gpu id lists");
    AddComment(R"DOC(
            AllReduce the input tensors.
        )DOC");
  }
};

// ReduceOpMaker
class NCCLReduceOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  NCCLReduceOpMaker(framework::OpProto *proto,
                    framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "The input of Reduce op");
    AddInput("Communicator", "Communicator for communicating between gpus");
    AddOutput("Out", "The output of Reduce op");
    AddAttr<int>("root",
                 "root gpu of the parameter. if not set(-1). hashed by name.")
        .SetDefault(-1);
    AddComment(R"DOC(
            Reduce the input tensors.
        )DOC");
  }
};

// BcastOpMaker
class NCCLBcastOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  NCCLBcastOpMaker(framework::OpProto *proto,
                   framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "The input of BcastSend op");
    AddInput("Communicator", "Communicator for communicating between gpus");
    AddOutput("Out", "The output of Bcast");
    AddAttr<int>("root",
                 "root gpu of the parameter. if not set(-1). hashed by name.")
        .SetDefault(-1);
    AddComment(R"DOC(
            Bcast the tensors.
        )DOC");
  }
};

}  // namespace operators
}  // namespace paddle

namespace ops = paddle::operators;
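// ncclInit is registered as a plain operator with no gradient
// (EmptyGradOpMaker); the collective ops below likewise have no gradients.
// Their GPU kernels are registered elsewhere (presumably in nccl_op.cu).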
REGISTER_OPERATOR(ncclInit, ops::NCCLInitOp,
                  paddle::framework::EmptyGradOpMaker, ops::NCCLInitOpMaker);

REGISTER_OP_WITHOUT_GRADIENT(ncclAllReduce, ops::NCCLAllReduceOp,
                             ops::NCCLAllReduceOpMaker);
REGISTER_OP_WITHOUT_GRADIENT(ncclBcast, ops::NCCLBcastOp,
                             ops::NCCLBcastOpMaker);
REGISTER_OP_WITHOUT_GRADIENT(ncclReduce, ops::NCCLReduceOp,
                             ops::NCCLReduceOpMaker);