diff --git a/paddle/framework/attribute.cc b/paddle/framework/attribute.cc
index 510dc28c57f642786e7c64d86961c76ac80014a8..d6a2975aaa419406aef7b228e78381dbce78890d 100644
--- a/paddle/framework/attribute.cc
+++ b/paddle/framework/attribute.cc
@@ -24,6 +24,9 @@ static ProgramDesc* g_program_desc = nullptr;
 ProgramDesc& GetProgramDesc() {
   if (g_program_desc == nullptr) {
     g_program_desc = new ProgramDesc();
+    auto root_block = g_program_desc->mutable_blocks()->Add();
+    root_block->set_idx(0);
+    root_block->set_parent_idx(-1);
   }
   return *g_program_desc;
 }
diff --git a/paddle/operators/math/softmax.cc b/paddle/operators/math/softmax.cc
index 1224c05810543226773b02b5aa1b26cea15d9f54..ac9f3c4bf61bf8e13faa17387f1112756db9a100 100644
--- a/paddle/operators/math/softmax.cc
+++ b/paddle/operators/math/softmax.cc
@@ -18,7 +18,7 @@ namespace paddle {
 namespace operators {
 namespace math {

-template class SoftmaxFunctor;
+template class SoftmaxFunctor;

 }  // namespace math
 }  // namespace operators
diff --git a/paddle/operators/softmax_with_cross_entropy_op.cc b/paddle/operators/softmax_with_cross_entropy_op.cc
index b6f33ad9e030bc69184de2fb5b88c9dcb07e71a9..e2299b254458cdd42dee4683561d4d5c81653fb1 100644
--- a/paddle/operators/softmax_with_cross_entropy_op.cc
+++ b/paddle/operators/softmax_with_cross_entropy_op.cc
@@ -82,40 +82,38 @@ class SoftmaxWithCrossEntropyOp : public framework::OperatorWithKernel {
   using framework::OperatorWithKernel::OperatorWithKernel;

  protected:
-  void InferShape(const framework::InferShapeContext& ctx) const override {
-    PADDLE_ENFORCE_NOT_NULL(ctx.InputVar("Logits"),
-                            "Input(Logits) should be not null.");
-    PADDLE_ENFORCE_NOT_NULL(ctx.InputVar("Label"),
-                            "Input(Label) should be not null.");
-
-    PADDLE_ENFORCE_NOT_NULL(ctx.OutputVar("Softmax"),
-                            "Output(Softmax) should be not null.");
-    PADDLE_ENFORCE_NOT_NULL(ctx.OutputVar("Loss"),
-                            "Output(Loss) should be not null.");
-
-    const Tensor* logits = ctx.Input<Tensor>("Logits");
-    const Tensor* labels = ctx.Input<Tensor>("Label");
+  void InferShape(framework::InferShapeContextBase* ctx) const override {
+    PADDLE_ENFORCE(ctx->HasInput("Logits"),
+                   "Input(Logits) should be not null.");
+    PADDLE_ENFORCE(ctx->HasInput("Label"), "Input(Label) should be not null.");
+
+    PADDLE_ENFORCE(ctx->HasOutput("Softmax"),
+                   "Output(Softmax) should be not null.");
+    PADDLE_ENFORCE(ctx->HasOutput("Loss"), "Output(Loss) should be not null.");
+
+    auto logits_dims = ctx->GetInputDim("Logits");
+    auto labels_dims = ctx->GetInputDim("Label");
     PADDLE_ENFORCE_EQ(
-        logits->dims().size(), 2UL,
+        logits_dims.size(), 2UL,
         "The input of softmax_with_cross_entropy should be a 2-D tensor.");
-    PADDLE_ENFORCE_EQ(ctx.Input<Tensor>("Label")->dims().size(), 2UL,
+    PADDLE_ENFORCE_EQ(labels_dims.size(), 2UL,
                       "The labels should be a 2-D tensor.");

-    if (ctx.Attr<bool>("softLabel")) {
-      PADDLE_ENFORCE_EQ(logits->dims()[1], labels->dims()[1],
+    if (ctx->Attrs().Get<bool>("softLabel")) {
+      PADDLE_ENFORCE_EQ(logits_dims[1], labels_dims[1],
                         "If Attr(softLabel) == true, the 2nd dimension of "
                         "Input(X) and Input(Label) should be equal.");
     } else {
-      PADDLE_ENFORCE_EQ(labels->dims()[1], 1UL,
+      PADDLE_ENFORCE_EQ(labels_dims[1], 1UL,
                         "If Attr(softLabel) == false, the 2nd dimension of "
                         "Input(Label) should be 1.");
     }

-    ctx.Output<framework::LoDTensor>("Softmax")->Resize(logits->dims());
-    ctx.Output<framework::LoDTensor>("Loss")->Resize({logits->dims()[0], 1});
+    ctx->SetOutputDim("Softmax", logits_dims);
+    ctx->SetOutputDim("Loss", {logits_dims[0], 1});

-    ctx.ShareLoD("Logits", /*->*/ "Softmax");
-    ctx.ShareLoD("Logits", /*->*/ "Loss");
ctx->ShareLoD("Logits", /*->*/ "Softmax"); + ctx->ShareLoD("Logits", /*->*/ "Loss"); } }; @@ -124,33 +122,32 @@ class SoftmaxWithCrossEntropyOpGrad : public framework::OperatorWithKernel { using framework::OperatorWithKernel::OperatorWithKernel; protected: - void InferShape(const framework::InferShapeContext& ctx) const override { - PADDLE_ENFORCE_NOT_NULL(ctx.InputVar(framework::GradVarName("Loss")), - "Input(Loss@Grad) should not be null."); - PADDLE_ENFORCE_NOT_NULL(ctx.InputVar("Softmax"), - "Input(Softmax) should be not null."); - PADDLE_ENFORCE_NOT_NULL(ctx.InputVar("Label"), - "Input(Label) should be not null."); - PADDLE_ENFORCE_NOT_NULL(ctx.OutputVar(framework::GradVarName("Logits")), - "Output(Logits@Grad) should be not null."); - - const Tensor* softmax = ctx.Input("Softmax"); - const Tensor* labels = ctx.Input("Label"); - PADDLE_ENFORCE_EQ(ctx.Input("Label")->dims().size(), 2UL, + void InferShape(framework::InferShapeContextBase* ctx) const override { + PADDLE_ENFORCE(ctx->HasInput(framework::GradVarName("Loss")), + "Input(Loss@Grad) should not be null."); + PADDLE_ENFORCE(ctx->HasInput("Softmax"), + "Input(Softmax) should be not null."); + PADDLE_ENFORCE(ctx->HasInput("Label"), "Input(Label) should be not null."); + PADDLE_ENFORCE(ctx->HasOutput(framework::GradVarName("Logits")), + "Output(Logits@Grad) should be not null."); + + auto softmax_dims = ctx->GetInputDim("Softmax"); + auto labels_dims = ctx->GetInputDim("Label"); + PADDLE_ENFORCE_EQ(labels_dims.size(), 2UL, "The labels should be a 2-D tensor."); - if (ctx.Attr("softLabel")) { - PADDLE_ENFORCE_EQ(softmax->dims()[1], labels->dims()[1], + if (ctx->Attrs().Get("softLabel")) { + PADDLE_ENFORCE_EQ(softmax_dims[1], labels_dims[1], "When Attr(softLabel) == true, the 2nd dimension of " "Input(X) and Input(Label) should be equal."); } else { - PADDLE_ENFORCE_EQ(labels->dims()[1], 1UL, + PADDLE_ENFORCE_EQ(labels_dims[1], 1UL, "When Attr(softLabel) == false, the 2nd dimension of " "Input(Label) should be 1."); } - ctx.Output(framework::GradVarName("Logits")) - ->Resize(ctx.Input("Softmax")->dims()); + ctx->SetOutputDim(framework::GradVarName("Logits"), + ctx->GetInputDim("Softmax")); } }; diff --git a/paddle/platform/enforce.h b/paddle/platform/enforce.h index df5f71ed760952ed042d7ffa40a4319a73fb93bf..b523ef03c0053622bfda5b4bf07515c1b480b4af 100644 --- a/paddle/platform/enforce.h +++ b/paddle/platform/enforce.h @@ -107,7 +107,7 @@ struct EnforceNotMet : public std::exception { template inline typename std::enable_if::type throw_on_error( - int stat, const Args&... args) { + bool stat, const Args&... args) { if (UNLIKELY(!(stat))) { throw std::runtime_error(string::Sprintf(args...)); } diff --git a/paddle/pybind/CMakeLists.txt b/paddle/pybind/CMakeLists.txt index 4f05406c7f74113d8fb10aa6914166e553858338..aa9ca4e31aa8bdae159ce2d8db8eadd2ab49dffc 100644 --- a/paddle/pybind/CMakeLists.txt +++ b/paddle/pybind/CMakeLists.txt @@ -1,6 +1,6 @@ if(WITH_PYTHON) cc_library(paddle_pybind SHARED - SRCS pybind.cc + SRCS pybind.cc protobuf.cc DEPS pybind python backward ${GLOB_OP_LIB}) endif(WITH_PYTHON) diff --git a/paddle/pybind/protobuf.cc b/paddle/pybind/protobuf.cc new file mode 100644 index 0000000000000000000000000000000000000000..de3f7bb97be9787e0e9b0130a87d19529edf44f3 --- /dev/null +++ b/paddle/pybind/protobuf.cc @@ -0,0 +1,575 @@ +/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License. */
+
+#include "paddle/pybind/protobuf.h"
+#include
+#include
+#include "paddle/framework/attribute.h"
+
+// Cast boost::variant for PyBind.
+// Copy from
+// https://github.com/pybind/pybind11/issues/576#issuecomment-269563199
+namespace pybind11 {
+namespace detail {
+
+// Can be replaced by a generic lambda in C++14
+struct variant_caster_visitor : public boost::static_visitor<handle> {
+  return_value_policy policy;
+  handle parent;
+
+  variant_caster_visitor(return_value_policy policy, handle parent)
+      : policy(policy), parent(parent) {}
+
+  template <class T>
+  handle operator()(T const &src) const {
+    return make_caster<T>::cast(src, policy, parent);
+  }
+};
+
+template <class Variant>
+struct variant_caster;
+
+template <template <class...> class V, class... Ts>
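Note on the `softmax_with_cross_entropy_op.cc` hunks above: they migrate the op from the runtime `InferShape(const InferShapeContext&)` interface, which dereferences tensors and calls `Resize`, to the compile-time `InferShape(InferShapeContextBase*)` interface, which works purely on variable names and dims. Below is a minimal sketch of what an op looks like under the new interface, using only calls that appear in this diff (`HasInput`, `HasOutput`, `GetInputDim`, `SetOutputDim`, `ShareLoD`); the op (`PassThroughOp`) and its argument names `X`/`Out` are hypothetical, chosen only to illustrate the pattern:

```cpp
// Hypothetical pass-through op: Out has the same shape (and LoD) as X.
class PassThroughOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

 protected:
  // Compile-time shape inference: only variable names and dims are visible
  // here, never tensor data, so this can run before any tensor is allocated.
  void InferShape(framework::InferShapeContextBase* ctx) const override {
    PADDLE_ENFORCE(ctx->HasInput("X"), "Input(X) should be not null.");
    PADDLE_ENFORCE(ctx->HasOutput("Out"), "Output(Out) should be not null.");

    // Propagate the input's dims to the output instead of resizing a tensor.
    ctx->SetOutputDim("Out", ctx->GetInputDim("X"));
    // Forward the LoD, as the softmax_with_cross_entropy hunk does.
    ctx->ShareLoD("X", /*->*/ "Out");
  }
};
```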