diff --git a/paddle/operators/accuracy_op.cc b/paddle/operators/accuracy_op.cc
index 32479ae5a35194e942fb4012c268c534200e1db6..391258b40ba5e091ba05525babbda7f5ebac7b2b 100644
--- a/paddle/operators/accuracy_op.cc
+++ b/paddle/operators/accuracy_op.cc
@@ -55,15 +55,15 @@ class AccuracyOpMaker : public framework::OpProtoAndCheckerMaker {
     // TODO(typhoonzero): AddInput("Weight", ...
     AddOutput("Accuracy", "The accuracy of current batch");
 
-    AddComment(
-        R"DOC(Accuracy. It will print accuracy rate for classification.
+    AddComment(R"DOC(
+Accuracy. It will print accuracy rate for classification.
 The accuracy is:
 
 .. math::
 accuracy = \\frac{NumOfCorrectPredicts}{NumOfAllSamples})
 
 Both the input `Inference` and `Label` can carry the LoD (Level of Details)
 information, or not. But the output only shares the LoD with input `Inference`.
-DOC");
+)DOC");
   }
 };
diff --git a/paddle/operators/dropout_op.cc b/paddle/operators/dropout_op.cc
index b111b9fccb2310bd5fb92bda878a497c51f62ce0..bfa1992d79f7c1e30a0ddfa203c90afd75934bf9 100644
--- a/paddle/operators/dropout_op.cc
+++ b/paddle/operators/dropout_op.cc
@@ -38,6 +38,7 @@ class DropoutOp : public framework::OperatorWithKernel {
     if (ctx.Attr("is_training") == 1) {
       ctx.Output("Mask")->Resize(dims);
     }
+    ctx.ShareLoD("X", "Out");
   }
 };
 
diff --git a/paddle/operators/fc_op.cc b/paddle/operators/fc_op.cc
index 56fe654d1e8df9ce2978dde4eb653bcfe38ebd02..5ac0e8cc45f007d42f1b6d7f86333f5cbedb3ea8 100644
--- a/paddle/operators/fc_op.cc
+++ b/paddle/operators/fc_op.cc
@@ -189,7 +189,6 @@ Activation type can be set to `identity` (default), `sigmoid` or `softmax`.
 
 All the inputs can carry the LoD (Level of Details) information, or not.
 But the output only shares the LoD with first input (`X[0]`).
-)DOC");
 )DOC");
   }
 };
diff --git a/paddle/operators/pad_op.cc b/paddle/operators/pad_op.cc
index a0b1c6b631d97a40d774f7d2ff9550fda9c32db4..98de18fb9f6189ce2646c1ec06a28d2ca992aeec 100644
--- a/paddle/operators/pad_op.cc
+++ b/paddle/operators/pad_op.cc
@@ -41,6 +41,11 @@ class PadOp : public framework::OperatorWithKernel {
     }
     ctx.Output("Out")->Resize(
         framework::make_ddim(out_dims));
+    if (out_dims[0] == x_dim[0]) {
+      // Only pass LoD when the first dimension is equal between
+      // output and input.
+      ctx.ShareLoD("X", "Out");
+    }
   }
 };
 
diff --git a/paddle/operators/reshape_op.cc b/paddle/operators/reshape_op.cc
index 0d05e344148c68f5625dd819ec59c5991892e4ce..c0907586199ec9212f038c5ca5f029485ce1cb77 100644
--- a/paddle/operators/reshape_op.cc
+++ b/paddle/operators/reshape_op.cc
@@ -51,6 +51,11 @@ class ReshapeOp : public framework::OperatorWithKernel {
                    [](int a) { return static_cast<int64_t>(a); });
     auto out_dims = framework::make_ddim(shape_int64);
     ctx.Output("Out")->Resize(out_dims);
+    if (shape[0] == in->dims()[0]) {
+      // Only pass LoD when the first dimension is equal between
+      // output and input.
+      ctx.ShareLoD("X", "Out");
+    }
   }
 };
 
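The pad and reshape hunks above both guard `ctx.ShareLoD("X", "Out")` with a check that the output keeps the input's first dimension, because LoD is a table of offsets into that dimension. The standalone sketch below illustrates that rule with toy types; the `LoD` alias, `ToyTensor` struct, and `maybe_share_lod` helper are hypothetical names used only for illustration and are not PaddlePaddle's `InferShapeContext` API.

#include <cstddef>
#include <cstdint>
#include <iostream>
#include <vector>

// Toy stand-ins for illustration; the real LoDTensor is more involved.
using LoD = std::vector<std::vector<std::size_t>>;  // per-level offsets into dim 0

struct ToyTensor {
  std::vector<int64_t> dims;  // dims[0] is the batch/sequence dimension
  LoD lod;                    // empty when the tensor carries no LoD
};

// Mirrors the condition added in pad_op.cc and reshape_op.cc: copy the LoD
// only when the output keeps the input's first dimension, since the LoD
// offsets index rows of that dimension.
void maybe_share_lod(const ToyTensor& in, ToyTensor* out) {
  if (!in.lod.empty() && !in.dims.empty() && !out->dims.empty() &&
      out->dims[0] == in.dims[0]) {
    out->lod = in.lod;
  }
}

int main() {
  // Two sequences of lengths 2 and 3 packed into a 5-row tensor.
  ToyTensor x{{5, 4}, {{0, 2, 5}}};

  ToyTensor reshaped{{5, 2, 2}, {}};  // first dimension preserved -> share LoD
  ToyTensor flattened{{20}, {}};      // first dimension changed   -> drop LoD

  maybe_share_lod(x, &reshaped);
  maybe_share_lod(x, &flattened);

  std::cout << "reshaped has LoD: " << !reshaped.lod.empty() << "\n";    // 1
  std::cout << "flattened has LoD: " << !flattened.lod.empty() << "\n";  // 0
  return 0;
}

Compiled and run, the sketch reports that the reshaped tensor keeps the LoD while the flattened one does not, which is the behaviour the new `if` guards in pad_op.cc and reshape_op.cc encode.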