diff --git a/paddle/fluid/operators/kldiv_loss_op.cc b/paddle/fluid/operators/kldiv_loss_op.cc index a3254c51c231d316ca504ccbdc3f175f2274e1ef..be84b57c6f46b1f891bd68e1c0dade731fc0503a 100644 --- a/paddle/fluid/operators/kldiv_loss_op.cc +++ b/paddle/fluid/operators/kldiv_loss_op.cc @@ -48,7 +48,7 @@ class KLDivLossOp : public framework::OperatorWithKernel { if ("none" == reduction) { ctx->SetOutputDim("Loss", dim_x); } else { - ctx->SetOutputDim("Loss", framework::make_ddim({1})); + ctx->SetOutputDim("Loss", {1}); } } @@ -81,7 +81,7 @@ class KLDivLossOpMaker : public framework::OpProtoAndCheckerMaker { "The reduction type to apply to the output, available types " "are 'none' | 'batchmean' | 'mean' | 'sum', 'none' for no " "reduction, 'batchmean' for the sum of output divided by " - "batchmean size, 'mean' for the average valud of all output, " + "batch size, 'mean' for the average value of all output, " "'sum' for the sum of the output.") .SetDefault("mean"); diff --git a/paddle/fluid/operators/kldiv_loss_op.h b/paddle/fluid/operators/kldiv_loss_op.h index f262cfbb5fb337058cfe6c901efc2452148520d2..625e16e298d9f842fa621aca727c6df2cb045301 100644 --- a/paddle/fluid/operators/kldiv_loss_op.h +++ b/paddle/fluid/operators/kldiv_loss_op.h @@ -104,7 +104,8 @@ class KLDivLossGradKernel : public framework::OpKernel { auto loss_grad_expand = loss_grad_t.broadcast(Array1(expand)); auto grad_t = target_t * loss_grad_expand; - input_grad_t.device(place) = target_t.binaryExpr(grad_t, KLDivLossBackward()); + input_grad_t.device(place) = + target_t.binaryExpr(grad_t, KLDivLossBackward()); if ("mean" == reduction) { input_grad_t.device(place) = input_grad_t / static_cast(numel);