提交 99369d43 编写于 作者: D dengkaipeng

fix doc. test=develop

上级 40405d13
...@@ -48,7 +48,7 @@ class KLDivLossOp : public framework::OperatorWithKernel { ...@@ -48,7 +48,7 @@ class KLDivLossOp : public framework::OperatorWithKernel {
if ("none" == reduction) { if ("none" == reduction) {
ctx->SetOutputDim("Loss", dim_x); ctx->SetOutputDim("Loss", dim_x);
} else { } else {
ctx->SetOutputDim("Loss", framework::make_ddim({1})); ctx->SetOutputDim("Loss", {1});
} }
} }
...@@ -81,7 +81,7 @@ class KLDivLossOpMaker : public framework::OpProtoAndCheckerMaker { ...@@ -81,7 +81,7 @@ class KLDivLossOpMaker : public framework::OpProtoAndCheckerMaker {
"The reduction type to apply to the output, available types " "The reduction type to apply to the output, available types "
"are 'none' | 'batchmean' | 'mean' | 'sum', 'none' for no " "are 'none' | 'batchmean' | 'mean' | 'sum', 'none' for no "
"reduction, 'batchmean' for the sum of output divided by " "reduction, 'batchmean' for the sum of output divided by "
"batchmean size, 'mean' for the average value of all output, " "batch size, 'mean' for the average value of all output, "
"'sum' for the sum of the output.") "'sum' for the sum of the output.")
.SetDefault("mean"); .SetDefault("mean");
......
...@@ -104,7 +104,8 @@ class KLDivLossGradKernel : public framework::OpKernel<T> { ...@@ -104,7 +104,8 @@ class KLDivLossGradKernel : public framework::OpKernel<T> {
auto loss_grad_expand = loss_grad_t.broadcast(Array1(expand)); auto loss_grad_expand = loss_grad_t.broadcast(Array1(expand));
auto grad_t = target_t * loss_grad_expand; auto grad_t = target_t * loss_grad_expand;
input_grad_t.device(place) = target_t.binaryExpr(grad_t, KLDivLossBackward<T>()); input_grad_t.device(place) =
target_t.binaryExpr(grad_t, KLDivLossBackward<T>());
if ("mean" == reduction) { if ("mean" == reduction) {
input_grad_t.device(place) = input_grad_t / static_cast<T>(numel); input_grad_t.device(place) = input_grad_t / static_cast<T>(numel);
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册