Commit e782b54b authored by baojun, committed by tensor-tang

update softmax with axis arg test=develop (#17190)

Parent 71f0c6d5
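This commit makes the nGraph softmax bridge honor the operator's axis attribute: instead of flattening the input to 2-D and always reducing over axis 1, GetSoftmax and GetSoftmaxGrad now reduce and broadcast over the requested axis (negative values count back from the last dimension), the builder functions read the attribute via AttrReader, and the nGraph unit test additionally imports the axis-variant cases TestSoftmaxOp2 through TestSoftmaxOp5.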
@@ -27,38 +27,38 @@ namespace paddle {
 namespace operators {
 namespace ngraphs {
-std::shared_ptr<ngraph::Node> GetSoftmax(std::shared_ptr<ngraph::Node> x) {
+std::shared_ptr<ngraph::Node> GetSoftmax(std::shared_ptr<ngraph::Node> x,
+                                         int axis = -1) {
   auto x_shape = x->get_shape();
-  int rank = x_shape.size();
-  auto x_2d_shape = paddle::platform::FlattenTo2d(x_shape, rank - 1);
-  x = paddle::platform::NgReshaper(x, x_2d_shape);
+  size_t rank = x_shape.size();
+  size_t softmax_axis = axis;
+  if (axis < 0) softmax_axis = rank + axis;
-  auto x_max = std::make_shared<ngraph::op::Max>(x, ngraph::AxisSet{1});
+  auto x_max =
+      std::make_shared<ngraph::op::Max>(x, ngraph::AxisSet{softmax_axis});
   auto x_max_bcast = std::make_shared<ngraph::op::Broadcast>(
-      x_max, x_2d_shape, ngraph::AxisSet{1});
+      x_max, x_shape, ngraph::AxisSet{softmax_axis});
   auto x_shifted = x - x_max_bcast;
   auto x_clipped =
       paddle::operators::ngraphs::ElementwiseScalar<ngraph::op::Maximum>(
           -64., x_shifted);
-  auto softmax =
-      std::make_shared<ngraph::op::Softmax>(x_clipped, ngraph::AxisSet{1});
+  auto softmax = std::make_shared<ngraph::op::Softmax>(
+      x_clipped, ngraph::AxisSet{softmax_axis});
   return softmax;
 }
-std::shared_ptr<ngraph::Node> GetSoftmaxGrad(
-    std::shared_ptr<ngraph::Node> out, std::shared_ptr<ngraph::Node> dout) {
+std::shared_ptr<ngraph::Node> GetSoftmaxGrad(std::shared_ptr<ngraph::Node> out,
+                                             std::shared_ptr<ngraph::Node> dout,
+                                             int axis = -1) {
   auto out_shape = out->get_shape();
-  int rank = out_shape.size();
-  auto out_2d_shape = paddle::platform::FlattenTo2d(out_shape, rank - 1);
-  auto dout_2d_shape =
-      paddle::platform::FlattenTo2d(dout->get_shape(), rank - 1);
-  out = paddle::platform::NgReshaper(out, out_2d_shape);
-  dout = paddle::platform::NgReshaper(dout, dout_2d_shape);
+  size_t rank = out_shape.size();
+  size_t softmax_axis = axis;
+  if (axis < 0) softmax_axis = rank + axis;
-  auto node_sum =
-      std::make_shared<ngraph::op::Sum>(out * dout, ngraph::AxisSet{1});
+  auto node_sum = std::make_shared<ngraph::op::Sum>(
+      out * dout, ngraph::AxisSet{softmax_axis});
   auto node_bcast = std::make_shared<ngraph::op::Broadcast>(
-      node_sum, out_2d_shape, ngraph::AxisSet{1});
+      node_sum, out_shape, ngraph::AxisSet{softmax_axis});
   auto dx = (dout - node_bcast) * out;
   return dx;
 }
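For reference, a minimal NumPy sketch (an illustration of the math, not Paddle's or nGraph's API) of what the two graph builders above assemble: the forward pass subtracts the per-axis max and clips the shifted logits at -64 before exponentiating, and the backward pass computes dx = (dout - sum(out * dout, axis)) * out. NumPy accepts negative axes directly, so the explicit rank + axis normalization the C++ code performs for ngraph::AxisSet is not needed here.

import numpy as np

def softmax(x, axis=-1):
    # Shift by the max along `axis` for numerical stability, then clip the
    # shifted logits at -64, mirroring the ElementwiseScalar<Maximum> node.
    x_shifted = x - np.max(x, axis=axis, keepdims=True)
    x_clipped = np.maximum(x_shifted, -64.0)
    e = np.exp(x_clipped)
    return e / np.sum(e, axis=axis, keepdims=True)

def softmax_grad(out, dout, axis=-1):
    # Mirrors GetSoftmaxGrad: dx = (dout - sum(out * dout, axis)) * out.
    s = np.sum(out * dout, axis=axis, keepdims=True)
    return (dout - s) * out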
@@ -68,8 +68,9 @@ void BuildSoftmaxNode(
     std::shared_ptr<
         std::unordered_map<std::string, std::shared_ptr<ngraph::Node>>>
         ngb_node_map) {
+  auto op_attrs = framework::AttrReader(op->Attrs());
   auto x = paddle::platform::GetInputNode(op, "X", ngb_node_map);
-  auto softmax = GetSoftmax(x);
+  auto softmax = GetSoftmax(x, op_attrs.Get<int>("axis"));
   paddle::platform::SetOutputNode(op, "Out", softmax, ngb_node_map);
 }
@@ -78,9 +79,10 @@ void BuildSoftmaxGradNode(
     std::shared_ptr<
         std::unordered_map<std::string, std::shared_ptr<ngraph::Node>>>
         ngb_node_map) {
+  auto op_attrs = framework::AttrReader(op->Attrs());
   auto out = paddle::platform::GetInputNode(op, "Out", ngb_node_map);
   auto dout = paddle::platform::GetInputNode(op, "Out@GRAD", ngb_node_map);
-  auto dx = GetSoftmaxGrad(out, dout);
+  auto dx = GetSoftmaxGrad(out, dout, op_attrs.Get<int>("axis"));
   paddle::platform::SetOutputNode(op, "X@GRAD", dx, ngb_node_map);
 }
 }  // namespace ngraphs
@@ -14,7 +14,7 @@
 from __future__ import print_function
 import unittest
-from paddle.fluid.tests.unittests.test_softmax_op import TestSoftmaxOp
+from paddle.fluid.tests.unittests.test_softmax_op import TestSoftmaxOp, TestSoftmaxOp2, TestSoftmaxOp3, TestSoftmaxOp4, TestSoftmaxOp5
 if __name__ == "__main__":
     unittest.main()
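A quick sanity check of the sketch shown earlier (hypothetical, reusing the softmax/softmax_grad helpers defined there), exercising a non-default axis the way the newly imported TestSoftmaxOp2 through TestSoftmaxOp5 cases exercise the operator:

x = np.random.rand(2, 3, 4)
y = softmax(x, axis=1)
assert np.allclose(y.sum(axis=1), 1.0)  # probabilities sum to 1 along axis 1
dx = softmax_grad(y, np.ones_like(y), axis=1)
assert np.allclose(dx, 0.0)  # gradient of sum(y) vanishes: outputs sum to a constant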