未验证 提交 075e1cf7 编写于 作者: W whs 提交者: GitHub

Add python API for appending LoD level (#18702)

* Make lod reset op support for append lod level.

* Fix API.spec
test=develop

* Fix unittest.
test=develop

* Add python api for lod append.
test=develop

* Fix API.spec
test=develop

* Fix format of doc.
test=develop

* Fix unittest.
test=develop

* Fix doc.
test=develop
上级 8de5aa1b
......@@ -163,7 +163,8 @@ paddle.fluid.layers.autoincreased_step_counter (ArgSpec(args=['counter_name', 'b
paddle.fluid.layers.reshape (ArgSpec(args=['x', 'shape', 'actual_shape', 'act', 'inplace', 'name'], varargs=None, keywords=None, defaults=(None, None, False, None)), ('document', '6196c9ec3075ca5a9c058ea1f8492256'))
paddle.fluid.layers.squeeze (ArgSpec(args=['input', 'axes', 'name'], varargs=None, keywords=None, defaults=(None,)), ('document', 'ebbac07662a6e22e8e299ced880c7775'))
paddle.fluid.layers.unsqueeze (ArgSpec(args=['input', 'axes', 'name'], varargs=None, keywords=None, defaults=(None,)), ('document', 'b9bd3129d36a70e7c4385df51ff71c62'))
paddle.fluid.layers.lod_reset (ArgSpec(args=['x', 'y', 'target_lod'], varargs=None, keywords=None, defaults=(None, None)), ('document', '9a72a7c8c80926150ea826e94efd7e9b'))
paddle.fluid.layers.lod_reset (ArgSpec(args=['x', 'y', 'target_lod'], varargs=None, keywords=None, defaults=(None, None)), ('document', '74498d37dd622ac472cb36887fce09ea'))
paddle.fluid.layers.lod_append (ArgSpec(args=['x', 'level'], varargs=None, keywords=None, defaults=None), ('document', '527cc61c20a8fa83008115440419f92b'))
paddle.fluid.layers.lrn (ArgSpec(args=['input', 'n', 'k', 'alpha', 'beta', 'name'], varargs=None, keywords=None, defaults=(5, 1.0, 0.0001, 0.75, None)), ('document', '73d297256da8954617996958d26ee93d'))
paddle.fluid.layers.pad (ArgSpec(args=['x', 'paddings', 'pad_value', 'name'], varargs=None, keywords=None, defaults=(0.0, None)), ('document', '2f189f8ef61f1c23779e1593b78755c0'))
paddle.fluid.layers.pad_constant_like (ArgSpec(args=['x', 'y', 'pad_value', 'name'], varargs=None, keywords=None, defaults=(0.0, None)), ('document', '95aa1972983f30fe9b5a3713e523e20f'))
......
......@@ -36,7 +36,10 @@ class LoDResetOp : public framework::OperatorWithKernel {
} else if (ctx->IsRuntime()) {
ctx->ShareLoD("Y", "Out");
}
auto append = ctx->Attrs().Get<bool>("append");
if (append) {
ctx->ShareLoD("X", /*->*/ "Out");
}
ctx->SetOutputDim("Out", ctx->GetInputDim("X"));
}
......@@ -53,10 +56,14 @@ class LoDResetOpVarTypeInference : public framework::VarTypeInference {
void operator()(framework::InferVarTypeContext *ctx) const override {
auto x_var_name = ctx->Input("X").front();
auto out_var_name = ctx->Output("Out").front();
bool append = boost::get<bool>(ctx->GetAttr("append"));
if (ctx->HasInput("Y")) {
auto y_var_name = ctx->Input("Y").front();
auto y_lod_level = std::max(ctx->GetLoDLevel(y_var_name), 1);
ctx->SetLoDLevel(out_var_name, y_lod_level);
} else if (append) {
auto x_lod_level = std::max(ctx->GetLoDLevel(x_var_name), 1);
ctx->SetLoDLevel(out_var_name, x_lod_level);
} else {
ctx->SetLoDLevel(out_var_name, 1);
}
......@@ -84,6 +91,7 @@ class LoDResetOpMaker : public framework::OpProtoAndCheckerMaker {
AddAttr<std::vector<int>>("target_lod",
"The target level 0 LoD from Attr().")
.SetDefault(std::vector<int>{});
AddAttr<bool>("append", "Append data to lod vector.").SetDefault(false);
AddComment(R"DOC(LoDReset operator
Set LoD of `X` to a new one specified by `Y` or attribute `target_lod`. When `Y`
......
......@@ -29,6 +29,7 @@ class LoDResetKernel : public framework::OpKernel<T> {
auto* out = ctx.Output<framework::LoDTensor>("Out");
auto* in = ctx.Input<framework::LoDTensor>("X");
auto* lod_t = ctx.Input<framework::LoDTensor>("Y");
bool append = ctx.Attr<bool>("append");
out->ShareDataWith(*in);
......@@ -71,10 +72,15 @@ class LoDResetKernel : public framework::OpKernel<T> {
std::vector<size_t> ulevel0(level0.size(), 0);
std::transform(level0.begin(), level0.end(), ulevel0.begin(),
[](int a) { return static_cast<size_t>(a); });
if (append) {
auto* out_lod = out->mutable_lod();
out_lod->push_back(ulevel0);
} else {
framework::LoD target_lod;
target_lod.push_back(ulevel0);
out->set_lod(target_lod);
}
}
};
template <typename DeviceContext, typename T>
......
......@@ -107,6 +107,7 @@ __all__ = [
'squeeze',
'unsqueeze',
'lod_reset',
'lod_append',
'lrn',
'pad',
'pad_constant_like',
......@@ -6980,7 +6981,7 @@ def lod_reset(x, y=None, target_lod=None):
considered as target LoD first, otherwise :attr:`y.data` would be
considered as target LoD. If :attr:`y` is not provided, target LoD should
be specified by :attr:`target_lod`. If target LoD is specified by
:attr:`Y.data` or :attr:`target_lod`, only one level LoD is supported.
:attr:`y.data` or :attr:`target_lod`, only one level LoD is supported.
.. code-block:: text
......@@ -7032,7 +7033,7 @@ def lod_reset(x, y=None, target_lod=None):
out.dims = [6, 1]
Args:
x (Variable): Input variable which could be a Tensor or LodTensor.
x (Variable): Input variable which could be a Tensor or LoDTensor.
y (Variable|None): If provided, output's LoD would be derived
from :attr:`y`.
target_lod (list|tuple|None): One level LoD which should be considered
......@@ -7065,8 +7066,60 @@ def lod_reset(x, y=None, target_lod=None):
attrs={'target_lod': target_lod},
outputs={'Out': out})
else:
raise ValueError("y and target_lod should not be both None.")
raise ValueError("y and target_lod should not be both none.")
return out
def lod_append(x, level):
    """
    Append :attr:`level` as a new (deepest) LoD level to the LoD of :attr:`x`.

    .. code-block:: text

        * Example 1:

            given a 1-level LoDTensor x:
                x.lod =  [[ 2,           3,                   1 ]]
                x.data = [[1.0], [2.0], [3.0], [4.0], [5.0], [6.0]]
                x.dims = [6, 1]

            level: [1, 1, 1, 1, 1, 1]

            then we get a 2-level LoDTensor:
                x.lod =  [[ 2, 3, 1 ], [1, 1, 1, 1, 1, 1]]
                x.data = [[1.0], [2.0], [3.0], [4.0], [5.0], [6.0]]
                x.dims = [6, 1]

    Args:
        x (Variable): Input variable which could be a tensor or LoDTensor.
        level (list|tuple): The LoD level to be appended into LoD of x.

    Returns:
        Variable: Output variable with new LoD level.

    Raises:
        ValueError: If :attr:`x` is None or :attr:`level` is not Iterable.

    Examples:
        .. code-block:: python

            import paddle.fluid as fluid
            x = fluid.layers.data(name='x', shape=[6, 10], lod_level=1)
            out = fluid.layers.lod_append(x, [1,1,1,1,1,1])
    """
    # `collections.Iterable` was removed in Python 3.10; prefer the
    # `collections.abc` location and fall back for old interpreters.
    try:
        from collections.abc import Iterable
    except ImportError:
        from collections import Iterable

    if x is None:
        raise ValueError("Input(x) can't be None.")
    if not isinstance(level, Iterable):
        raise ValueError("Input(level) must be list or tuple.")

    helper = LayerHelper("lod_append", **locals())
    out = helper.create_variable_for_type_inference(dtype=x.dtype)
    # Reuse the lod_reset op with append=True so that `level` is pushed onto
    # x's existing LoD instead of replacing it.
    helper.append_op(
        type="lod_reset",
        inputs={'X': x},
        attrs={'target_lod': level,
               'append': True},
        outputs={'Out': out})
    return out
......
......@@ -1793,6 +1793,14 @@ class TestBook(LayerTest):
self.assertTrue(z.lod_level == 1)
return z
def test_lod_append(self):
    # Build a static graph that appends a LoD level via lod_append and
    # check the inferred LoD level of the result variable.
    with self.static_graph():
        x = layers.data(
            name='x', shape=[6, 10], dtype='float32', lod_level=1)
        y = layers.lod_append(x, [1, 1, 1, 1, 1, 1])
        # assertEqual reports both values on failure, unlike assertTrue(==).
        self.assertEqual(y.lod_level, 1)
    return y
def test_affine_grid(self):
with self.static_graph():
data = layers.data(name='data', shape=[2, 3, 3], dtype="float32")
......
......@@ -101,5 +101,26 @@ class TestLodResetOpYIsLoDTensor(OpTest):
self.check_grad(["X"], "Out", no_grad_set=set("Y"))
class TestLodAppendOpByAttr(OpTest):
    """Exercise lod_reset with append=True: the attribute-supplied LoD is
    appended as a new (deeper) level instead of replacing the existing one."""

    def setUp(self):
        self.op_type = "lod_reset"
        x = np.random.random((10, 20)).astype("float32")
        # Existing length-based LoD of the input: three sequences of 3, 2, 5.
        lod = [[3, 2, 5]]
        # The `target_lod` attribute is offset-based: offsets 0..10 describe
        # ten unit-length sequences over the 10 input rows.
        target_offset_lod = list(range(11))
        self.inputs = {'X': (x, lod)}
        # Expected output keeps the original level and appends the new one,
        # expressed here in length-based format.
        out_lod = [[3, 2, 5], [1] * 10]
        self.attrs = {'target_lod': target_offset_lod, 'append': True}
        self.outputs = {'Out': (x, out_lod)}

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(["X"], "Out")
# Run all op tests in this module when executed directly.
if __name__ == '__main__':
    unittest.main()
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册