From 075e1cf78e4104a6c19bd64e593afac00a2f2c21 Mon Sep 17 00:00:00 2001
From: whs
Date: Wed, 24 Jul 2019 18:20:21 +0800
Subject: [PATCH] Add python API for appending LoD level (#18702)

* Make lod reset op support for append lod level.
* Fix API.spec test=develop
* Fix unitest. test=develop
* Add python api for lod append. test=develop
* Fix API.spec test=develop
* Fix format of doc. test=develop
* Fix unitest. test=develop
* Fix doc. test=develop
---
 paddle/fluid/API.spec                            |  3 +-
 paddle/fluid/operators/lod_reset_op.cc           | 10 +++-
 paddle/fluid/operators/lod_reset_op.h            | 12 +++-
 python/paddle/fluid/layers/nn.py                 | 59 ++++++++++++++++++-
 .../fluid/tests/unittests/test_layers.py         |  8 +++
 .../tests/unittests/test_lod_reset_op.py         | 21 +++++++
 6 files changed, 105 insertions(+), 8 deletions(-)

diff --git a/paddle/fluid/API.spec b/paddle/fluid/API.spec
index 99cdb33136..6cc9e9e1db 100755
--- a/paddle/fluid/API.spec
+++ b/paddle/fluid/API.spec
@@ -163,7 +163,8 @@ paddle.fluid.layers.autoincreased_step_counter (ArgSpec(args=['counter_name', 'b
 paddle.fluid.layers.reshape (ArgSpec(args=['x', 'shape', 'actual_shape', 'act', 'inplace', 'name'], varargs=None, keywords=None, defaults=(None, None, False, None)), ('document', '6196c9ec3075ca5a9c058ea1f8492256'))
 paddle.fluid.layers.squeeze (ArgSpec(args=['input', 'axes', 'name'], varargs=None, keywords=None, defaults=(None,)), ('document', 'ebbac07662a6e22e8e299ced880c7775'))
 paddle.fluid.layers.unsqueeze (ArgSpec(args=['input', 'axes', 'name'], varargs=None, keywords=None, defaults=(None,)), ('document', 'b9bd3129d36a70e7c4385df51ff71c62'))
-paddle.fluid.layers.lod_reset (ArgSpec(args=['x', 'y', 'target_lod'], varargs=None, keywords=None, defaults=(None, None)), ('document', '9a72a7c8c80926150ea826e94efd7e9b'))
+paddle.fluid.layers.lod_reset (ArgSpec(args=['x', 'y', 'target_lod'], varargs=None, keywords=None, defaults=(None, None)), ('document', '74498d37dd622ac472cb36887fce09ea'))
+paddle.fluid.layers.lod_append (ArgSpec(args=['x', 'level'], varargs=None, keywords=None, defaults=None), ('document', '527cc61c20a8fa83008115440419f92b'))
 paddle.fluid.layers.lrn (ArgSpec(args=['input', 'n', 'k', 'alpha', 'beta', 'name'], varargs=None, keywords=None, defaults=(5, 1.0, 0.0001, 0.75, None)), ('document', '73d297256da8954617996958d26ee93d'))
 paddle.fluid.layers.pad (ArgSpec(args=['x', 'paddings', 'pad_value', 'name'], varargs=None, keywords=None, defaults=(0.0, None)), ('document', '2f189f8ef61f1c23779e1593b78755c0'))
 paddle.fluid.layers.pad_constant_like (ArgSpec(args=['x', 'y', 'pad_value', 'name'], varargs=None, keywords=None, defaults=(0.0, None)), ('document', '95aa1972983f30fe9b5a3713e523e20f'))
diff --git a/paddle/fluid/operators/lod_reset_op.cc b/paddle/fluid/operators/lod_reset_op.cc
index 458037c5ac..409f8397eb 100644
--- a/paddle/fluid/operators/lod_reset_op.cc
+++ b/paddle/fluid/operators/lod_reset_op.cc
@@ -36,7 +36,10 @@ class LoDResetOp : public framework::OperatorWithKernel {
     } else if (ctx->IsRuntime()) {
       ctx->ShareLoD("Y", "Out");
     }
-
+    auto append = ctx->Attrs().Get<bool>("append");
+    if (append) {
+      ctx->ShareLoD("X", /*->*/ "Out");
+    }
     ctx->SetOutputDim("Out", ctx->GetInputDim("X"));
   }
 
@@ -53,10 +56,14 @@ class LoDResetOpVarTypeInference : public framework::VarTypeInference {
   void operator()(framework::InferVarTypeContext *ctx) const override {
     auto x_var_name = ctx->Input("X").front();
     auto out_var_name = ctx->Output("Out").front();
+    bool append = boost::get<bool>(ctx->GetAttr("append"));
     if (ctx->HasInput("Y")) {
       auto y_var_name = ctx->Input("Y").front();
       auto y_lod_level = std::max(ctx->GetLoDLevel(y_var_name), 1);
       ctx->SetLoDLevel(out_var_name, y_lod_level);
+    } else if (append) {
+      auto x_lod_level = std::max(ctx->GetLoDLevel(x_var_name), 1);
+      ctx->SetLoDLevel(out_var_name, x_lod_level);
     } else {
       ctx->SetLoDLevel(out_var_name, 1);
     }
@@ -84,6 +91,7 @@ class LoDResetOpMaker : public framework::OpProtoAndCheckerMaker {
     AddAttr<std::vector<int>>("target_lod",
                               "The target level 0 LoD from Attr<std::vector<int>>().")
         .SetDefault(std::vector<int>{});
+    AddAttr<bool>("append", "Append data to lod vector.").SetDefault(false);
     AddComment(R"DOC(LoDReset operator
 
 Set LoD of `X` to a new one specified by `Y` or attribute `target_lod`. When `Y`
diff --git a/paddle/fluid/operators/lod_reset_op.h b/paddle/fluid/operators/lod_reset_op.h
index 1c2f0b0ac8..d827f2a2eb 100644
--- a/paddle/fluid/operators/lod_reset_op.h
+++ b/paddle/fluid/operators/lod_reset_op.h
@@ -29,6 +29,7 @@ class LoDResetKernel : public framework::OpKernel<T> {
     auto* out = ctx.Output<framework::LoDTensor>("Out");
     auto* in = ctx.Input<framework::LoDTensor>("X");
     auto* lod_t = ctx.Input<framework::LoDTensor>("Y");
+    bool append = ctx.Attr<bool>("append");
 
     out->ShareDataWith(*in);
 
@@ -71,9 +72,14 @@ class LoDResetKernel : public framework::OpKernel<T> {
     std::vector<size_t> ulevel0(level0.size(), 0);
     std::transform(level0.begin(), level0.end(), ulevel0.begin(),
                    [](int a) { return static_cast<size_t>(a); });
-    framework::LoD target_lod;
-    target_lod.push_back(ulevel0);
-    out->set_lod(target_lod);
+    if (append) {
+      auto* out_lod = out->mutable_lod();
+      out_lod->push_back(ulevel0);
+    } else {
+      framework::LoD target_lod;
+      target_lod.push_back(ulevel0);
+      out->set_lod(target_lod);
+    }
   }
 };
 
diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py
index f859a19a18..3412c995c0 100644
--- a/python/paddle/fluid/layers/nn.py
+++ b/python/paddle/fluid/layers/nn.py
@@ -107,6 +107,7 @@ __all__ = [
     'squeeze',
     'unsqueeze',
     'lod_reset',
+    'lod_append',
     'lrn',
     'pad',
     'pad_constant_like',
@@ -6980,7 +6981,7 @@ def lod_reset(x, y=None, target_lod=None):
     considered as target LoD first, otherwise :attr:`y.data` would be
     considered as target LoD. If :attr:`y` is not provided, target LoD should
     be specified by :attr:`target_lod`. If target LoD is specified by
-    :attr:`Y.data` or :attr:`target_lod`, only one level LoD is supported.
+    :attr:`y.data` or :attr:`target_lod`, only one level LoD is supported.
 
     .. code-block:: text
 
@@ -7032,7 +7033,7 @@ def lod_reset(x, y=None, target_lod=None):
             out.dims = [6, 1]
 
     Args:
-        x (Variable): Input variable which could be a Tensor or LodTensor.
+        x (Variable): Input variable which could be a Tensor or LoDTensor.
         y (Variable|None): If provided, output's LoD would be derived from
                            :attr:`y`.
         target_lod (list|tuple|None): One level LoD which should be considered
@@ -7065,8 +7066,60 @@ def lod_reset(x, y=None, target_lod=None):
             attrs={'target_lod': target_lod},
             outputs={'Out': out})
     else:
-        raise ValueError("y and target_lod should not be both None.")
+        raise ValueError("y and target_lod should not be both none.")
+    return out
+
+
+def lod_append(x, level):
+    """
+    Append level to LoD of :attr:`x`.
+
+    .. code-block:: text
+
+        * Example 1:
+
+            given a 1-level LoDTensor x:
+                x.lod = [[ 2, 3, 1 ]]
+                x.data = [[1.0], [2.0], [3.0], [4.0], [5.0], [6.0]]
+                x.dims = [6, 1]
+
+            level: [1, 1, 1, 1, 1, 1]
+
+            then we get a 2-level LoDTensor:
+                x.lod = [[ 2, 3, 1 ], [1, 1, 1, 1, 1, 1]]
+                x.data = [[1.0], [2.0], [3.0], [4.0], [5.0], [6.0]]
+                x.dims = [6, 1]
+    Args:
+        x (Variable): Input variable which could be a Tensor or LoDTensor.
+        level (list|tuple): The LoD level to be appended into LoD of x.
+
+    Returns:
+        Variable: Output variable with new LoD level.
+
+    Raises:
+        ValueError: If :attr:`x` is None or :attr:`level` is not a list or tuple.
+
+    Examples:
+        .. code-block:: python
+
+            import paddle.fluid as fluid
+            x = fluid.layers.data(name='x', shape=[6, 10], lod_level=1)
+            out = fluid.layers.lod_append(x, [1,1,1,1,1,1])
+    """
+    from collections import Iterable
+    if x is None:
+        raise ValueError("Input(x) can't be None.")
+    if not isinstance(level, Iterable):
+        raise ValueError("Input(level) must be list or tuple.")
+    helper = LayerHelper("lod_append", **locals())
+    out = helper.create_variable_for_type_inference(dtype=x.dtype)
+    helper.append_op(
+        type="lod_reset",
+        inputs={'X': x},
+        attrs={'target_lod': level,
+               'append': True},
+        outputs={'Out': out})
     return out
 
 
diff --git a/python/paddle/fluid/tests/unittests/test_layers.py b/python/paddle/fluid/tests/unittests/test_layers.py
index 944b1bb12f..b071ce0a75 100644
--- a/python/paddle/fluid/tests/unittests/test_layers.py
+++ b/python/paddle/fluid/tests/unittests/test_layers.py
@@ -1793,6 +1793,14 @@ class TestBook(LayerTest):
             self.assertTrue(z.lod_level == 1)
         return z
 
+    def test_lod_append(self):
+        with self.static_graph():
+            x = layers.data(
+                name='x', shape=[6, 10], dtype='float32', lod_level=1)
+            y = layers.lod_append(x, [1, 1, 1, 1, 1, 1])
+            self.assertTrue(y.lod_level == 1)
+            return y
+
     def test_affine_grid(self):
         with self.static_graph():
             data = layers.data(name='data', shape=[2, 3, 3], dtype="float32")
diff --git a/python/paddle/fluid/tests/unittests/test_lod_reset_op.py b/python/paddle/fluid/tests/unittests/test_lod_reset_op.py
index 31f364a42f..6947ea7c8d 100644
--- a/python/paddle/fluid/tests/unittests/test_lod_reset_op.py
+++ b/python/paddle/fluid/tests/unittests/test_lod_reset_op.py
@@ -101,5 +101,26 @@ class TestLodResetOpYIsLoDTensor(OpTest):
         self.check_grad(["X"], "Out", no_grad_set=set("Y"))
 
 
+class TestLodAppendOpByAttr(OpTest):
+    def setUp(self):
+        self.op_type = "lod_reset"
+        x = np.random.random((10, 20)).astype("float32")
+        lod = [[3, 2, 5]]
+        # target_offset_lod and target_lod are the same lod info represented
+        # in offset-based format and length-based format, respectively.
+        target_offset_lod = [i for i in range(11)]
+        self.inputs = {'X': (x, lod)}
+        out_lod = [[3, 2, 5], [1] * 10]
+        # The `target_lod` attribute is still based on offset
+        self.attrs = {'target_lod': target_offset_lod, 'append': True}
+        self.outputs = {'Out': (x, out_lod)}
+
+    def test_check_output(self):
+        self.check_output()
+
+    def test_check_grad(self):
+        self.check_grad(["X"], "Out")
+
+
 if __name__ == '__main__':
     unittest.main()
--
GitLab
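Not part of the patch: the comments in TestLodAppendOpByAttr distinguish the offset-based LoD form consumed by the `target_lod` attribute from the length-based form used in the docstring examples. The sketch below only illustrates how those two representations map onto each other for the values used in that test; the helper names lengths_to_offsets and offsets_to_lengths are hypothetical and do not exist in Paddle.

# Illustrative sketch only: offset-based vs. length-based LoD levels,
# matching the values used in TestLodAppendOpByAttr above.

def lengths_to_offsets(lengths):
    # Length-based level [3, 2, 5] -> offset-based level [0, 3, 5, 10].
    offsets = [0]
    for n in lengths:
        offsets.append(offsets[-1] + n)
    return offsets


def offsets_to_lengths(offsets):
    # Offset-based level [0, 3, 5, 10] -> length-based level [3, 2, 5].
    return [b - a for a, b in zip(offsets[:-1], offsets[1:])]


if __name__ == '__main__':
    # The test passes target_offset_lod = [0, 1, ..., 10], which is the
    # offset form of the ten length-1 sequences written as [1] * 10 in out_lod.
    assert lengths_to_offsets([1] * 10) == list(range(11))
    assert offsets_to_lengths(list(range(11))) == [1] * 10
    # The input lod [[3, 2, 5]] corresponds to offsets [0, 3, 5, 10].
    assert lengths_to_offsets([3, 2, 5]) == [0, 3, 5, 10]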
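For a sense of how the new layer is meant to be called once this change is in place, here is a rough usage sketch in the spirit of the lod_append docstring example and test_lod_append. It assumes a Paddle Fluid 1.x build that contains this patch and only exercises graph construction; the runtime LoD itself is produced by the lod_reset kernel.

# Rough usage sketch, assuming a Paddle Fluid 1.x install with this patch applied.
import paddle.fluid as fluid

# A 1-level LoD input, as in the docstring example above.
x = fluid.layers.data(name='x', shape=[6, 10], dtype='float32', lod_level=1)

# Append one more level below the existing one.
out = fluid.layers.lod_append(x, [1, 1, 1, 1, 1, 1])

# test_lod_append above checks the statically inferred level of the result.
print(out.lod_level)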