From 16922e00937525005829caef87993ed50ad071de Mon Sep 17 00:00:00 2001
From: Tao Luo
Date: Tue, 7 May 2019 10:35:50 +0800
Subject: [PATCH] fix api_example of tree_conv (#17239)

test=develop
---
 paddle/fluid/API.spec            |  2 +-
 python/paddle/fluid/layers/nn.py | 22 ++++++++++------------
 2 files changed, 11 insertions(+), 13 deletions(-)

diff --git a/paddle/fluid/API.spec b/paddle/fluid/API.spec
index 13adab474b2..03c7f32a126 100644
--- a/paddle/fluid/API.spec
+++ b/paddle/fluid/API.spec
@@ -227,7 +227,7 @@ paddle.fluid.layers.psroi_pool (ArgSpec(args=['input', 'rois', 'output_channels'
 paddle.fluid.layers.teacher_student_sigmoid_loss (ArgSpec(args=['input', 'label', 'soft_max_up_bound', 'soft_max_lower_bound'], varargs=None, keywords=None, defaults=(15.0, -15.0)), ('document', '2f6ff96864054a31aa4bb659c6722c99'))
 paddle.fluid.layers.huber_loss (ArgSpec(args=['input', 'label', 'delta'], varargs=None, keywords=None, defaults=None), ('document', '431a4301c35032166ec029f7432c80a7'))
 paddle.fluid.layers.kldiv_loss (ArgSpec(args=['x', 'target', 'reduction', 'name'], varargs=None, keywords=None, defaults=('mean', None)), ('document', '776d536cac47c89073abc7ee524d5aec'))
-paddle.fluid.layers.tree_conv (ArgSpec(args=['nodes_vector', 'edge_set', 'output_size', 'num_filters', 'max_depth', 'act', 'param_attr', 'bias_attr', 'name'], varargs=None, keywords=None, defaults=(1, 2, 'tanh', None, None, None)), ('document', '34ea12ac9f10a65dccbc50100d12e607'))
+paddle.fluid.layers.tree_conv (ArgSpec(args=['nodes_vector', 'edge_set', 'output_size', 'num_filters', 'max_depth', 'act', 'param_attr', 'bias_attr', 'name'], varargs=None, keywords=None, defaults=(1, 2, 'tanh', None, None, None)), ('document', '2985a372ac897ea4e13aced7f930d6f8'))
 paddle.fluid.layers.npair_loss (ArgSpec(args=['anchor', 'positive', 'labels', 'l2_reg'], varargs=None, keywords=None, defaults=(0.002,)), ('document', '46994d10276dd4cb803b4062b5d14329'))
 paddle.fluid.layers.pixel_shuffle (ArgSpec(args=['x', 'upscale_factor'], varargs=None, keywords=None, defaults=None), ('document', '132b6e74ff642a392bd6b14c10aedc65'))
 paddle.fluid.layers.fsp_matrix (ArgSpec(args=['x', 'y'], varargs=None, keywords=None, defaults=None), ('document', 'b76ccca3735bea4a58a0dbf0d77c5393'))
diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py
index facc5ff11e7..428692cc63a 100644
--- a/python/paddle/fluid/layers/nn.py
+++ b/python/paddle/fluid/layers/nn.py
@@ -11051,21 +11051,19 @@ def tree_conv(nodes_vector,
     Examples:
         .. code-block:: python

-          nodes_vector = layers.data(name='vectors', shape=[None, 10, 5], dtype='float32)
-          # None for batch size, 10 for max_node_size of dataset, 5 for vector width
-          edge_set = layers.data(name='edge_set', shape=[None, 10, 2], dtype='float32')
-          # None for batch size, 10 for max_node_size of dataset, 2 for every edge has two nodes
+          # 10 for max_node_size of dataset, 5 for vector width
+          nodes_vector = fluid.layers.data(name='vectors', shape=[10, 5], dtype='float32')
+          # 10 for max_node_size of dataset, 2 for every edge has two nodes
           # edges must be directional
-          out_vector = layers.tree_conv(nodes_vector, edge_set, 6, 1, 2, 'tanh',
-              ParamAttr(initializer=Constant(1.0), ParamAttr(initializer=Constant(1.0))
-          # the shape of output will be [None, 10, 6, 1],
-          # None for batch size, 10 for max_node_size of dataset, 6 for output size, 1 for 1 filter
-          out_vector = layers.reshape(out_vector, shape=[None, 10, 6])
+          edge_set = fluid.layers.data(name='edge_set', shape=[10, 2], dtype='float32')
+          # the shape of output will be [10, 6, 1],
+          # 10 for max_node_size of dataset, 6 for output size, 1 for 1 filter
+          out_vector = fluid.layers.tree_conv(nodes_vector, edge_set, 6, 1, 2)
           # After reshape, output tensor could be nodes_vector for next tree convolution
-          out_vector_2 = layers.tree_conv(out_vector, edge_set, 3, 4, 2, 'tanh',
-              ParamAttr(initializer=Constant(1.0), ParamAttr(initializer=Constant(1.0))
+          out_vector = fluid.layers.reshape(out_vector, shape=[-1, 10, 6])
+          out_vector_2 = fluid.layers.tree_conv(out_vector, edge_set, 3, 4, 2)
           # also output tensor could be pooling(the pooling in paper called global pooling)
-          pooled = layers.reduce_max(out_vector, dims=2) # global pooling
+          pooled = fluid.layers.reduce_max(out_vector, dim=2) # global pooling
     """
     helper = LayerHelper("tree_conv", **locals())
     dtype = helper.input_dtype('nodes_vector')
-- 
GitLab
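
For reference, the corrected docstring example assembled as a self-contained script reads roughly as follows. This is a sketch assuming the paddle.fluid 1.x API this patch targets; the import line is added here for illustration and is not part of the diff.

    # minimal sketch of the corrected tree_conv example (paddle.fluid 1.x assumed)
    import paddle.fluid as fluid

    # 10 for max_node_size of dataset, 5 for vector width
    nodes_vector = fluid.layers.data(name='vectors', shape=[10, 5], dtype='float32')
    # 10 for max_node_size of dataset, 2 because every edge has two nodes
    # edges must be directional
    edge_set = fluid.layers.data(name='edge_set', shape=[10, 2], dtype='float32')

    # output shape is [10, 6, 1]:
    # 10 for max_node_size of dataset, 6 for output size, 1 for 1 filter
    out_vector = fluid.layers.tree_conv(nodes_vector, edge_set, 6, 1, 2)

    # after reshape, the output can serve as nodes_vector for the next tree convolution
    out_vector = fluid.layers.reshape(out_vector, shape=[-1, 10, 6])
    out_vector_2 = fluid.layers.tree_conv(out_vector, edge_set, 3, 4, 2)

    # the output can also be pooled (the paper calls this global pooling)
    pooled = fluid.layers.reduce_max(out_vector, dim=2)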