From efd2d3351ca165ecf846f5c3dfe43a009e139666 Mon Sep 17 00:00:00 2001
From: wwhu
Date: Mon, 29 May 2017 15:11:47 +0800
Subject: [PATCH] add multiplex layer for python API

---
 .../paddle/trainer_config_helpers/layers.py        | 53 ++++++++++++++++
 .../tests/configs/file_list.sh                     |  2 +-
 .../protostr/test_multiplex_layer.protostr         | 63 +++++++++++++++++++
 .../tests/configs/test_multiplex_layer.py          | 12 ++++
 4 files changed, 129 insertions(+), 1 deletion(-)
 create mode 100644 python/paddle/trainer_config_helpers/tests/configs/protostr/test_multiplex_layer.protostr
 create mode 100644 python/paddle/trainer_config_helpers/tests/configs/test_multiplex_layer.py

diff --git a/python/paddle/trainer_config_helpers/layers.py b/python/paddle/trainer_config_helpers/layers.py
index ec81e1dc3d2..049b78d514c 100755
--- a/python/paddle/trainer_config_helpers/layers.py
+++ b/python/paddle/trainer_config_helpers/layers.py
@@ -119,6 +119,7 @@ __all__ = [
     'eos_layer',
     'smooth_l1_cost',
     'layer_support',
+    'multiplex_layer',
 ]
 
 
@@ -185,6 +186,7 @@ class LayerType(object):
     MAXOUT = "maxout"
     SPP_LAYER = "spp"
     PAD_LAYER = "pad"
+    MULTIPLEX_LAYER = "multiplex"
 
     PRINT_LAYER = "print"
     PRIORBOX_LAYER = "priorbox"
@@ -5465,3 +5467,54 @@ def smooth_l1_cost(input, label, name=None, coeff=1.0, layer_attr=None):
         **ExtraLayerAttribute.to_kwargs(layer_attr))
     return LayerOutput(
         name, LayerType.SMOOTH_L1, parents=[input, label], size=1)
+
+
+@wrap_name_default()
+def multiplex_layer(input, name=None, layer_attr=None):
+    """
+    This layer multiplexes several input layers into one output, row by row,
+    according to the index provided by the first input layer.
+    inputs[0]: the index of the layer to output, of size batchSize.
+    inputs[1:N]: the candidate output data layers.
+    For each index i from 0 to batchSize - 1, the i-th row of the output is
+    the i-th row of the (index[i] + 1)-th input layer. That is:
+
+    .. math::
+
+        y[i][j] = x_{x_{0}[i] + 1}[i][j], j = 0, 1, ..., (x_{1}.width - 1)
+
+    where y is the output, :math:`x_{k}` is the k-th input layer, and
+    :math:`k = x_{0}[i] + 1`.
+
+    .. code-block:: python
+
+        out = multiplex_layer(input=layers)
+
+    :param input: Input layers; input[0] is the index, input[1:] the candidates.
+    :type input: list of LayerOutput
+    :param name: Layer name.
+    :type name: basestring
+    :param layer_attr: Extra layer attributes.
+    :type layer_attr: ExtraLayerAttribute.
+    :return: LayerOutput object.
+    :rtype: LayerOutput
+    """
+
+    assert isinstance(input, collections.Sequence)
+    assert len(input) > 2, 'multiplex_layer should have more than 2 inputs'
+    for i in range(1, len(input)):
+        assert isinstance(input[i], LayerOutput)
+        assert input[i].size == input[1].size, \
+            "All the input layers except the first one should have the same size"
+
+    l = Layer(
+        name=name,
+        type='multiplex',
+        inputs=[x.name for x in input],
+        size=input[1].size,
+        **ExtraLayerAttribute.to_kwargs(layer_attr))
+    return LayerOutput(
+        name=name,
+        layer_type=LayerType.MULTIPLEX_LAYER,
+        parents=input,
+        size=l.config.size)
diff --git a/python/paddle/trainer_config_helpers/tests/configs/file_list.sh b/python/paddle/trainer_config_helpers/tests/configs/file_list.sh
index c5dc8e1aab0..981ccbf2483 100755
--- a/python/paddle/trainer_config_helpers/tests/configs/file_list.sh
+++ b/python/paddle/trainer_config_helpers/tests/configs/file_list.sh
@@ -5,6 +5,6 @@ last_first_seq test_expand_layer test_ntm_layers test_hsigmoid
 img_layers img_trans_layers util_layers simple_rnn_layers unused_layers
 test_cost_layers test_rnn_group shared_fc shared_lstm shared_gru
 test_cost_layers_with_weight test_spp_layer test_bilinear_interp test_maxout
 test_bi_grumemory math_ops
-test_seq_concat_reshape test_pad test_smooth_l1)
+test_seq_concat_reshape test_pad test_smooth_l1 test_multiplex_layer)
 
 export whole_configs=(test_split_datasource)
diff --git a/python/paddle/trainer_config_helpers/tests/configs/protostr/test_multiplex_layer.protostr b/python/paddle/trainer_config_helpers/tests/configs/protostr/test_multiplex_layer.protostr
new file mode 100644
index 00000000000..379842ba8d3
--- /dev/null
+++ b/python/paddle/trainer_config_helpers/tests/configs/protostr/test_multiplex_layer.protostr
@@ -0,0 +1,63 @@
+type: "nn"
+layers {
+  name: "index"
+  type: "data"
+  size: 1
+  active_type: ""
+}
+layers {
+  name: "data1"
+  type: "data"
+  size: 30
+  active_type: ""
+}
+layers {
+  name: "data2"
+  type: "data"
+  size: 30
+  active_type: ""
+}
+layers {
+  name: "data3"
+  type: "data"
+  size: 30
+  active_type: ""
+}
+layers {
+  name: "__multiplex_layer_0__"
+  type: "multiplex"
+  size: 30
+  active_type: ""
+  inputs {
+    input_layer_name: "index"
+  }
+  inputs {
+    input_layer_name: "data1"
+  }
+  inputs {
+    input_layer_name: "data2"
+  }
+  inputs {
+    input_layer_name: "data3"
+  }
+}
+input_layer_names: "index"
+input_layer_names: "data1"
+input_layer_names: "data2"
+input_layer_names: "data3"
+output_layer_names: "__multiplex_layer_0__"
+sub_models {
+  name: "root"
+  layer_names: "index"
+  layer_names: "data1"
+  layer_names: "data2"
+  layer_names: "data3"
+  layer_names: "__multiplex_layer_0__"
+  input_layer_names: "index"
+  input_layer_names: "data1"
+  input_layer_names: "data2"
+  input_layer_names: "data3"
+  output_layer_names: "__multiplex_layer_0__"
+  is_recurrent_layer_group: false
+}
+
diff --git a/python/paddle/trainer_config_helpers/tests/configs/test_multiplex_layer.py b/python/paddle/trainer_config_helpers/tests/configs/test_multiplex_layer.py
new file mode 100644
index 00000000000..d2500019325
--- /dev/null
+++ b/python/paddle/trainer_config_helpers/tests/configs/test_multiplex_layer.py
@@ -0,0 +1,12 @@
+from paddle.trainer_config_helpers import *
+
+settings(batch_size=1000, learning_rate=1e-5)
+
+index = data_layer(name='index', size=1)
+din1 = data_layer(name='data1', size=30)
+din2 = data_layer(name='data2', size=30)
+din3 = data_layer(name='data3', size=30)
+
+dout = multiplex_layer([index, din1, din2, din3])
+
+outputs(dout)
--
GitLab
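Reviewer note (not part of the patch): below is a minimal NumPy sketch of the selection rule the new layer's docstring describes. The array names, sizes, and index values are illustrative assumptions, not anything from the Paddle implementation.

import numpy as np

# Illustrative shapes only: 4 samples, candidate layers of width 30.
batch_size, width = 4, 30

# inputs[0]: one index per sample, selecting among the candidate layers.
index = np.array([2, 0, 1, 0])

# inputs[1:4]: three candidate layers, all of the same width.
candidates = [np.random.rand(batch_size, width) for _ in range(3)]

# Selection rule from the docstring: row i of the output is row i of the
# (index[i] + 1)-th input, i.e. candidates[index[i]] once the index layer
# itself is excluded.
output = np.stack([candidates[index[i]][i] for i in range(batch_size)])

assert output.shape == (batch_size, width)

This also motivates the wrapper's size check: every input after the first must have the same size, since each candidate contributes rows of the same width to the output.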