diff --git a/doc/api/v2/config/layer.rst b/doc/api/v2/config/layer.rst
index 203506d7ab84e5a5be2232b077eac2d433a99766..b2b55ec419d2f8453e067f202f6c1b7da6c201de 100644
--- a/doc/api/v2/config/layer.rst
+++ b/doc/api/v2/config/layer.rst
@@ -335,6 +335,16 @@ bilinear_interp
 .. autoclass:: paddle.v2.layer.bilinear_interp
     :noindex:
 
+dot_prod
+---------
+.. autoclass:: paddle.v2.layer.dot_prod
+    :noindex:
+
+out_prod
+--------
+.. autoclass:: paddle.v2.layer.out_prod
+    :noindex:
+
 power
 -----
 .. autoclass:: paddle.v2.layer.power
diff --git a/paddle/gserver/layers/DotProdLayer.cpp b/paddle/gserver/layers/DotProdLayer.cpp
index ae71a3d4eb40419c72921cae58594fe0cec3734c..9e2dbe3c3c416f606d2938701f26288642b55267 100644
--- a/paddle/gserver/layers/DotProdLayer.cpp
+++ b/paddle/gserver/layers/DotProdLayer.cpp
@@ -20,7 +20,7 @@ limitations under the License. */
 namespace paddle {
 
 /**
- * @brief A layer for computing the dot product of two vectors
+ * @brief A layer for computing the dot product of two vectors.
  * Input1: vector (batchSize * dim)
  * Input2: vector (batchSize * dim)
  * Output: a matrix: (batchSize * 1)
@@ -46,7 +46,8 @@ bool DotProdLayer::init(const LayerMap& layerMap,
   Layer::init(layerMap, parameterMap);
 
   CHECK_EQ(inputLayers_.size(), 2U);
-  CHECK_EQ(1, getSize()) << "Dimension mismatch";
+  CHECK_EQ(1UL, getSize())
+      << "The output dimensionality of this layer should be fixed to 1.";
 
   return true;
 }
@@ -59,6 +60,7 @@ void DotProdLayer::forward(PassType passType) {
 
   size_t batchSize = inV0->getHeight();
   CHECK_EQ(inV1->getHeight(), batchSize);
+  CHECK_EQ(inV0->getWidth(), inV1->getWidth());
 
   {
     REGISTER_TIMER_INFO("FwResetTimer", getName().c_str());
diff --git a/paddle/gserver/tests/test_LayerGrad.cpp b/paddle/gserver/tests/test_LayerGrad.cpp
index de2db0b3f7604ee9108dd5ffab7ab9d04e4083d7..fb4eea6f67da9078ef43268a3a1603dc6ccfa652 100644
--- a/paddle/gserver/tests/test_LayerGrad.cpp
+++ b/paddle/gserver/tests/test_LayerGrad.cpp
@@ -1092,7 +1092,7 @@ TEST(Layer, DotProdLayer) {
   config.layerConfig.add_inputs();
 
   for (auto useGpu : {false, true}) {
-    testLayerGrad(config, "dot_prod", 100, false, useGpu);
+    testLayerGrad(config, "dot_prod", 10, false, useGpu);
   }
 }
 
diff --git a/python/paddle/trainer/config_parser.py b/python/paddle/trainer/config_parser.py
index 6d1cc5ad709146a2d1d13496e27cb1847c798d58..fab280d1b0892e91547c57b07dc5cd8390b78ec1 100644
--- a/python/paddle/trainer/config_parser.py
+++ b/python/paddle/trainer/config_parser.py
@@ -3214,7 +3214,10 @@ class DotProdLayer(LayerBase):
     def __init__(self, name, inputs, device=None):
         super(DotProdLayer, self).__init__(
             name, 'dot_prod', 0, inputs, device=device)
-        config_assert(len(inputs) == 2, 'DotProdLayer must have 2 inputs')
+        config_assert(len(inputs) == 2, 'DotProdLayer must have 2 inputs.')
+        config_assert(
+            self.get_input_layer(0).size == self.get_input_layer(1).size,
+            "Two inputs should have the same size.")
         self.set_layer_size(1)
 
 
diff --git a/python/paddle/trainer_config_helpers/tests/configs/file_list.sh b/python/paddle/trainer_config_helpers/tests/configs/file_list.sh
index 1c7451e0abf5dc1b99671f292e2ffc2d2282abe9..0b269a1ff76530774b4d23b0867350fd95e081a3 100755
--- a/python/paddle/trainer_config_helpers/tests/configs/file_list.sh
+++ b/python/paddle/trainer_config_helpers/tests/configs/file_list.sh
@@ -10,6 +10,7 @@ test_prelu_layer test_row_conv test_detection_output_layer test_multibox_loss_la
 test_recursive_topology test_gated_unit_layer test_clip_layer test_row_l2_norm_layer
 test_kmax_seq_socre_layer test_sub_nested_seq_select_layer test_scale_shift_layer
 test_seq_slice_layer test_cross_entropy_over_beam test_roi_pool_layer test_pooling3D_layer
-test_conv3d_layer test_deconv3d_layer test_BatchNorm3D test_resize_layer test_scale_sub_region_layer)
+test_conv3d_layer test_deconv3d_layer test_BatchNorm3D test_resize_layer test_scale_sub_region_layer
+test_dot_prod_layer)
 
 export whole_configs=(test_split_datasource)
diff --git a/python/paddle/trainer_config_helpers/tests/configs/protostr/test_dot_prod_layer.protostr b/python/paddle/trainer_config_helpers/tests/configs/protostr/test_dot_prod_layer.protostr
new file mode 100644
index 0000000000000000000000000000000000000000..f1530c382c3d81a82592af2c43c06eb4278e2b4a
--- /dev/null
+++ b/python/paddle/trainer_config_helpers/tests/configs/protostr/test_dot_prod_layer.protostr
@@ -0,0 +1,38 @@
+type: "nn"
+layers {
+  name: "vector1"
+  type: "data"
+  size: 10
+  active_type: ""
+}
+layers {
+  name: "vector2"
+  type: "data"
+  size: 10
+  active_type: ""
+}
+layers {
+  name: "__dot_prod_layer_0__"
+  type: "dot_prod"
+  size: 1
+  active_type: ""
+  inputs {
+    input_layer_name: "vector1"
+  }
+  inputs {
+    input_layer_name: "vector2"
+  }
+}
+input_layer_names: "vector1"
+input_layer_names: "vector2"
+output_layer_names: "__dot_prod_layer_0__"
+sub_models {
+  name: "root"
+  layer_names: "vector1"
+  layer_names: "vector2"
+  layer_names: "__dot_prod_layer_0__"
+  input_layer_names: "vector1"
+  input_layer_names: "vector2"
+  output_layer_names: "__dot_prod_layer_0__"
+  is_recurrent_layer_group: false
+}
diff --git a/python/paddle/trainer_config_helpers/tests/configs/test_dot_prod_layer.py b/python/paddle/trainer_config_helpers/tests/configs/test_dot_prod_layer.py
new file mode 100644
index 0000000000000000000000000000000000000000..e52d48dde0084aacd3f7874cc384d59287a0c7d5
--- /dev/null
+++ b/python/paddle/trainer_config_helpers/tests/configs/test_dot_prod_layer.py
@@ -0,0 +1,7 @@
+from paddle.trainer_config_helpers import *
+
+vec1 = data_layer(name='vector1', size=10)
+vec2 = data_layer(name='vector2', size=10)
+dot_product = dot_prod_layer(input1=vec1, input2=vec2)
+
+outputs(dot_product)
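For reference (not part of the patch above), a minimal NumPy sketch of the computation the new dot_prod layer performs, following the doc comment in DotProdLayer.cpp: given two inputs of shape (batchSize, dim), the output has shape (batchSize, 1), and row i holds the dot product of row i of input1 with row i of input2. The batch size and dimension below are illustrative only.

import numpy as np

batch_size, dim = 4, 10                     # illustrative sizes, not from the patch
x = np.random.rand(batch_size, dim)         # stands in for input1
y = np.random.rand(batch_size, dim)         # stands in for input2

# Per-row dot product over the batch: out[i][0] = sum_j x[i][j] * y[i][j]
out = (x * y).sum(axis=1, keepdims=True)
assert out.shape == (batch_size, 1)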