diff --git a/python/paddle/trainer_config_helpers/layers.py b/python/paddle/trainer_config_helpers/layers.py
index dabf8e2953054356278a81e54ae7a7d15e358af9..dc5acb19b35e9eefd063e76618618a3b6b844ca4 100644
--- a/python/paddle/trainer_config_helpers/layers.py
+++ b/python/paddle/trainer_config_helpers/layers.py
@@ -2704,8 +2704,8 @@ def conv_operator(img, filter, filter_size, num_filters,
                           PaddlePaddle now supports rectangular filters, the filter's
                           shape can be (filter_size, filter_size_y).
     :type filter_size_y: int
-    :param num_filter: channel of output data.
-    :type num_filter: int
+    :param num_filters: channel of output data.
+    :type num_filters: int
     :param num_channel: channel of input data.
     :type num_channel: int
     :param stride: The x dimension of the stride.
@@ -2726,7 +2726,7 @@ def conv_operator(img, filter, filter_size, num_filters,
     if padding_y is None:
         padding_y = padding
     op = ConvOperator(input_layer_names=[img.name, filter.name],
-                      num_filters = num_filter,
+                      num_filters = num_filters,
                       conv_conf=Conv(filter_size=filter_size,
                                      padding=padding,
                                      stride=stride,
diff --git a/python/paddle/trainer_config_helpers/tests/layers_test_config.py b/python/paddle/trainer_config_helpers/tests/layers_test_config.py
index b9eaf2fce7572343e1ab65aa7ae8997f9245e044..cd368d6b12cbdd0bb86a79d47034dc9ccfa2e4e2 100644
--- a/python/paddle/trainer_config_helpers/tests/layers_test_config.py
+++ b/python/paddle/trainer_config_helpers/tests/layers_test_config.py
@@ -23,6 +23,15 @@ z = out_prod_layer(input1=x, input2=y)
 
 x1 = fc_layer(input=x, size=5)
 y1 = fc_layer(input=y, size=5)
+
+z1 = mixed_layer(act=LinearActivation(),
+                 input=[conv_operator(img=x1,
+                                      filter=y1,
+                                      filter_size=1,
+                                      num_filters=5,
+                                      num_channel=5,
+                                      stride=1)])
+
 y2 = fc_layer(input=y, size=15)
 
 cos1 = cos_sim(a=x1, b=y1)
@@ -30,7 +39,7 @@ cos3 = cos_sim(a=x1, b=y2, size=3)
 
 linear_comb = linear_comb_layer(weights=x1, vectors=y2, size=3)
 
-out = fc_layer(input=[cos1, cos3, linear_comb, z],
+out = fc_layer(input=[cos1, cos3, linear_comb, z, z1],
                size=num_classes,
                act=SoftmaxActivation())
 