From 4c85f955d78674a3803fab56c5dfd095bc6fde2b Mon Sep 17 00:00:00 2001
From: dangqingqing
Date: Fri, 24 Feb 2017 20:47:01 +0800
Subject: [PATCH] move test module

---
 python/paddle/v2/layer.py            | 129 ---------------------------
 python/paddle/v2/tests/test_layer.py |  92 ++++++++++++++++++-
 2 files changed, 90 insertions(+), 131 deletions(-)

diff --git a/python/paddle/v2/layer.py b/python/paddle/v2/layer.py
index 618a22024..a3fac6ca6 100644
--- a/python/paddle/v2/layer.py
+++ b/python/paddle/v2/layer.py
@@ -347,132 +347,3 @@ operator_list = [
 ]
 for op in operator_list:
     globals()[op[0]] = __convert_to_v2__(op[0], parent_names=op[1])
-
-
-def test_projection():
-    """
-    TODO: move to tests file
-    """
-    input = data(name='data', type=data_type.dense_vector(784))
-    word = data(name='word', type=data_type.integer_value_sequence(10000))
-    fc0 = fc(input=input, size=100, act=conf_helps.SigmoidActivation())
-    fc1 = fc(input=input, size=200, act=conf_helps.SigmoidActivation())
-    mixed0 = mixed(
-        size=256,
-        input=[
-            full_matrix_projection(input=fc0), full_matrix_projection(input=fc1)
-        ])
-    with mixed(size=200) as mixed1:
-        mixed1 += full_matrix_projection(input=fc0)
-        mixed1 += identity_projection(input=fc1)
-
-    table = table_projection(input=word)
-    emb0 = mixed(size=512, input=table)
-    with mixed(size=512) as emb1:
-        emb1 += table
-
-    scale = scaling_projection(input=fc0)
-    scale0 = mixed(size=100, input=scale)
-    with mixed(size=100) as scale1:
-        scale1 += scale
-
-    dotmul = dotmul_projection(input=fc0)
-    dotmul0 = mixed(size=100, input=dotmul)
-    with mixed(size=100) as dotmul1:
-        dotmul1 += dotmul
-
-    context = context_projection(input=fc0, context_len=5)
-    context0 = mixed(size=100, input=context)
-    with mixed(size=100) as context1:
-        context1 += context
-
-    conv = conv_projection(
-        input=input,
-        filter_size=1,
-        num_channels=1,
-        num_filters=128,
-        stride=1,
-        padding=0)
-    conv0 = mixed(input=conv, bias_attr=True)
-    with mixed(bias_attr=True) as conv1:
-        conv1 += conv
-
-    print parse_network(mixed0)
-    print parse_network(mixed1)
-    print parse_network(emb0)
-    print parse_network(emb1)
-    print parse_network(scale0)
-    print parse_network(scale1)
-    print parse_network(dotmul0)
-    print parse_network(dotmul1)
-    print parse_network(conv0)
-    print parse_network(conv1)
-
-
-def test_operator():
-    """
-    TODO: move to tests file
-    """
-    ipt0 = data(name='data', type=data_type.dense_vector(784))
-    ipt1 = data(name='word', type=data_type.dense_vector(128))
-    fc0 = fc(input=ipt0, size=100, act=conf_helps.SigmoidActivation())
-    fc1 = fc(input=ipt0, size=100, act=conf_helps.SigmoidActivation())
-
-    dotmul_op = dotmul_operator(a=fc0, b=fc1)
-    dotmul0 = mixed(input=dotmul_op)
-    with mixed() as dotmul1:
-        dotmul1 += dotmul_op
-
-    conv = conv_operator(
-        img=ipt0,
-        filter=ipt1,
-        filter_size=1,
-        num_channels=1,
-        num_filters=128,
-        stride=1,
-        padding=0)
-    conv0 = mixed(input=conv)
-    with mixed() as conv1:
-        conv1 += conv
-
-    print parse_network(dotmul0)
-    print parse_network(dotmul1)
-    print parse_network(conv0)
-    print parse_network(conv1)
-
-
-def test_cost(pixel, label, weight, score):
-    hidden = fc(input=pixel,
-                size=100,
-                act=activation.Sigmoid(),
-                param_attr=attr.Param(name='hidden'))
-    inference = fc(input=hidden, size=10, act=activation.Softmax())
-    maxid = max_id(input=inference)
-    cost1 = classification_cost(input=inference, label=label)
-    cost2 = classification_cost(input=inference, label=label, weight=weight)
-    cost3 = cross_entropy_cost(input=inference, label=label)
-    cost4 = cross_entropy_with_selfnorm_cost(input=inference, label=label)
-    cost5 = regression_cost(input=inference, label=label)
-    cost6 = regression_cost(input=inference, label=label, weight=weight)
-    cost7 = multi_binary_label_cross_entropy_cost(input=inference, label=label)
-    cost8 = rank_cost(left=score, right=score, label=score)
-    cost9 = lambda_cost(input=inference, score=score)
-    cost10 = sum_cost(input=inference)
-    cost11 = huber_cost(input=score, label=label)
-
-    print parse_network(cost1, cost2)
-    print parse_network(cost3, cost4)
-    print parse_network(cost5, cost6)
-    print parse_network(cost7, cost8, cost9, cost10, cost11)
-    print parse_network(inference, maxid)
-
-
-if __name__ == '__main__':
-    pixel = data(name='pixel', type=data_type.dense_vector(784))
-    label = data(name='label', type=data_type.integer_value(10))
-    weight = data(name='weight', type=data_type.dense_vector(10))
-    score = data(name='score', type=data_type.dense_vector(1))
-
-    test_cost(pixel, label, weight, score)
-    test_projection()
-    test_operator()
diff --git a/python/paddle/v2/tests/test_layer.py b/python/paddle/v2/tests/test_layer.py
index b600e8cf7..521bc8b40 100644
--- a/python/paddle/v2/tests/test_layer.py
+++ b/python/paddle/v2/tests/test_layer.py
@@ -19,8 +19,6 @@ import paddle.v2.activation as activation
 import paddle.v2.attr as attr
 import paddle.v2.data_type as data_type
 import paddle.v2.layer as layer
-from paddle.trainer_config_helpers.config_parser_utils import \
-    parse_network_config as parse_network
 
 pixel = layer.data(name='pixel', type=data_type.dense_vector(784))
 label = layer.data(name='label', type=data_type.integer_value(10))
@@ -58,6 +56,96 @@ class CostLayerTest(unittest.TestCase):
         #print layer.parse_network(cost5, cost6)
         #print layer.parse_network(cost7, cost8, cost9, cost10, cost11)
 
+    def test_projection(self):
+        input = layer.data(name='data', type=data_type.dense_vector(784))
+        word = layer.data(
+            name='word', type=data_type.integer_value_sequence(10000))
+        fc0 = layer.fc(input=input,
+                       size=100,
+                       act=conf_helps.SigmoidActivation())
+        fc1 = layer.fc(input=input,
+                       size=200,
+                       act=conf_helps.SigmoidActivation())
+        mixed0 = layer.mixed(
+            size=256,
+            input=[
+                layer.full_matrix_projection(input=fc0),
+                layer.full_matrix_projection(input=fc1)
+            ])
+        with layer.mixed(size=200) as mixed1:
+            mixed1 += layer.full_matrix_projection(input=fc0)
+            mixed1 += layer.identity_projection(input=fc1)
+
+        table = layer.table_projection(input=word)
+        emb0 = layer.mixed(size=512, input=table)
+        with layer.mixed(size=512) as emb1:
+            emb1 += table
+
+        scale = layer.scaling_projection(input=fc0)
+        scale0 = layer.mixed(size=100, input=scale)
+        with layer.mixed(size=100) as scale1:
+            scale1 += scale
+
+        dotmul = layer.dotmul_projection(input=fc0)
+        dotmul0 = layer.mixed(size=100, input=dotmul)
+        with layer.mixed(size=100) as dotmul1:
+            dotmul1 += dotmul
+
+        context = layer.context_projection(input=fc0, context_len=5)
+        context0 = layer.mixed(size=100, input=context)
+        with layer.mixed(size=100) as context1:
+            context1 += context
+
+        conv = layer.conv_projection(
+            input=input,
+            filter_size=1,
+            num_channels=1,
+            num_filters=128,
+            stride=1,
+            padding=0)
+        conv0 = layer.mixed(input=conv, bias_attr=True)
+        with layer.mixed(bias_attr=True) as conv1:
+            conv1 += conv
+
+        print layer.parse_network(mixed0)
+        print layer.parse_network(mixed1)
+        print layer.parse_network(emb0)
+        print layer.parse_network(emb1)
+        print layer.parse_network(scale0)
+        print layer.parse_network(scale1)
+        print layer.parse_network(dotmul0)
+        print layer.parse_network(dotmul1)
+        print layer.parse_network(conv0)
+        print layer.parse_network(conv1)
+
+    def test_operator(self):
+        ipt0 = layer.data(name='data', type=data_type.dense_vector(784))
+        ipt1 = layer.data(name='word', type=data_type.dense_vector(128))
+        fc0 = layer.fc(input=ipt0, size=100, act=conf_helps.SigmoidActivation())
+        fc1 = layer.fc(input=ipt0, size=100, act=conf_helps.SigmoidActivation())
+
+        dotmul_op = layer.dotmul_operator(a=fc0, b=fc1)
+        dotmul0 = layer.mixed(input=dotmul_op)
+        with layer.mixed() as dotmul1:
+            dotmul1 += dotmul_op
+
+        conv = layer.conv_operator(
+            img=ipt0,
+            filter=ipt1,
+            filter_size=1,
+            num_channels=1,
+            num_filters=128,
+            stride=1,
+            padding=0)
+        conv0 = layer.mixed(input=conv)
+        with layer.mixed() as conv1:
+            conv1 += conv
+
+        print layer.parse_network(dotmul0)
+        print layer.parse_network(dotmul1)
+        print layer.parse_network(conv0)
+        print layer.parse_network(conv1)
+
 
 if __name__ == '__main__':
     unittest.main()
-- 
GitLab
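
Note (not part of the commit): the relocated tests drive the v2 layer API roughly as in the minimal sketch below. The sketch only reuses calls that already appear in the patch (layer.data, layer.fc, layer.mixed, layer.full_matrix_projection, layer.parse_network) and the Python 2 print statement the moved code uses, so treat it as an illustration rather than the project's reference usage.

    # Minimal sketch: two fully connected layers over a dense input,
    # combined through full-matrix projections inside a mixed layer,
    # then parsed, mirroring what test_projection exercises above.
    import paddle.v2.activation as activation
    import paddle.v2.data_type as data_type
    import paddle.v2.layer as layer

    pixel = layer.data(name='pixel', type=data_type.dense_vector(784))
    fc0 = layer.fc(input=pixel, size=100, act=activation.Sigmoid())
    fc1 = layer.fc(input=pixel, size=200, act=activation.Sigmoid())

    with layer.mixed(size=256) as combined:
        combined += layer.full_matrix_projection(input=fc0)
        combined += layer.full_matrix_projection(input=fc1)

    # The moved tests use the printed protobuf config as a smoke check.
    print layer.parse_network(combined)

Assuming the python/ directory is importable, the relocated cases can then be run with the standard unittest runner, for example: python -m unittest paddle.v2.tests.test_layer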