import paddle.v2.framework.layers as layers
import paddle.v2.framework.nets as nets
from paddle.v2.framework.framework import Program, g_program
import paddle.v2.framework.core as core
import unittest


class TestBook(unittest.TestCase):
    def test_fit_a_line(self):
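        # Linear regression: one fc layer predicts y from x, the squared
        # error against the label y is averaged into a scalar cost, and the
        # backward pass is appended to the program.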
        program = Program()
        x = layers.data(
            name='x', shape=[13], data_type='float32', program=program)
        y_predict = layers.fc(input=x, size=1, act=None, program=program)

        y = layers.data(
            name='y', shape=[1], data_type='float32', program=program)
        cost = layers.square_error_cost(
            input=y_predict, label=y, program=program)

        avg_cost = layers.mean(x=cost, program=program)
        self.assertIsNotNone(avg_cost)
        program.append_backward(avg_cost)
        print str(program)

    def test_recognize_digits_mlp(self):
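        # MNIST MLP: two relu fc layers, a 10-way softmax fc output,
        # cross-entropy against the integer label, averaged to a scalar cost.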
        program = Program()

        # Every layer below is built in the explicit `program` rather than
        # the default `g_program`.
        images = layers.data(
            name='pixel', shape=[784], data_type='float32', program=program)
        label = layers.data(
            name='label', shape=[1], data_type='int32', program=program)
        hidden1 = layers.fc(input=images, size=128, act='relu', program=program)
        hidden2 = layers.fc(input=hidden1, size=64, act='relu', program=program)
        predict = layers.fc(input=hidden2,
                            size=10,
                            act='softmax',
                            program=program)
        cost = layers.cross_entropy(input=predict, label=label, program=program)
        avg_cost = layers.mean(x=cost, program=program)
        self.assertIsNotNone(avg_cost)
        print str(program)

    def test_simple_conv2d(self):
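        # A single conv2d layer on a 3x48x48 input; this test only checks
        # that the operator can be appended to the program and printed.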
        program = Program()
        images = layers.data(
            name='pixel', shape=[3, 48, 48], data_type='int32', program=program)
        layers.conv2d(
            input=images, num_filters=3, filter_size=[4, 4], program=program)

        print str(program)

    def test_recognize_digits_conv(self):
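        # MNIST CNN: two conv + pool blocks built with
        # nets.simple_img_conv_pool, a 10-way softmax fc classifier,
        # cross-entropy cost, and the backward pass.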
        program = Program()

        images = layers.data(
            name='pixel',
            shape=[1, 28, 28],
            data_type='float32',
            program=program)
        label = layers.data(
            name='label', shape=[1], data_type='int32', program=program)
        conv_pool_1 = nets.simple_img_conv_pool(
            input=images,
            filter_size=5,
            num_filters=2,
            pool_size=2,
            pool_stride=2,
            act="relu",
            program=program)
        conv_pool_2 = nets.simple_img_conv_pool(
            input=conv_pool_1,
            filter_size=5,
            num_filters=4,
            pool_size=2,
            pool_stride=2,
            act="relu",
            program=program)

        predict = layers.fc(input=conv_pool_2,
                            size=10,
                            act="softmax",
                            program=program)
        cost = layers.cross_entropy(input=predict, label=label, program=program)
        avg_cost = layers.mean(x=cost, program=program)

        program.append_backward(avg_cost)

        print str(program)

    def test_word_embedding(self):
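        # N-gram language model: four context words are embedded with a
        # shared table, concatenated, and fed through two fc layers to
        # predict the next word.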
        program = Program()
        dict_size = 10000
        embed_size = 32
        first_word = layers.data(
            name='firstw', shape=[1], data_type='int32', program=program)
        second_word = layers.data(
            name='secondw', shape=[1], data_type='int32', program=program)
        third_word = layers.data(
            name='thirdw', shape=[1], data_type='int32', program=program)
        forth_word = layers.data(
            name='forthw', shape=[1], data_type='int32', program=program)
        next_word = layers.data(
            name='nextw', shape=[1], data_type='int32', program=program)

        embed_param_attr_1 = {
            'name': 'shared_w',
            'init_attr': {
                'max': 1.0,
                'type': 'uniform_random',
                'min': -1.0
            }
        }
        embed_param_attr_2 = {'name': 'shared_w'}
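        # Both attribute dicts use the parameter name 'shared_w', so the four
        # embedding layers below share a single embedding table; only the
        # first dict supplies the uniform_random initializer.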

        embed_first = layers.embedding(
            input=first_word,
            size=[dict_size, embed_size],
            data_type='float32',
            param_attr=embed_param_attr_1,
            program=program)
        embed_second = layers.embedding(
            input=second_word,
            size=[dict_size, embed_size],
            data_type='float32',
            param_attr=embed_param_attr_2,
            program=program)

        embed_third = layers.embedding(
            input=third_word,
            size=[dict_size, embed_size],
            data_type='float32',
            param_attr=embed_param_attr_2,
            program=program)
        embed_forth = layers.embedding(
            input=forth_word,
            size=[dict_size, embed_size],
            data_type='float32',
            param_attr=embed_param_attr_2,
            program=program)

        concat_embed = layers.concat(
            input=[embed_first, embed_second, embed_third, embed_forth],
            axis=1,
            program=program)

        hidden1 = layers.fc(input=concat_embed,
                            size=256,
                            act='sigmoid',
                            program=program)
        predict_word = layers.fc(input=hidden1,
                                 size=dict_size,
                                 act='softmax',
                                 program=program)
        cost = layers.cross_entropy(
            input=predict_word, label=next_word, program=program)
        avg_cost = layers.mean(x=cost, program=program)
        self.assertIsNotNone(avg_cost)

        print str(program)


if __name__ == '__main__':
    unittest.main()