# test_array_read_write_op.py
#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
import paddle.v2.fluid.core as core
import paddle.v2.fluid.layers as layers
from paddle.v2.fluid.executor import Executor
from paddle.v2.fluid.backward import append_backward
from paddle.v2.fluid.framework import default_main_program
import numpy


class TestArrayReadWrite(unittest.TestCase):
    def test_read_write(self):
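        """Write three inputs into a tensor array, read them back, and
        check both the forward outputs and the input gradients."""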
        x = [
            layers.data(name='x0', shape=[100]),
            layers.data(name='x1', shape=[100]),
            layers.data(name='x2', shape=[100]),
        ]

        for each_x in x:
            each_x.stop_gradient = False

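        # Write x[0], x[1], x[2] into slots 0, 1, 2 of a tensor array,
        # incrementing the index i after each write.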
        i = layers.zeros(shape=[1], dtype='int64')
        i.stop_gradient = False
        arr = layers.array_write(x=x[0], i=i)
        i = layers.increment(x=i)
        arr = layers.array_write(x=x[1], i=i, array=arr)
        i = layers.increment(x=i)
        arr = layers.array_write(x=x[2], i=i, array=arr)

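        # Reset the index to 0 and read the three tensors back.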
        i = layers.zeros(shape=[1], dtype='int64')
        i.stop_gradient = False
        a0 = layers.array_read(array=arr, i=i)
        i = layers.increment(x=i)
        a1 = layers.array_read(array=arr, i=i)
        i = layers.increment(x=i)
        a2 = layers.array_read(array=arr, i=i)

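        # Average each read result and each raw input; if the reads
        # reproduce the writes, the two sums below must match.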
        mean_a0 = layers.mean(x=a0)
        mean_a1 = layers.mean(x=a1)
        mean_a2 = layers.mean(x=a2)

        a_sum = layers.sums(input=[mean_a0, mean_a1, mean_a2])

        mean_x0 = layers.mean(x=x[0])
        mean_x1 = layers.mean(x=x[1])
        mean_x2 = layers.mean(x=x[2])

        x_sum = layers.sums(input=[mean_x0, mean_x1, mean_x2])

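        # Set up a CPU executor with an explicit scope.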
        scope = core.Scope()
        cpu = core.CPUPlace()

        exe = Executor(cpu)

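        # Feed the same random tensor to all three inputs and fetch both sums.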
        tensor = numpy.random.random(size=(100, 100)).astype('float32')

        outs = exe.run(
            feed={'x0': tensor, 'x1': tensor, 'x2': tensor},
            fetch_list=[a_sum, x_sum],
            scope=scope)
        # Compare with a tolerance rather than exact float equality.
        self.assertTrue(numpy.allclose(outs[0], outs[1]))

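        # Combine both sums into a scalar loss (there are six mean terms,
        # hence the 1/6 scale) so gradients can be propagated back.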
        total_sum = layers.sums(input=[a_sum, x_sum])
        total_sum_scaled = layers.scale(x=total_sum, scale=1 / 6.0)

        append_backward(total_sum_scaled)

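        # Look up each input's gradient variable by its "@GRAD" name.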
        g_vars = [
            default_main_program().global_block().var(each_x.name + "@GRAD")
            for each_x in x
        ]
        g_out = [
            item.sum()
            for item in exe.run(
                feed={'x0': tensor, 'x1': tensor, 'x2': tensor},
                fetch_list=g_vars)
        ]
        g_out_sum = numpy.array(g_out).sum()

        # The loss is (a_sum + x_sum) / 6, which is linear in the inputs, so
        # the input gradients over all elements should sum to exactly 1.
        self.assertAlmostEqual(1.0, g_out_sum, delta=0.1)


if __name__ == '__main__':
    unittest.main()