# test_tensor_array.py
# Unit tests for the paddle.v2.framework TensorArray pybind bindings.
import logging
import paddle.v2.framework.core as core
import unittest
import numpy as np


class TestTensorArray(unittest.TestCase):
    """Tests for core.TensorArray: unstack/read/write/write_shared/pack/unpack."""

    def setUp(self):
        """Build a shared [batch_size, dim] LoDTensor fixture.

        The tensor's first column holds the row index (0..9); the lod
        [[0, 2, 5, 10]] splits the batch into three sequences of lengths
        2, 3 and 5.
        """
        self.ta = core.TensorArray()

        self.batch_size = 10
        self.dim = 2

        # create a LoDTensor backed by a variable in a fresh scope
        self.scope = core.Scope()
        var = self.scope.var("test_tensor")
        self.place = core.CPUPlace()
        tensor = var.get_tensor()
        tensor.set_dims([self.batch_size, self.dim])
        tensor.alloc_float(self.place)
        tensor_array = np.array(tensor)
        # first column marks the row index so rows stay distinguishable
        # after unstack/pack round-trips; second column is left as allocated
        for i in range(self.batch_size):
            tensor_array[i, 0] = i

        lod_py = [[0, 2, 5, 10]]
        lod_tensor = core.LoDTensor(lod_py)
        lod_tensor.set(tensor_array, self.place)

        # expected unpack() metadata: [begin, end, original_seq_index]
        # per sequence, ordered by descending sequence length
        self.py_seq_meta = [[5, 10, 2], [2, 5, 1], [0, 2, 0]]

        self.tensor = lod_tensor

    def _make_row_tensor(self, name):
        """Create a [1, self.dim] float tensor holding [0, 1, ..., dim-1].

        Returns the core tensor and its numpy view, for use as the payload
        in the write/write_shared tests.
        """
        var = self.scope.var(name)
        tensor = var.get_tensor()
        tensor.set_dims([1, self.dim])
        tensor.alloc_float(self.place)
        tensor_array = np.array(tensor)
        for i in range(self.dim):
            tensor_array[0, i] = i
        tensor.set(tensor_array, self.place)
        return tensor, tensor_array

    def test_unstack(self):
        # unstacking along axis 0 must yield one element per batch row
        self.ta.unstack(self.tensor)
        self.assertEqual(self.tensor.get_dims()[0], self.ta.size())

    def test_read(self):
        # every index in [0, batch_size) must be readable after unstack
        self.ta.unstack(self.tensor)
        for i in range(self.batch_size):
            tensor = self.ta.read(i)

    def test_write(self):
        self.ta.unstack(self.tensor)

        # create a tensor with shape of [1, self.dim]
        tensor, tensor_array = self._make_row_tensor("hell")

        self.ta.write(2, tensor)

        # reading the slot back must return a tensor with the same
        # shape and contents as what was written
        ta_tensor = self.ta.read(2)
        ta_tensor_array = np.array(ta_tensor)
        self.assertEqual(ta_tensor.get_dims(), [1, self.dim])
        self.assertTrue((tensor_array == ta_tensor_array).all())

    def test_write_shared(self):
        self.ta.unstack(self.tensor)

        # create a tensor with shape of [1, self.dim]
        tensor, tensor_array = self._make_row_tensor("hell")

        self.ta.write_shared(2, tensor)

        # shared write must still read back with identical shape and contents
        ta_tensor = self.ta.read(2)
        ta_tensor_array = np.array(ta_tensor)
        self.assertEqual(ta_tensor.get_dims(), [1, self.dim])
        self.assertTrue((tensor_array == ta_tensor_array).all())

    def test_unpack(self):
        meta = self.ta.unpack(self.tensor, 0, True)
        # three sequences in the fixture's lod, longest first
        self.assertEqual(self.ta.size(), 5)
        self.assertEqual(meta, self.py_seq_meta)

    def test_pack(self):
        # pack(unpack(x)) must reproduce both the data and the lod of x
        meta = self.ta.unpack(self.tensor, 0, True)
        tensor = self.ta.pack(0, meta, self.tensor.lod())
        self.assertTrue((np.array(self.tensor) == np.array(tensor)).all())
        # BUG FIX: was assertTrue(a, b), which used b as the failure
        # message and never compared the lods
        self.assertEqual(tensor.lod(), self.tensor.lod())


# Standard entry point: discover and run all test cases when executed directly.
if __name__ == '__main__':
    unittest.main()