test_gradient_checker.py
import unittest
import numpy
from paddle.v2.framework.op import Operator
from gradient_checker import GradientChecker
from gradient_checker import get_numeric_gradient
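# These tests exercise get_numeric_gradient, which estimates an operator's
# gradient numerically, by checking its output against known analytic results.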


class GetNumericGradientTest(unittest.TestCase):
    def test_add_op(self):
        add_op = Operator("add", X="X", Y="Y", Out="Z")
        x = numpy.random.random((10, 1)).astype("float32")
        y = numpy.random.random((10, 1)).astype("float32")

        arr = get_numeric_gradient(add_op, {"X": x, "Y": y}, "Z", "X")
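        # For Z = X + Y, dZ/dX is the identity, so every entry of the numeric
        # gradient (and hence its mean) should be close to 1.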
        self.assertAlmostEqual(arr.mean(), 1.0, delta=1e-4)

    def test_softmax_op(self):
        def stable_softmax(x):
            """Compute the softmax of vector x in a numerically stable way."""
            shiftx = x - numpy.max(x)
            exps = numpy.exp(shiftx)
            return exps / numpy.sum(exps)

        def label_softmax_grad(Y, dY):
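            """Analytic softmax backward pass: per row, dX = Y * (dY - dot(Y, dY))."""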
            dX = Y * 0.0
            for i in range(Y.shape[0]):
                d = numpy.dot(Y[i, :], dY[i, :])
                dX[i, :] = Y[i, :] * (dY[i, :] - d)
            return dX

        softmax_op = Operator("softmax", Logits="Logits", Out="Out")

        X = numpy.random.random((2, 2)).astype("float32")
        Y = numpy.apply_along_axis(stable_softmax, 1, X)
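        # Compute the analytic gradient with an upstream gradient of all ones,
        # to compare against the numeric estimate below.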
        dY = numpy.ones(Y.shape)
        dX = label_softmax_grad(Y, dY)

        arr = get_numeric_gradient(softmax_op, {"Logits": X}, "Out", "Logits")
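        # The finite-difference estimate and the analytic gradient should
        # agree to roughly two decimal places.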
        numpy.testing.assert_almost_equal(arr, dX, decimal=2)


if __name__ == "__main__":
    unittest.main()