test_gradient_checker.py
import unittest
import numpy as np
import paddle.v2.framework.core as core
from op_test import get_numeric_gradient
from op_test import create_op
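

# A minimal sketch (not part of op_test) of the kind of central-difference
# estimate that a helper like get_numeric_gradient is assumed to compute.
# The name and signature below are illustrative only; they are not the real
# op_test API. For example, _central_difference_gradient(lambda a: a + y, x)
# reproduces the all-ones gradient checked in test_add_op below.
def _central_difference_gradient(f, x, delta=1e-3):
    """Estimate d(sum(f(x)))/dx element-wise by perturbing each entry of x."""
    x = np.array(x, dtype="float64")
    grad = np.zeros_like(x)
    flat_x = x.reshape(-1)
    flat_g = grad.reshape(-1)
    for i in range(flat_x.size):
        orig = flat_x[i]
        flat_x[i] = orig + delta
        plus = np.sum(f(x))  # forward pass with x_i + delta
        flat_x[i] = orig - delta
        minus = np.sum(f(x))  # forward pass with x_i - delta
        flat_x[i] = orig  # restore the original value
        flat_g[i] = (plus - minus) / (2.0 * delta)
    return grad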


class GetNumericGradientTest(unittest.TestCase):
    def test_add_op(self):
        x = np.random.random((10, 1)).astype("float32")
        y = np.random.random((10, 1)).astype("float32")
        z = x + y
        scope = core.Scope()
        add_op = create_op(scope, "add", {'X': x, 'Y': y}, {'Out': z}, dict())
        arr = get_numeric_gradient(scope, add_op, {'X': x,
                                                   'Y': y}, 'X', ['Out'])
        # The gradient of Out = X + Y with respect to X is identically one,
        # so the numeric estimate should average to 1 within tolerance.
        self.assertAlmostEqual(arr.mean(), 1.0, delta=1e-4)

    def test_softmax_op(self):
        def stable_softmax(x):
            """Compute the softmax of vector x in a numerically stable way."""
            shiftx = x - np.max(x)
            exps = np.exp(shiftx)
            return exps / np.sum(exps)

        def label_softmax_grad(Y, dY):
            """Analytic softmax backward: dX[i] = Y[i] * (dY[i] - dot(Y[i], dY[i]))."""
            dX = Y * 0.0
            for i in range(Y.shape[0]):
                d = np.dot(Y[i, :], dY[i, :])
                dX[i, :] = Y[i, :] * (dY[i, :] - d)
            return dX

        X = np.random.random((2, 2)).astype("float32")
        Y = np.apply_along_axis(stable_softmax, 1, X)
        dY = np.ones(Y.shape)
        dX = label_softmax_grad(Y, dY)

        scope = core.Scope()
        softmax_op = create_op(scope, "softmax", {"X": X}, {"Y": Y}, dict())

        arr = get_numeric_gradient(scope, softmax_op, {"X": X}, "X", "Y")
        # np.testing.assert_almost_equal takes an integer number of decimal
        # places; decimal=2 matches the intended ~1e-2 tolerance
        # (passing decimal=1e-2 would make the check effectively vacuous).
        np.testing.assert_almost_equal(arr, dX, decimal=2)


if __name__ == "__main__":
    unittest.main()