# test_fc_op.py
import unittest
import numpy as np
from gradient_checker import GradientChecker, create_op
from op_test_util import OpTestMeta
from paddle.v2.framework.op import Operator


class TestFCOp(unittest.TestCase):
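    """Forward test for the fc op; setUp supplies the inputs, attrs and NumPy reference outputs."""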
    __metaclass__ = OpTestMeta

    def setUp(self):
        self.type = "fc"
        self.inputs = {
            "X": np.random.random((32, 784)).astype("float32"),
            "W": np.random.random((784, 1000)).astype("float32"),
            "b": np.random.random(1000).astype("float32")
        }
        self.attrs = {"activation": "sigmoid"}
        mul_out = np.dot(self.inputs["X"], self.inputs["W"])
        add_out = np.add(mul_out, self.inputs["b"])
        sigmoid_out = 1 / (1 + np.exp(-add_out))
        self.outputs = {
            "mul_out": mul_out,
            "add_out": add_out,
            "Out": sigmoid_out
        }


class TestFCGradOp(GradientChecker):
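    """Gradient check for the fc op via GradientChecker."""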
    def test_normal(self):
        self.inputs = {
            "X": np.random.random((32, 256)).astype("float32"),
            "W": np.random.random((256, 100)).astype("float32"),
            "b": np.random.random(100).astype("float32")
        }
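        # Construct the fc operator, binding variable names to its inputs, outputs and activation attr.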
        op = Operator(
            "fc",
            X="X",
            W="W",
            b="b",
            Out="Out",
            mul_out="mul_out",
            add_out="add_out",
            activation="sigmoid")
        self.check_grad(op, self.inputs, ["X", "W", "b"], "Out")


if __name__ == '__main__':
    unittest.main()