import paddle.v2.framework.framework as framework

__all__ = ['ConstantInitializer', 'UniformInitializer', 'NormalInitializer']


class Initializer(object):
    """Base class for variable initializers

    Defines the common interface of variable initializers.
    They add operations to the init program that are used
    to initialize variables. Users should not use this class
    directly, but need to use one of its implementations.
    """

    def __init__(self):
        pass

    def __call__(self, var, block):
        """Add corresponding initialization operations to the network.

        Must be implemented by subclasses.
        """
        raise NotImplementedError()
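
# A minimal sketch of the subclass contract, assuming only the interface
# above: override __call__ to prepend an init op to `block` and return it.
# The class name here is hypothetical; it simply delegates to the
# ConstantInitializer defined below.
#
#     class ZerosInitializer(Initializer):
#         def __call__(self, var, block):
#             return ConstantInitializer(value=0.0)(var, block)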


class ConstantInitializer(Initializer):
    """Implements the constant initializer
    """

    def __init__(self, value=0.0):
        """Constructor for ConstantInitializer

        Args:
            value: constant value to initialize the variable
        """
        assert value is not None
        super(ConstantInitializer, self).__init__()
        self._value = value

    def __call__(self, var, block):
        """Add constant initialization ops for a variable

        Args:
            var: Variable that needs to be initialized
            block: The block in which initialization ops
                   should be added

        Returns:
            the initialization op
        """
        assert isinstance(var, framework.Variable)
        assert isinstance(block, framework.Block)
        # Initialization Ops should be prepended and not appended
        op = block.prepend_op(
            type="fill_constant",
            outputs={"Out": var},
            attrs={
                "shape": var.shape,
                "data_type": int(var.data_type),
                "value": self._value
            })
        var.op = op
        return op
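
# Usage sketch (hypothetical names, not from this module): `block` is
# assumed to be a framework.Block holding a parameter variable `w`; the
# initializer instance is simply called with both, prepending a
# fill_constant op to the block.
#
#     init = ConstantInitializer(value=1.0)
#     init_op = init(w, block)   # fills `w` with 1.0 when the block runs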


class UniformInitializer(Initializer):
    """Implements the random uniform distribution initializer
    """

    def __init__(self, low=-1.0, high=1.0, seed=0):
        """Constructor for UniformInitializer

        Args:
            low: lower boundary of the uniform distribution
            high: upper boundary of the uniform distribution
            seed: random seed
        """
        assert low is not None
        assert high is not None
        assert high >= low
        assert seed is not None
        super(UniformInitializer, self).__init__()
        self._low = low
        self._high = high
        self._seed = seed

    def __call__(self, var, block):
        """Add uniform distribution initialization ops for a variable

        Args:
            var: Variable that needs to be initialized
            block: The block in which initialization ops
                   should be added

        Returns:
            the initialization op
        """
        assert isinstance(var, framework.Variable)
        assert isinstance(block, framework.Block)
        # Initialization Ops should be prepended and not appended
        op = block.prepend_op(
            type="uniform_random",
            outputs={"Out": var},
            attrs={
                "shape": var.shape,
                "data_type": int(var.data_type),
                "min": self._low,
                "max": self._high,
                "seed": self._seed
            })
        var.op = op
        return op
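
# Usage sketch (hypothetical names): `low`/`high` map onto the
# uniform_random op's `min`/`max` attrs; pass a nonzero `seed` for a
# reproducible draw.
#
#     init = UniformInitializer(low=-0.5, high=0.5, seed=42)
#     init(w, block)   # prepends a uniform_random op for `w`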


class NormalInitializer(Initializer):
    """Implements the  random Normal(Gaussian) distribution initializer
    """

    def __init__(self, loc=0.0, scale=1.0, seed=0):
        """Constructor for NormalInitializer

        Args:
            loc: mean of the normal distribution
            scale: standard deviation of the normal distribution
            seed: random seed
        """
        assert loc is not None
        assert scale is not None
        assert seed is not None
        super(NormalInitializer, self).__init__()
        self._mean = loc
        self._std_dev = scale
        self._seed = seed

    def __call__(self, var, block):
        """Add normal distribution initialization ops for a variable

        Args:
            var: Variable that needs to be initialized
            block: The block in which initialization ops
                   should be added

        Returns:
            the initialization op
        """
        assert isinstance(var, framework.Variable)
        assert isinstance(block, framework.Block)
        # Initialization Ops should be prepended and not appended
        op = block.prepend_op(
            type="gaussian_random",
            outputs={"Out": var},
            attrs={
                "shape": var.shape,
                "data_type": int(var.data_type),
                "mean": self._mean,
                "std": self._std_dev,
                "seed": self._seed
            })
        var.op = op
        return op
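
# Usage sketch (hypothetical names): `loc`/`scale` become the
# gaussian_random op's `mean`/`std` attrs.
#
#     init = NormalInitializer(loc=0.0, scale=0.01, seed=1)
#     init(w, block)   # prepends a gaussian_random op for `w`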