import paddle
from paddle import ParamAttr
import paddle.nn as nn
import paddle.nn.functional as F
from paddle.nn import Conv2d, Pool2D, BatchNorm, Linear, Dropout, ReLU
from paddle.nn.initializer import Uniform
import math

__all__ = ["AlexNet"]


class ConvPoolLayer(nn.Layer):
    """Conv2d -> optional ReLU -> 3x3 stride-2 max pool.

    Both the conv weights and biases are initialized from
    Uniform(-stdv, stdv), with explicit parameter names derived from
    ``name`` so checkpoints keep stable keys.

    Args:
        input_channels (int): number of input feature channels.
        output_channels (int): number of conv output channels.
        filter_size (int|tuple): conv kernel size.
        stride (int|tuple): conv stride.
        padding (int|tuple): conv padding.
        stdv (float): half-width of the uniform initializer range.
        groups (int): conv groups. Default: 1.
        act (str|None): "relu" applies ReLU after the conv; any other
            value (including None) applies no activation.
        name (str): prefix for the conv parameter names.
    """

    def __init__(self,
                 input_channels,  # fixed typo: was `inputc_channels`
                 output_channels,
                 filter_size,
                 stride,
                 padding,
                 stdv,
                 groups=1,
                 act=None,
                 name=None):
        super(ConvPoolLayer, self).__init__()

        # Only "relu" is recognized; other values silently disable activation.
        self.relu = ReLU() if act == "relu" else None

        self._conv = Conv2d(
            in_channels=input_channels,
            out_channels=output_channels,
            kernel_size=filter_size,
            stride=stride,
            padding=padding,
            groups=groups,
            weight_attr=ParamAttr(
                name=name + "_weights", initializer=Uniform(-stdv, stdv)),
            bias_attr=ParamAttr(
                name=name + "_offset", initializer=Uniform(-stdv, stdv)))
        # NOTE(review): Pool2D is the legacy (pre-2.0) pooling API; newer
        # paddle versions expose nn.MaxPool2D instead — confirm the target
        # paddle version before migrating.
        self._pool = Pool2D(
            pool_size=3, pool_stride=2, pool_padding=0, pool_type="max")

    def forward(self, inputs):
        """Apply conv, optional ReLU, then max pooling."""
        x = self._conv(inputs)
        if self.relu is not None:
            x = self.relu(x)
        x = self._pool(x)
        return x


class AlexNetDY(nn.Layer):
    """AlexNet: five conv stages (conv1/2/5 fused with max pooling) followed
    by three fully-connected layers with dropout.

    Every weight and bias is drawn from Uniform(-bound, bound), where the
    bound is 1 / sqrt(fan_in) of the layer that precedes it.

    Args:
        class_dim (int): number of output classes. Default: 1000.
    """

    def __init__(self, class_dim=1000):
        super(AlexNetDY, self).__init__()

        def uniform_attr(param_name, bound):
            # Named parameter with a Uniform(-bound, bound) initializer.
            return ParamAttr(
                name=param_name, initializer=Uniform(-bound, bound))

        self._conv1 = ConvPoolLayer(
            3, 64, 11, 4, 2, 1.0 / math.sqrt(3 * 11 * 11),
            act="relu", name="conv1")
        self._conv2 = ConvPoolLayer(
            64, 192, 5, 1, 2, 1.0 / math.sqrt(64 * 5 * 5),
            act="relu", name="conv2")

        bound3 = 1.0 / math.sqrt(192 * 3 * 3)
        self._conv3 = Conv2d(
            192,
            384,
            3,
            stride=1,
            padding=1,
            weight_attr=uniform_attr("conv3_weights", bound3),
            bias_attr=uniform_attr("conv3_offset", bound3))
        bound4 = 1.0 / math.sqrt(384 * 3 * 3)
        self._conv4 = Conv2d(
            384,
            256,
            3,
            stride=1,
            padding=1,
            weight_attr=uniform_attr("conv4_weights", bound4),
            bias_attr=uniform_attr("conv4_offset", bound4))
        self._conv5 = ConvPoolLayer(
            256, 256, 3, 1, 1, 1.0 / math.sqrt(256 * 3 * 3),
            act="relu", name="conv5")

        # NOTE(review): the same bound (from the 256*6*6 flattened feature
        # map) is reused for fc6, fc7 AND fc8 — this mirrors the original
        # code, which never recomputes stdv after fc6.
        fc_bound = 1.0 / math.sqrt(256 * 6 * 6)

        self._drop1 = Dropout(p=0.5)
        self._fc6 = Linear(
            in_features=256 * 6 * 6,
            out_features=4096,
            weight_attr=uniform_attr("fc6_weights", fc_bound),
            bias_attr=uniform_attr("fc6_offset", fc_bound))
        self._drop2 = Dropout(p=0.5)
        self._fc7 = Linear(
            in_features=4096,
            out_features=4096,
            weight_attr=uniform_attr("fc7_weights", fc_bound),
            bias_attr=uniform_attr("fc7_offset", fc_bound))
        self._fc8 = Linear(
            in_features=4096,
            out_features=class_dim,
            weight_attr=uniform_attr("fc8_weights", fc_bound),
            bias_attr=uniform_attr("fc8_offset", fc_bound))

    def forward(self, inputs):
        """Compute class logits for a batch of images.

        Assumes inputs are (N, 3, H, W) images sized so that the conv stack
        yields a 256*6*6 feature map (224x224 in the standard setting) —
        TODO confirm against the data pipeline.
        """
        x = self._conv2(self._conv1(inputs))
        x = F.relu(self._conv3(x))
        x = F.relu(self._conv4(x))
        x = self._conv5(x)
        x = paddle.flatten(x, start_axis=1, stop_axis=-1)
        x = F.relu(self._fc6(self._drop1(x)))
        x = F.relu(self._fc7(self._drop2(x)))
        x = self._fc8(x)
        return x


def AlexNet(**args):
    """Factory for the AlexNet model; kwargs are forwarded to AlexNetDY."""
    return AlexNetDY(**args)