alexnet.py
import paddle
from paddle import ParamAttr
import paddle.nn as nn
import paddle.nn.functional as F
from paddle.nn import Conv2D, BatchNorm, Linear, Dropout, ReLU
from paddle.nn import AdaptiveAvgPool2D, MaxPool2D, AvgPool2D
from paddle.nn.initializer import Uniform
import math

__all__ = ["AlexNet"]


class ConvPoolLayer(nn.Layer):
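    """Conv2D block with optional ReLU activation, followed by a 3x3, stride-2 max pool."""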
    def __init__(self,
                 input_channels,
                 output_channels,
                 filter_size,
                 stride,
                 padding,
                 stdv,
                 groups=1,
                 act=None,
                 name=None):
        super(ConvPoolLayer, self).__init__()

        self.relu = ReLU() if act == "relu" else None

        self._conv = Conv2D(
            in_channels=input_channels,
            out_channels=output_channels,
            kernel_size=filter_size,
            stride=stride,
            padding=padding,
            groups=groups,
            weight_attr=ParamAttr(
                name=name + "_weights", initializer=Uniform(-stdv, stdv)),
            bias_attr=ParamAttr(
                name=name + "_offset", initializer=Uniform(-stdv, stdv)))
        self._pool = MaxPool2D(kernel_size=3, stride=2, padding=0)

    def forward(self, inputs):
        x = self._conv(inputs)
        if self.relu is not None:
            x = self.relu(x)
        x = self._pool(x)
        return x


class AlexNetDY(nn.Layer):
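    """AlexNet backbone: five convolutional stages plus a three-layer FC head; `class_dim` sets the fc8 output size."""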
    def __init__(self, class_dim=1000):
        super(AlexNetDY, self).__init__()

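        # Uniform init bounds follow 1 / sqrt(fan_in) of the layer being created.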
        stdv = 1.0 / math.sqrt(3 * 11 * 11)
        self._conv1 = ConvPoolLayer(
            3, 64, 11, 4, 2, stdv, act="relu", name="conv1")
        stdv = 1.0 / math.sqrt(64 * 5 * 5)
        self._conv2 = ConvPoolLayer(
            64, 192, 5, 1, 2, stdv, act="relu", name="conv2")
        stdv = 1.0 / math.sqrt(192 * 3 * 3)
        self._conv3 = Conv2D(
            192,
            384,
            3,
            stride=1,
            padding=1,
            weight_attr=ParamAttr(
                name="conv3_weights", initializer=Uniform(-stdv, stdv)),
            bias_attr=ParamAttr(
                name="conv3_offset", initializer=Uniform(-stdv, stdv)))
        stdv = 1.0 / math.sqrt(384 * 3 * 3)
        self._conv4 = Conv2D(
            384,
            256,
            3,
            stride=1,
            padding=1,
            weight_attr=ParamAttr(
                name="conv4_weights", initializer=Uniform(-stdv, stdv)),
            bias_attr=ParamAttr(
                name="conv4_offset", initializer=Uniform(-stdv, stdv)))
        stdv = 1.0 / math.sqrt(256 * 3 * 3)
        self._conv5 = ConvPoolLayer(
            256, 256, 3, 1, 1, stdv, act="relu", name="conv5")
        stdv = 1.0 / math.sqrt(256 * 6 * 6)

        self._drop1 = Dropout(p=0.5, mode="downscale_in_infer")
        self._fc6 = Linear(
            in_features=256 * 6 * 6,
            out_features=4096,
            weight_attr=ParamAttr(
                name="fc6_weights", initializer=Uniform(-stdv, stdv)),
            bias_attr=ParamAttr(
                name="fc6_offset", initializer=Uniform(-stdv, stdv)))

        self._drop2 = Dropout(p=0.5, mode="downscale_in_infer")
        self._fc7 = Linear(
            in_features=4096,
            out_features=4096,
            weight_attr=ParamAttr(
                name="fc7_weights", initializer=Uniform(-stdv, stdv)),
            bias_attr=ParamAttr(
                name="fc7_offset", initializer=Uniform(-stdv, stdv)))
        self._fc8 = Linear(
            in_features=4096,
            out_features=class_dim,
            weight_attr=ParamAttr(
                name="fc8_weights", initializer=Uniform(-stdv, stdv)),
            bias_attr=ParamAttr(
                name="fc8_offset", initializer=Uniform(-stdv, stdv)))

    def forward(self, inputs):
        x = self._conv1(inputs)
        x = self._conv2(x)
        x = self._conv3(x)
        x = F.relu(x)
        x = self._conv4(x)
        x = F.relu(x)
        x = self._conv5(x)
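        # Flatten the 256 x 6 x 6 feature map (for 224 x 224 inputs) into a vector for the FC head.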
        x = paddle.flatten(x, start_axis=1, stop_axis=-1)
        x = self._drop1(x)
        x = self._fc6(x)
        x = F.relu(x)
        x = self._drop2(x)
        x = self._fc7(x)
        x = F.relu(x)
        x = self._fc8(x)
        return x


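# Factory exported through __all__; keyword args (e.g. class_dim) are forwarded to AlexNetDY.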
def AlexNet(**args):
    model = AlexNetDY(**args)
    return model
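

# Minimal usage sketch, assuming a standard PaddlePaddle install: build the model
# through the AlexNet factory above and run a forward pass on a dummy batch.
if __name__ == "__main__":
    model = AlexNet(class_dim=1000)
    model.eval()
    # Random NCHW batch; 224 x 224 RGB inputs match the 256 * 6 * 6 FC sizing above.
    fake_input = paddle.rand([1, 3, 224, 224])
    logits = model(fake_input)
    print(logits.shape)  # expected: [1, 1000]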