# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import paddle.fluid as fluid
from paddle.nn import Conv2d, Pool2D, Linear, ReLU, Sequential, Softmax

__all__ = ['LeNet']


class LeNet(fluid.dygraph.Layer):
    """LeNet model from
    `"LeCun Y, Bottou L, Bengio Y, et al. Gradient-based learning applied to document recognition[J]. Proceedings of the IEEE, 1998, 86(11): 2278-2324.`_

    Args:
        num_classes (int): Output dimension of the last fc layer. If num_classes <= 0,
            the fc layers will not be defined. Default: 10.
        classifier_activation (str): Activation for the last fc layer. Default: 'softmax'.

    Examples:
        .. code-block:: python

            from paddle.vision.models import LeNet

            model = LeNet()
    """

    def __init__(self, num_classes=10, classifier_activation='softmax'):
        super(LeNet, self).__init__()
        self.num_classes = num_classes
        self.features = Sequential(
            # For a 28 x 28 input (required by the 400-dim fc head below):
            # conv1: 1 x 28 x 28 -> 6 x 28 x 28, pooled to 6 x 14 x 14
            Conv2d(1, 6, 3, stride=1, padding=1),
            ReLU(),
            Pool2D(2, 'max', 2),
            # conv2: 6 x 14 x 14 -> 16 x 10 x 10, pooled to 16 x 5 x 5
            Conv2d(6, 16, 5, stride=1, padding=0),
            ReLU(),
            Pool2D(2, 'max', 2))

        if num_classes > 0:
            self.fc = Sequential(
                # 16 * 5 * 5 = 400 features after flattening.
                Linear(400, 120), Linear(120, 84), Linear(84, num_classes),
                Softmax())  # TODO: accept any classifier_activation

    def forward(self, inputs):
        x = self.features(inputs)

        if self.num_classes > 0:
            x = fluid.layers.flatten(x, 1)
            x = self.fc(x)
        return x
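

# A minimal smoke-test sketch: it assumes a Paddle 2.0-beta style environment
# where paddle.fluid dygraph is available, and an MNIST-shaped 28 x 28 input
# (the size implied by the 400-dim fc head). It simply checks the output shape.
if __name__ == '__main__':
    import numpy as np

    with fluid.dygraph.guard():
        model = LeNet()
        # Batch of 4 fake 1 x 28 x 28 grayscale images in NCHW layout.
        x = fluid.dygraph.to_variable(
            np.random.randn(4, 1, 28, 28).astype('float32'))
        out = model(x)
        print(out.shape)  # expected: [4, 10] softmax probabilities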