# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import division
import paddle.fluid as fluid
import numpy as np
from paddle.fluid.dygraph.nn import Conv2D,  Conv2DTranspose , BatchNorm ,Pool2D
import os

# cudnn is not better when batch size is 1.
use_cudnn = False


class conv2d(fluid.dygraph.Layer):
    """Conv2D -> optional BatchNorm -> optional LeakyReLU composite block.

    Args:
        num_channels (int): number of channels of the input feature map.
        num_filters (int): number of convolution output filters.
        filter_size (int): size of the (square) convolution kernel.
        stride (int): convolution stride.
        stddev (float): stddev of the Normal initializer for the conv weights.
        padding (int): zero-padding added to both sides of the input.
        norm (bool): if True, apply BatchNorm after the convolution.
        relu (bool): if True, apply leaky_relu as the activation.
        relufactor (float): negative slope of leaky_relu (0.0 == plain ReLU).
        use_bias (bool): if True, add a zero-initialized conv bias; otherwise
            the convolution has no bias term.
    """

    def __init__(self,
                 num_channels,
                 num_filters=64,
                 filter_size=7,
                 stride=1,
                 stddev=0.02,
                 padding=0,
                 norm=True,
                 relu=True,
                 relufactor=0.0,
                 use_bias=False):
        super(conv2d, self).__init__()

        # bias_attr=False disables the bias entirely; otherwise start at 0.
        if not use_bias:
            con_bias_attr = False
        else:
            con_bias_attr = fluid.ParamAttr(
                initializer=fluid.initializer.Constant(0.0))

        self.conv = Conv2D(
            num_channels=num_channels,
            num_filters=num_filters,
            filter_size=filter_size,
            stride=stride,
            padding=padding,
            use_cudnn=use_cudnn,
            param_attr=fluid.ParamAttr(
                initializer=fluid.initializer.NormalInitializer(
                    loc=0.0, scale=stddev)),
            bias_attr=con_bias_attr)
        if norm:
            # Scale ~ N(1.0, 0.02), shift = 0; trainable_statistics keeps
            # updating the running mean/variance even in eval mode.
            self.bn = BatchNorm(
                num_channels=num_filters,
                param_attr=fluid.ParamAttr(
                    initializer=fluid.initializer.NormalInitializer(1.0, 0.02)),
                bias_attr=fluid.ParamAttr(
                    initializer=fluid.initializer.Constant(0.0)),
                trainable_statistics=True)

        self.relufactor = relufactor
        self.use_bias = use_bias
        self.norm = norm
        self.relu = relu

    def forward(self, inputs):
        """Apply conv (+ BN, + leaky_relu) to `inputs` and return the result."""
        conv = self.conv(inputs)
        if self.norm:
            conv = self.bn(conv)
        if self.relu:
            conv = fluid.layers.leaky_relu(conv, alpha=self.relufactor)
        return conv


class DeConv2D(fluid.dygraph.Layer):
    """Conv2DTranspose -> pad2d -> optional BatchNorm -> optional LeakyReLU.

    Args:
        num_channels (int): number of channels of the input feature map.
        num_filters (int): number of transposed-convolution output filters.
        filter_size (int): size of the (square) kernel.
        stride (int): transposed-convolution stride.
        stddev (float): stddev of the Normal initializer for the weights.
        padding (list[int]): padding passed to Conv2DTranspose.
        outpadding (list[int]): [top, bottom, left, right] constant padding
            applied to the deconv output via pad2d.
        relu (bool): if True, apply leaky_relu as the activation.
        norm (bool): if True, apply BatchNorm after the padding step.
        relufactor (float): negative slope of leaky_relu (0.0 == plain ReLU).
        use_bias (bool): if True, add a zero-initialized bias; otherwise the
            transposed convolution has no bias term.
    """

    def __init__(self,
                 num_channels,
                 num_filters=64,
                 filter_size=7,
                 stride=1,
                 stddev=0.02,
                 padding=[0, 0],    # NOTE: mutable default, but only read, never mutated
                 outpadding=[0, 0, 0, 0],  # NOTE: mutable default, only read
                 relu=True,
                 norm=True,
                 relufactor=0.0,
                 use_bias=False):
        super(DeConv2D, self).__init__()

        # bias_attr=False disables the bias entirely; otherwise start at 0.
        if not use_bias:
            de_bias_attr = False
        else:
            de_bias_attr = fluid.ParamAttr(
                initializer=fluid.initializer.Constant(0.0))

        self._deconv = Conv2DTranspose(
            num_channels,
            num_filters,
            filter_size=filter_size,
            stride=stride,
            padding=padding,
            param_attr=fluid.ParamAttr(
                initializer=fluid.initializer.NormalInitializer(
                    loc=0.0, scale=stddev)),
            bias_attr=de_bias_attr)

        if norm:
            # Scale ~ N(1.0, 0.02), shift = 0; trainable_statistics keeps
            # updating the running mean/variance even in eval mode.
            self.bn = BatchNorm(
                num_channels=num_filters,
                param_attr=fluid.ParamAttr(
                    initializer=fluid.initializer.NormalInitializer(1.0, 0.02)),
                bias_attr=fluid.ParamAttr(
                    initializer=fluid.initializer.Constant(0.0)),
                trainable_statistics=True)

        self.outpadding = outpadding
        self.relufactor = relufactor
        self.use_bias = use_bias
        self.norm = norm
        self.relu = relu

    def forward(self, inputs):
        """Deconvolve `inputs`, pad, then optionally normalize and activate."""
        conv = self._deconv(inputs)
        conv = fluid.layers.pad2d(
            conv, paddings=self.outpadding, mode='constant', pad_value=0.0)

        if self.norm:
            conv = self.bn(conv)
        if self.relu:
            conv = fluid.layers.leaky_relu(conv, alpha=self.relufactor)
        return conv