# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import paddle.fluid as fluid
from paddle.fluid.dygraph import Conv2D, Pool2D
# Prefer SyncBatchNorm (useful for multi-GPU training) and fall back to the
# plain BatchNorm when it is not available in the installed Paddle version.
try:
    from paddle.fluid.dygraph import SyncBatchNorm as BatchNorm
except ImportError:
    from paddle.fluid.dygraph import BatchNorm


class UNet(fluid.dygraph.Layer):
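    """U-Net for semantic segmentation.

    A 4-level encoder/decoder with skip connections. In 'train' mode the
    forward pass returns the masked cross-entropy loss; otherwise it returns
    the predicted label map and the per-class score map.
    """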
    def __init__(self, num_classes, ignore_index=255):
        super().__init__()
        self.encode = UnetEncoder()
        self.decode = UnetDecode()
        self.get_logit = GetLogit(64, num_classes)
        self.ignore_index = ignore_index
        self.EPS = 1e-5

    def forward(self, x, label=None, mode='train'):
        encode_data, short_cuts = self.encode(x)
        decode_data = self.decode(encode_data, short_cuts)
        logit = self.get_logit(decode_data)
        if mode == 'train':
            return self._get_loss(logit, label)
        else:
            score_map = fluid.layers.softmax(logit, axis=1)
            score_map = fluid.layers.transpose(score_map, [0, 2, 3, 1])
            pred = fluid.layers.argmax(score_map, axis=3)
            pred = fluid.layers.unsqueeze(pred, axes=[3])
            return pred, score_map

    def _get_loss(self, logit, label):
        logit = fluid.layers.transpose(logit, [0, 2, 3, 1])
        label = fluid.layers.transpose(label, [0, 2, 3, 1])
        # Pixels labeled ignore_index are masked out of the loss.
        mask = label != self.ignore_index
        mask = fluid.layers.cast(mask, 'float32')
        # Logit and label were transposed to NHWC above, so the class
        # dimension is now the last axis.
        loss, probs = fluid.layers.softmax_with_cross_entropy(
            logit,
            label,
            ignore_index=self.ignore_index,
            return_softmax=True,
            axis=-1)

        loss = loss * mask
        avg_loss = fluid.layers.mean(loss) / (
            fluid.layers.mean(mask) + self.EPS)

        label.stop_gradient = True
        mask.stop_gradient = True
        return avg_loss


class UnetEncoder(fluid.dygraph.Layer):
    def __init__(self):
        super().__init__()
        self.double_conv = DoubleConv(3, 64)
        self.down1 = Down(64, 128)
        self.down2 = Down(128, 256)
        self.down3 = Down(256, 512)
        self.down4 = Down(512, 512)

    def forward(self, x):
        # Keep the feature map produced at each resolution so the decoder
        # can reuse it as a skip connection.
        short_cuts = []
        x = self.double_conv(x)
        short_cuts.append(x)
        x = self.down1(x)
        short_cuts.append(x)
        x = self.down2(x)
        short_cuts.append(x)
        x = self.down3(x)
        short_cuts.append(x)
        x = self.down4(x)
        return x, short_cuts


class UnetDecode(fluid.dygraph.Layer):
    def __init__(self):
        super().__init__()
        self.up1 = Up(512, 256)
        self.up2 = Up(256, 128)
        self.up3 = Up(128, 64)
        self.up4 = Up(64, 64)

    def forward(self, x, short_cuts):
        x = self.up1(x, short_cuts[3])
        x = self.up2(x, short_cuts[2])
        x = self.up3(x, short_cuts[1])
        x = self.up4(x, short_cuts[0])
        return x


class DoubleConv(fluid.dygraph.Layer):
    def __init__(self, num_channels, num_filters):
        super().__init__()
        self.conv0 = Conv2D(
            num_channels=num_channels,
            num_filters=num_filters,
            filter_size=3,
            stride=1,
            padding=1)
        self.bn0 = BatchNorm(num_channels=num_filters)
        self.conv1 = Conv2D(
            num_channels=num_filters,
            num_filters=num_filters,
            filter_size=3,
            stride=1,
            padding=1)
        self.bn1 = BatchNorm(num_channels=num_filters)

    def forward(self, x):
        x = self.conv0(x)
        x = self.bn0(x)
        x = fluid.layers.relu(x)
        x = self.conv1(x)
        x = self.bn1(x)
        x = fluid.layers.relu(x)
        return x


class Down(fluid.dygraph.Layer):
    def __init__(self, num_channels, num_filters):
        super().__init__()
        self.max_pool = Pool2D(
            pool_size=2, pool_type='max', pool_stride=2, pool_padding=0)
        self.double_conv = DoubleConv(num_channels, num_filters)

    def forward(self, x):
        x = self.max_pool(x)
        x = self.double_conv(x)
        return x


class Up(fluid.dygraph.Layer):
    def __init__(self, num_channels, num_filters):
        super().__init__()
        self.double_conv = DoubleConv(2 * num_channels, num_filters)

    def forward(self, x, short_cut):
        # Upsample x to the spatial size of the skip connection, then
        # concatenate along the channel axis before the double convolution.
        short_cut_shape = fluid.layers.shape(short_cut)
        x = fluid.layers.resize_bilinear(x, short_cut_shape[2:])
        x = fluid.layers.concat([x, short_cut], axis=1)
        x = self.double_conv(x)
        return x


class GetLogit(fluid.dygraph.Layer):
    def __init__(self, num_channels, num_classes):
        super().__init__()
        self.conv = Conv2D(
            num_channels=num_channels,
            num_filters=num_classes,
            filter_size=3,
            stride=1,
            padding=1)

    def forward(self, x):
        x = self.conv(x)
        return x
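

# The block below is an illustrative usage sketch, not part of the original
# module: it assumes a Paddle 1.x installation where the fluid dygraph APIs
# imported above are available, and uses an arbitrary input size and class
# count purely for demonstration.
if __name__ == '__main__':
    import numpy as np

    with fluid.dygraph.guard():
        model = UNet(num_classes=2)
        model.eval()
        # Random NCHW batch: 1 image, 3 channels, 64x64 pixels.
        image = fluid.dygraph.to_variable(
            np.random.rand(1, 3, 64, 64).astype('float32'))
        pred, score_map = model(image, mode='eval')
        # pred: (1, 64, 64, 1) label map; score_map: (1, 64, 64, 2) softmax scores.
        print(pred.shape, score_map.shape)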