Commit 8200c7ba authored by littletomatodonkey

fix dpn

Parent: a6f1cdbf
@@ -19,9 +19,10 @@ from __future__ import print_function
 
 import numpy as np
 import sys
 import paddle
-import paddle.fluid as fluid
-from paddle.fluid.param_attr import ParamAttr
-from paddle.fluid.dygraph.nn import Conv2D, Pool2D, BatchNorm, Linear
+from paddle import ParamAttr
+import paddle.nn as nn
+from paddle.nn import Conv2d, Pool2D, BatchNorm, Linear
+from paddle.nn.initializer import Uniform
 
 import math
@@ -35,7 +36,7 @@ __all__ = [
 ]
 
 
-class ConvBNLayer(fluid.dygraph.Layer):
+class ConvBNLayer(nn.Layer):
     def __init__(self,
                  num_channels,
                  num_filters,
@@ -47,15 +48,14 @@ class ConvBNLayer(fluid.dygraph.Layer):
                  name=None):
         super(ConvBNLayer, self).__init__()
 
-        self._conv = Conv2D(
-            num_channels=num_channels,
-            num_filters=num_filters,
-            filter_size=filter_size,
+        self._conv = Conv2d(
+            in_channels=num_channels,
+            out_channels=num_filters,
+            kernel_size=filter_size,
             stride=stride,
             padding=pad,
             groups=groups,
-            act=None,
-            param_attr=ParamAttr(name=name + "_weights"),
+            weight_attr=ParamAttr(name=name + "_weights"),
            bias_attr=False)
         self._batch_norm = BatchNorm(
             num_filters,
@@ -71,7 +71,7 @@ class ConvBNLayer(fluid.dygraph.Layer):
         return y
 
 
-class BNACConvLayer(fluid.dygraph.Layer):
+class BNACConvLayer(nn.Layer):
     def __init__(self,
                  num_channels,
                  num_filters,
@@ -83,7 +83,6 @@ class BNACConvLayer(fluid.dygraph.Layer):
                  name=None):
         super(BNACConvLayer, self).__init__()
         self.num_channels = num_channels
-        self.name = name
 
         self._batch_norm = BatchNorm(
             num_channels,
@@ -93,15 +92,14 @@ class BNACConvLayer(fluid.dygraph.Layer):
             moving_mean_name=name + '_bn_mean',
             moving_variance_name=name + '_bn_variance')
 
-        self._conv = Conv2D(
-            num_channels=num_channels,
-            num_filters=num_filters,
-            filter_size=filter_size,
+        self._conv = Conv2d(
+            in_channels=num_channels,
+            out_channels=num_filters,
+            kernel_size=filter_size,
             stride=stride,
             padding=pad,
             groups=groups,
-            act=None,
-            param_attr=ParamAttr(name=name + "_weights"),
+            weight_attr=ParamAttr(name=name + "_weights"),
             bias_attr=False)
 
     def forward(self, input):
@@ -110,7 +108,7 @@ class BNACConvLayer(fluid.dygraph.Layer):
         return y
 
 
-class DualPathFactory(fluid.dygraph.Layer):
+class DualPathFactory(nn.Layer):
     def __init__(self,
                  num_channels,
                  num_1x1_a,
@@ -183,14 +181,14 @@ class DualPathFactory(fluid.dygraph.Layer):
     def forward(self, input):
         # PROJ
         if isinstance(input, list):
-            data_in = fluid.layers.concat([input[0], input[1]], axis=1)
+            data_in = paddle.concat([input[0], input[1]], axis=1)
         else:
             data_in = input
 
         if self.has_proj:
             c1x1_w = self.c1x1_w_func(data_in)
-            data_o1, data_o2 = fluid.layers.split(
-                c1x1_w, num_or_sections=[self.num_1x1_c, 2 * self.inc], dim=1)
+            data_o1, data_o2 = paddle.split(
+                c1x1_w, num_or_sections=[self.num_1x1_c, 2 * self.inc], axis=1)
         else:
             data_o1 = input[0]
             data_o2 = input[1]
@@ -199,17 +197,17 @@ class DualPathFactory(fluid.dygraph.Layer):
         c3x3_b = self.c3x3_b_func(c1x1_a)
         c1x1_c = self.c1x1_c_func(c3x3_b)
 
-        c1x1_c1, c1x1_c2 = fluid.layers.split(
-            c1x1_c, num_or_sections=[self.num_1x1_c, self.inc], dim=1)
+        c1x1_c1, c1x1_c2 = paddle.split(
+            c1x1_c, num_or_sections=[self.num_1x1_c, self.inc], axis=1)
 
         # OUTPUTS
-        summ = fluid.layers.elementwise_add(x=data_o1, y=c1x1_c1)
-        dense = fluid.layers.concat([data_o2, c1x1_c2], axis=1)
+        summ = paddle.elementwise_add(x=data_o1, y=c1x1_c1)
+        dense = paddle.concat([data_o2, c1x1_c2], axis=1)
 
         # tensor, channels
         return [summ, dense]
 
 
-class DPN(fluid.dygraph.Layer):
+class DPN(nn.Layer):
     def __init__(self, layers=60, class_dim=1000):
         super(DPN, self).__init__()
@@ -310,9 +308,8 @@ class DPN(fluid.dygraph.Layer):
         self.out = Linear(
             out_channel,
             class_dim,
-            param_attr=ParamAttr(
-                initializer=fluid.initializer.Uniform(-stdv, stdv),
-                name="fc_weights"),
+            weight_attr=ParamAttr(
+                initializer=Uniform(-stdv, stdv), name="fc_weights"),
             bias_attr=ParamAttr(name="fc_offset"))
 
     def forward(self, input):
@@ -327,11 +324,11 @@ class DPN(fluid.dygraph.Layer):
                 convX_x_x = self.dpn_func_list[dpn_idx](convX_x_x)
                 dpn_idx += 1
 
-        conv5_x_x = fluid.layers.concat(convX_x_x, axis=1)
+        conv5_x_x = paddle.concat(convX_x_x, axis=1)
         conv5_x_x = self.conv5_x_x_bn(conv5_x_x)
 
         y = self.pool2d_avg(conv5_x_x)
-        y = fluid.layers.reshape(y, shape=[0, -1])
+        y = paddle.reshape(y, shape=[0, -1])
         y = self.out(y)
 
         return y
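
Below is a minimal, runnable sketch (not part of the commit) of the fluid-to-paddle-2.x dygraph API mapping this diff applies. The tensor shape, channel counts, and the "demo_weights" parameter name are hypothetical, chosen only for illustration, and it assumes the 2.0-beta class name nn.Conv2d used in the diff (renamed nn.Conv2D in later releases).

import paddle
import paddle.nn as nn
from paddle import ParamAttr

# paddle.disable_static()  # only needed on installs where static graph is still the default

x = paddle.rand([1, 4, 8, 8])  # hypothetical NCHW input

# fluid.dygraph.nn.Conv2D(num_channels, num_filters, filter_size, param_attr=...)
# maps to nn.Conv2d(in_channels, out_channels, kernel_size, weight_attr=...);
# the act argument is gone, so activations are applied as separate layers/ops.
conv = nn.Conv2d(
    in_channels=4,
    out_channels=6,
    kernel_size=3,
    padding=1,
    weight_attr=ParamAttr(name="demo_weights"),  # hypothetical parameter name
    bias_attr=False)
y = conv(x)  # shape [1, 6, 8, 8]

# fluid.layers.split(..., dim=1)   -> paddle.split(..., axis=1)
# fluid.layers.concat(..., axis=1) -> paddle.concat(..., axis=1)
# fluid.layers.reshape(...)        -> paddle.reshape(...)
a, b = paddle.split(y, num_or_sections=[2, 4], axis=1)
z = paddle.concat([a, b], axis=1)
z = paddle.reshape(z, shape=[0, -1])  # 0 keeps the batch dimension
print(z.shape)  # [1, 384]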