#!/usr/bin/env python
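"""AlexNet configuration for PaddlePaddle's trainer_config_helpers API.

The network takes 227x227 RGB images and produces a 1000-class softmax
output trained with cross-entropy loss; the batch size defaults to 128
and can be overridden through get_config_arg.
"""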

from paddle.trainer_config_helpers import *

height = 227
width = 227
num_class = 1000
batch_size = get_config_arg('batch_size', int, 128)

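# Data provider: samples come from the process() function in provider.py;
# "train.list" is the training file list and no test list is given.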
args = {'height': height, 'width': width, 'color': True, 'num_class': num_class}
define_py_data_sources2(
    "train.list", None, module="provider", obj="process", args=args)

settings(
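    # SGD with momentum 0.9; the learning rate and L2 regularization
    # coefficients are scaled by the batch size.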
    batch_size=batch_size,
    learning_rate=0.01 / batch_size,
    learning_method=MomentumOptimizer(0.9),
    regularization=L2Regularization(0.0005 * batch_size))

# input: 227x227 RGB image, flattened to height * width * 3
net = data_layer('data', size=height * width * 3)

# conv1
net = img_conv_layer(
    input=net,
    filter_size=11,
    num_channels=3,
    num_filters=96,
    stride=4,
    padding=1)
net = img_cmrnorm_layer(input=net, size=5, scale=0.0001, power=0.75)
net = img_pool_layer(input=net, pool_size=3, stride=2)

# conv2
net = img_conv_layer(
    input=net, filter_size=5, num_filters=256, stride=1, padding=2, groups=1)
net = img_cmrnorm_layer(input=net, size=5, scale=0.0001, power=0.75)
net = img_pool_layer(input=net, pool_size=3, stride=2)

# conv3
net = img_conv_layer(
    input=net, filter_size=3, num_filters=384, stride=1, padding=1)
# conv4
net = img_conv_layer(
    input=net, filter_size=3, num_filters=384, stride=1, padding=1, groups=1)

# conv5
net = img_conv_layer(
    input=net, filter_size=3, num_filters=256, stride=1, padding=1, groups=1)
net = img_pool_layer(input=net, pool_size=3, stride=2)

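# fc6 / fc7: two 4096-unit fully connected layers with ReLU and 50% dropout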
net = fc_layer(
    input=net,
    size=4096,
    act=ReluActivation(),
    layer_attr=ExtraAttr(drop_rate=0.5))
net = fc_layer(
    input=net,
    size=4096,
    act=ReluActivation(),
    layer_attr=ExtraAttr(drop_rate=0.5))
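# fc8: 1000-way fully connected layer with softmax output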
net = fc_layer(input=net, size=1000, act=SoftmaxActivation())

lab = data_layer('label', num_class)
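# cross-entropy classification loss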
loss = cross_entropy(input=net, label=lab)
outputs(loss)