'''
Test all activations.
'''

from paddle.trainer_config_helpers import *

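# Optimization settings shared by all layers in this test configuration.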
settings(learning_rate=1e-4, batch_size=1000)

din = data_layer(name='input', size=100)

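# Activation classes to exercise; each one is applied in its own fully connected layer below.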
acts = [
    TanhActivation, SigmoidActivation, SoftmaxActivation, IdentityActivation,
    LinearActivation, ExpActivation, ReluActivation, BReluActivation,
    SoftReluActivation, STanhActivation, AbsActivation, SquareActivation
]

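# Build one fc_layer per activation and register all of them as network outputs.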
outputs([
    fc_layer(
        input=din, size=100, act=act(), name="layer_%d" % i)
    for i, act in enumerate(acts)
])