Commit 08a817e3 authored by caoying03

Delete unnecessary parameters and make minor modifications to some mathematical layers.
Parent ce939b30
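For context, a minimal sketch of the pattern this commit applies (the LayerBase stub below is hypothetical and only illustrates the idea): the explicit `device` keyword is dropped from most subclass signatures on the assumption that the base class already accepts `device` and other extra keywords forwarded through `**xargs`, so configurations that still pass `device=...` keep working.

# Hypothetical sketch, not the actual LayerBase implementation: it shows why an
# explicit `device` parameter is redundant in subclasses when **xargs forwards
# unrecognized keywords to the base class.
class LayerBase(object):
    def __init__(self, name, type, size, inputs, device=None, **xargs):
        self.name = name
        self.type = type
        self.size = size
        self.inputs = inputs
        self.device = device  # still set when `device` arrives via **xargs


class TransLayer(LayerBase):
    # After the change there is no `device=None` in the signature ...
    def __init__(self, name, inputs, **xargs):
        # ... because `device`, if given by the config, travels inside **xargs.
        super(TransLayer, self).__init__(name, 'trans', 0, inputs=inputs, **xargs)


# A config call such as TransLayer('t', inputs=[...], device=-1) therefore still
# reaches the base class without the subclass naming `device` at all.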
@@ -1803,9 +1803,8 @@ class ConvTransLayer(ConvTransLayerBase):
 @config_layer('norm')
 class NormLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, **xargs):
-        super(NormLayer, self).__init__(
-            name, 'norm', 0, inputs=inputs, device=device, **xargs)
+    def __init__(self, name, inputs, **xargs):
+        super(NormLayer, self).__init__(name, 'norm', 0, inputs=inputs, **xargs)
         for input_index in xrange(len(self.inputs)):
             input_layer = self.get_input_layer(input_index)
             norm_conf = self.config.inputs[input_index].norm_conf
@@ -1817,9 +1816,8 @@ class NormLayer(LayerBase):
 @config_layer('pool')
 class PoolLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, **xargs):
-        super(PoolLayer, self).__init__(
-            name, 'pool', 0, inputs=inputs, device=device, **xargs)
+    def __init__(self, name, inputs, **xargs):
+        super(PoolLayer, self).__init__(name, 'pool', 0, inputs=inputs, **xargs)
         for input_index in xrange(len(self.inputs)):
             input_layer = self.get_input_layer(input_index)
             pool_conf = self.config.inputs[input_index].pool_conf
@@ -1851,7 +1849,6 @@ class BatchNormLayer(LayerBase):
                  inputs,
                  active_type="linear",
                  bias=True,
-                 device=None,
                  use_global_stats=True,
                  moving_average_fraction=0.9,
                  batch_norm_type=None,
@@ -1893,7 +1890,6 @@ class BatchNormLayer(LayerBase):
             0,
             active_type=active_type,
             inputs=inputs,
-            device=device,
             **xargs)
         if use_global_stats is not None:
@@ -1927,9 +1923,9 @@ class BatchNormLayer(LayerBase):
 @config_layer('trans')
 class TransLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, **xargs):
+    def __init__(self, name, inputs, **xargs):
         super(TransLayer, self).__init__(
-            name, 'trans', 0, inputs=inputs, device=device, **xargs)
+            name, 'trans', 0, inputs=inputs, **xargs)
         config_assert(
             len(self.inputs) == 1,
             'TransLayer must have one and only one input')
@@ -1938,9 +1934,9 @@ class TransLayer(LayerBase):
 @config_layer('resize')
 class ResizeLayer(LayerBase):
-    def __init__(self, name, size, inputs, device=None, **xargs):
+    def __init__(self, name, size, inputs, **xargs):
         super(ResizeLayer, self).__init__(
-            name, 'resize', size=size, inputs=inputs, device=device, **xargs)
+            name, 'resize', size=size, inputs=inputs, **xargs)
         config_assert(
             len(self.inputs) == 1,
             'ResizeLayer must have one and only one input')
@@ -2265,15 +2261,9 @@ def Generator(
 @config_layer('expand')
 class ExpandLayer(LayerBase):
-    def __init__(self,
-                 name,
-                 inputs,
-                 trans_type='non-seq',
-                 device=None,
-                 bias=False,
-                 **xargs):
+    def __init__(self, name, inputs, trans_type='non-seq', bias=False, **xargs):
         super(ExpandLayer, self).__init__(
-            name, 'expand', 0, inputs=inputs, device=device, **xargs)
+            name, 'expand', 0, inputs=inputs, **xargs)
         config_assert(
             len(self.inputs) == 2, 'ExpandLayer takes 2 and only 2 inputs')
         self.config.trans_type = trans_type
@@ -2304,12 +2294,10 @@ class MaxLayer(LayerBase):
                  inputs,
                  trans_type='non-seq',
                  active_type='linear',
-                 device=None,
                  bias=False,
                  output_max_index=None,
                  **xargs):
-        super(MaxLayer, self).__init__(
-            name, 'max', 0, inputs=inputs, device=device, **xargs)
+        super(MaxLayer, self).__init__(name, 'max', 0, inputs=inputs, **xargs)
         config_assert(len(self.inputs) == 1, 'MaxLayer must have 1 input')
         self.config.trans_type = trans_type
         self.config.active_type = active_type
@@ -2356,7 +2344,6 @@ class SequenceLastInstanceLayer(LayerBase):
                  inputs,
                  active_type='linear',
                  trans_type='non-seq',
-                 device=None,
                  bias=False,
                  **xargs):
         super(SequenceLastInstanceLayer, self).__init__(
@@ -2364,7 +2351,6 @@ class SequenceLastInstanceLayer(LayerBase):
             'seqlastins',
             0,
             inputs=inputs,
-            device=device,
             active_type=active_type,
             **xargs)
         config_assert(
@@ -2378,39 +2364,32 @@ class SequenceLastInstanceLayer(LayerBase):
 @config_layer('seqfirstins')
 class SequenceFirstInstanceLayer(SequenceLastInstanceLayer):
-    def __init__(
-            self,
-            name,
-            inputs,
-            active_type='linear',
-            trans_type='non-seq',
-            device=None,
-            bias=False, ):
+    def __init__(self,
+                 name,
+                 inputs,
+                 active_type='linear',
+                 trans_type='non-seq',
+                 bias=False,
+                 **xargs):
         super(SequenceFirstInstanceLayer, self).__init__(
             name,
             inputs=inputs,
             active_type=active_type,
             device=device,
-            bias=bias)
+            bias=bias,
+            **xargs)
         self.config.trans_type = trans_type
         self.config.select_first = True


 @config_layer('seqconcat')
 class SequenceConcatLayer(LayerBase):
-    def __init__(self,
-                 name,
-                 inputs,
-                 active_type='linear',
-                 device=None,
-                 bias=False,
-                 **xargs):
+    def __init__(self, name, inputs, active_type='linear', bias=False, **xargs):
         super(SequenceConcatLayer, self).__init__(
             name,
             'seqconcat',
             0,
             inputs=inputs,
-            device=device,
             active_type=active_type,
             **xargs)
         config_assert(
@@ -2428,7 +2407,6 @@ class SequenceReshapeLayer(LayerBase):
                  size,
                  inputs,
                  active_type='linear',
-                 device=None,
                  bias=False,
                  **xargs):
         super(SequenceReshapeLayer, self).__init__(
@@ -2436,7 +2414,6 @@ class SequenceReshapeLayer(LayerBase):
             'seqreshape',
             size,
             inputs=inputs,
-            device=device,
             active_type=active_type,
             **xargs)
         config_assert(
@@ -2447,21 +2424,9 @@ class SequenceReshapeLayer(LayerBase):
 @config_layer('subseq')
 class SubSequenceLayer(LayerBase):
-    def __init__(self,
-                 name,
-                 inputs,
-                 active_type='linear',
-                 device=None,
-                 bias=False,
-                 **xargs):
+    def __init__(self, name, inputs, active_type='linear', bias=False, **xargs):
         super(SubSequenceLayer, self).__init__(
-            name,
-            'subseq',
-            0,
-            inputs=inputs,
-            device=device,
-            active_type=active_type,
-            **xargs)
+            name, 'subseq', 0, inputs=inputs, active_type=active_type, **xargs)
         config_assert(len(inputs) == 3, 'SubSequenceLayer must have 3 inputs')
         input_layer0 = self.get_input_layer(0)
         size = input_layer0.size
@@ -2471,9 +2436,9 @@ class SubSequenceLayer(LayerBase):
 @config_layer('out_prod')
 class OuterProdLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, **xargs):
+    def __init__(self, name, inputs, device=None):
         super(OuterProdLayer, self).__init__(
-            name, 'out_prod', 0, inputs=inputs, device=device, **xargs)
+            name, 'out_prod', 0, inputs=inputs, device=device)
         config_assert(len(inputs) == 2, 'OuterProdLayer must have 2 inputs')
         input_layer0 = self.get_input_layer(0)
         input_layer1 = self.get_input_layer(1)
@@ -2482,9 +2447,9 @@ class OuterProdLayer(LayerBase):
 @config_layer('power')
 class PowerLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, **xargs):
+    def __init__(self, name, inputs, device=None):
         super(PowerLayer, self).__init__(
-            name, 'power', 0, inputs=inputs, device=device, **xargs)
+            name, 'power', 0, inputs=inputs, device=device)
         config_assert(len(inputs) == 2, 'PowerLayer must have 2 inputs')
         input_layer1 = self.get_input_layer(1)
         self.set_layer_size(input_layer1.size)
@@ -2495,8 +2460,13 @@ class PowerLayer(LayerBase):
 @config_layer('slope_intercept')
 class SlopeInterceptLayer(LayerBase):
-    def __init__(self, name, inputs, slope=1.0, intercept=0.0,
-                 device=None, **xargs):
+    def __init__(self,
+                 name,
+                 inputs,
+                 slope=1.0,
+                 intercept=0.0,
+                 device=None,
+                 **xargs):
         super(SlopeInterceptLayer, self).__init__(
             name, 'slope_intercept', 0, inputs=inputs, device=device, **xargs)
         self.config.slope = slope
@@ -2508,9 +2478,9 @@ class SlopeInterceptLayer(LayerBase):
 @config_layer('scaling')
 class ScalingLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, **xargs):
+    def __init__(self, name, inputs, device=None):
         super(ScalingLayer, self).__init__(
-            name, 'scaling', 0, inputs=inputs, device=device, **xargs)
+            name, 'scaling', 0, inputs=inputs, device=device)
         config_assert(len(inputs) == 2, 'ScalingLayer must have 2 inputs')
         input_layer1 = self.get_input_layer(1)
         self.set_layer_size(input_layer1.size)
@@ -2521,9 +2491,9 @@ class ScalingLayer(LayerBase):
 @config_layer('conv_shift')
 class ConvShiftLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, **xargs):
+    def __init__(self, name, inputs, device=None):
         super(ConvShiftLayer, self).__init__(
-            name, 'conv_shift', 0, inputs=inputs, device=device, **xargs)
+            name, 'conv_shift', 0, inputs=inputs, device=device)
         config_assert(len(inputs) == 2, 'ConvShiftLayer must have 2 inputs')
         input_layer0 = self.get_input_layer(0)
         self.set_layer_size(input_layer0.size)
@@ -2531,9 +2501,9 @@ class ConvShiftLayer(LayerBase):
 @config_layer('convex_comb')
 class ConvexCombinationLayer(LayerBase):
-    def __init__(self, name, size, inputs, device=None, **xargs):
+    def __init__(self, name, size, inputs, device=None):
         super(ConvexCombinationLayer, self).__init__(
-            name, 'convex_comb', size, inputs=inputs, device=device, **xargs)
+            name, 'convex_comb', size, inputs=inputs, device=device)
         config_assert(
             len(self.inputs) == 2, 'ConvexCombinationLayer must have 2 inputs')
         config_assert(
@@ -2572,9 +2542,9 @@ class BilinearInterpLayer(LayerBase):
 @config_layer('sum_to_one_norm')
 class SumToOneNormLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, **xargs):
+    def __init__(self, name, inputs, device=None):
         super(SumToOneNormLayer, self).__init__(
-            name, 'sum_to_one_norm', 0, inputs=inputs, device=device, **xargs)
+            name, 'sum_to_one_norm', 0, inputs=inputs, device=device)
         config_assert(
             len(self.inputs) == 1, 'SumToOneNormLayer must have 1 input')
         input_layer0 = self.get_input_layer(0)
@@ -2619,17 +2589,10 @@ class AverageLayer(LayerBase):
                  average_strategy='average',
                  trans_type='non-seq',
                  active_type='linear',
-                 device=None,
                  bias=False,
                  **xargs):
         super(AverageLayer, self).__init__(
-            name,
-            'average',
-            0,
-            inputs=inputs,
-            device=device,
-            active_type=active_type,
-            **xargs)
+            name, 'average', 0, inputs=inputs, active_type=active_type, **xargs)
         self.config.average_strategy = average_strategy
         self.config.trans_type = trans_type
         config_assert(len(inputs) == 1, 'AverageLayer must have 1 input')
@@ -2653,9 +2616,9 @@ class CosSimLayer(LayerBase):
 @config_layer('tensor')
 class TensorLayer(LayerBase):
-    def __init__(self, name, size, inputs, device=None, bias=True, **xargs):
+    def __init__(self, name, size, inputs, bias=True, **xargs):
         super(TensorLayer, self).__init__(
-            name, 'tensor', size, inputs=inputs, device=device, **xargs)
+            name, 'tensor', size, inputs=inputs, **xargs)
         config_assert(len(self.inputs) == 2, 'TensorLayer must have 2 inputs')
         config_assert(size > 0, 'size must be positive')
         config_assert(inputs[1].parameter_name == None,
......