Commit a0e77692, authored by xzl

Embarrassed, I forgot to do the type check

Parent 1906e63f
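The diff below adds the missing argument checks in two places: the config-parser layer class and the prelu_layer wrapper. As a minimal standalone sketch of what is being checked, assuming PaddlePaddle v1's LayerOutput and ParameterAttribute classes (the helper function itself is hypothetical, not part of the diff):

    # Hypothetical standalone version of the checks this commit adds;
    # LayerOutput and ParameterAttribute are real v1 PaddlePaddle classes,
    # but this helper function is illustrative only.
    from paddle.trainer_config_helpers import LayerOutput, ParameterAttribute

    def check_prelu_args(input, param_attr):
        # prelu takes exactly one LayerOutput, not a list of layers.
        assert isinstance(input, LayerOutput), \
            'prelu_layer accepts only one input.'
        # param_attr, when supplied, must already be a ParameterAttribute.
        if param_attr is not None:
            assert isinstance(param_attr, ParameterAttribute)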
@@ -2048,6 +2048,7 @@ class ParameterReluLayer(LayerBase):
     def __init__(self, name, inputs, partial_sum=1, **args):
         super(ParameterReluLayer, self).__init__(
             name, self.layer_type, 0, inputs=inputs, **args)
         input_layer = self.get_input_layer(0)
+        config_assert(len(self.inputs) == 1, "prelu layer has only one input.")
         config_assert(input_layer.size % partial_sum == 0,
...
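The config_assert above enforces that the input width divides evenly into groups: ParameterReluLayer learns one slope per group of partial_sum consecutive activations (partial_sum=1 gives element-wise PReLU; partial_sum equal to the layer size gives a single shared slope). A minimal NumPy sketch of that forward pass, assuming these grouping semantics (function and variable names are illustrative, not PaddlePaddle code):

    import numpy as np

    # Illustrative PReLU forward pass with slope sharing: one learned slope
    # per group of `partial_sum` consecutive activations.
    def prelu_forward(x, alpha, partial_sum):
        size = x.shape[-1]
        assert size % partial_sum == 0, "a wrong setting for partial_sum"
        slopes = np.repeat(alpha, partial_sum)  # one slope per unit, shape (size,)
        return np.where(x > 0, x, slopes * x)

    x = np.array([[-2.0, -1.0, 1.0, 2.0]])
    alpha = np.array([0.25, 0.5])                  # size / partial_sum = 2 slopes
    print(prelu_forward(x, alpha, partial_sum=2))  # [[-0.5 -0.25 1. 2.]]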
@@ -6442,9 +6442,9 @@ def prelu_layer(input,
     """
+    assert isinstance(input, LayerOutput), 'prelu_layer accepts only one input.'
     if not param_attr:
-        param_attr = ParamAttr(initial_mean=0.25,
-                               initial_std=0.0)
+        param_attr = ParamAttr(initial_mean=0.25, initial_std=0.0)
     else:
         assert isinstance(param_attr, ParameterAttribute)
@@ -6469,7 +6469,7 @@ def prelu_layer(input,
         name=name,
         layer_type=LayerType.PRELU,
         parents=input,
-        num_filters = num_channels,
+        num_filters=num_channels,
         size=l.config.size)
...
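After the fix, a call site must pass a single LayerOutput. The default param_attr initializes every slope to a constant 0.25 (initial_std=0.0 makes it deterministic), the starting value used in He et al.'s PReLU paper. A minimal usage sketch against the v1 trainer-config API; the data_layer name and size are placeholders, and the partial_sum keyword is assumed to be exposed by prelu_layer as shown in the config-parser hunk:

    # Hypothetical v1 PaddlePaddle config snippet; 'image' and 256 are
    # placeholder values, other names follow the diff above.
    from paddle.trainer_config_helpers import *

    data = data_layer(name='image', size=256)
    # One slope per activation (partial_sum=1); slopes start at 0.25 via the
    # default ParamAttr(initial_mean=0.25, initial_std=0.0).
    out = prelu_layer(input=data, name='prelu', partial_sum=1)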