From a0e77692f3c03cc45e3f82af9bfd64fb814a2fdc Mon Sep 17 00:00:00 2001 From: xzl Date: Tue, 14 Nov 2017 22:13:33 +0800 Subject: [PATCH] Embarrassed, I forgot to do the type check --- python/paddle/trainer/config_parser.py | 1 + python/paddle/trainer_config_helpers/layers.py | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/python/paddle/trainer/config_parser.py b/python/paddle/trainer/config_parser.py index 54245ff03e..43b83b4823 100644 --- a/python/paddle/trainer/config_parser.py +++ b/python/paddle/trainer/config_parser.py @@ -2048,6 +2048,7 @@ class ParameterReluLayer(LayerBase): def __init__(self, name, inputs, partial_sum=1, **args): super(ParameterReluLayer, self).__init__( name, self.layer_type, 0, inputs=inputs, **args) + input_layer = self.get_input_layer(0) config_assert(len(self.inputs) == 1, "prelu layer has only one input.") config_assert(input_layer.size % partial_sum == 0, diff --git a/python/paddle/trainer_config_helpers/layers.py b/python/paddle/trainer_config_helpers/layers.py index ccd9a728cf..5ace7598dc 100644 --- a/python/paddle/trainer_config_helpers/layers.py +++ b/python/paddle/trainer_config_helpers/layers.py @@ -6442,9 +6442,9 @@ def prelu_layer(input, """ assert isinstance(input, LayerOutput), 'prelu_layer accepts only one input.' + if not param_attr: - param_attr = ParamAttr(initial_mean=0.25, - initial_std=0.0) + param_attr = ParamAttr(initial_mean=0.25, initial_std=0.0) else: assert isinstance(param_attr, ParameterAttribute) @@ -6469,7 +6469,7 @@ def prelu_layer(input, name=name, layer_type=LayerType.PRELU, parents=input, - num_filters = num_channels, + num_filters=num_channels, size=l.config.size) -- GitLab