From a6c53fc2fcef380784829cfb29764e1a6458827d Mon Sep 17 00:00:00 2001
From: wanghaoshuang
Date: Wed, 19 Jul 2017 17:32:05 +0800
Subject: [PATCH] fix python wrapper bugs

---
 python/paddle/trainer/config_parser.py         | 4 ++--
 python/paddle/trainer_config_helpers/layers.py | 9 +++++++--
 2 files changed, 9 insertions(+), 4 deletions(-)

diff --git a/python/paddle/trainer/config_parser.py b/python/paddle/trainer/config_parser.py
index 6e2f2182340..0a466380aeb 100644
--- a/python/paddle/trainer/config_parser.py
+++ b/python/paddle/trainer/config_parser.py
@@ -3187,8 +3187,8 @@ class SwitchOrderLayer(LayerBase):
     def __init__(self, name, inputs, reshape, **xargs):
         super(SwitchOrderLayer, self).__init__(
             name, 'switch_order', 0, inputs=inputs, **xargs)
-        self.conf.reshape_conf.heightAxis_ = reshape['height']
-        self.conf.reshape_conf.widthAxis_ = reshape['width']
+        self.config.reshape_conf.heightAxis.extend(reshape['height'])
+        self.config.reshape_conf.widthAxis.extend(reshape['width'])
 
 
 # Deprecated, use a new layer specific class instead
diff --git a/python/paddle/trainer_config_helpers/layers.py b/python/paddle/trainer_config_helpers/layers.py
index 1f5b9e999c7..0bcfbe1e0c7 100755
--- a/python/paddle/trainer_config_helpers/layers.py
+++ b/python/paddle/trainer_config_helpers/layers.py
@@ -5976,7 +5976,11 @@ def gated_unit_layer(input,
 
 @layer_support()
 @wrap_name_default('switch_order')
-def switch_order_layer(input, name=None, reshape=None, layer_attr=None):
+def switch_order_layer(input,
+                       name=None,
+                       reshape=None,
+                       act=None,
+                       layer_attr=None):
     """
     This layer switch dimension order of image input.
     From order "batchSize, channels, height, width"
@@ -6000,9 +6004,10 @@ def switch_order_layer(input, name=None, reshape=None, layer_attr=None):
     assert isinstance(input, LayerOutput)
     l = Layer(
         name=name,
-        inputs=input,
+        inputs=input.name,
         reshape=reshape,
         type=LayerType.SWITCH_ORDER_LAYER,
+        active_type=act.name,
         **ExtraLayerAttribute.to_kwargs(layer_attr))
     return LayerOutput(
         name=name,
--
GitLab
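
Note (not part of the patch): a minimal usage sketch of switch_order_layer after
this fix, assuming the v2 trainer_config_helpers API. The data layer name/size
and the axis grouping in 'reshape' below are illustrative assumptions, not taken
from the patch.

    from paddle.trainer_config_helpers import *

    # Hypothetical NCHW image input; name and size are assumptions.
    data = data_layer(name='image', size=3 * 32 * 32)

    switched = switch_order_layer(
        input=data,
        name='switch_order',
        # 'reshape' lists which input axes form the new height/width dims;
        # config_parser extends heightAxis/widthAxis from these lists.
        reshape={'height': [0, 1, 2], 'width': [3]},
        # 'act' must be an activation object here, since the wrapper reads
        # act.name when building the layer config.
        act=LinearActivation())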