From 91a0c11b194b324fb6abbbcdb0bab48720f1a8d6 Mon Sep 17 00:00:00 2001 From: chengduoZH Date: Fri, 8 Sep 2017 17:24:25 +0800 Subject: [PATCH] Adaptive data structure for SwitchOrderLayer --- paddle/gserver/layers/SwitchOrderLayer.cpp | 7 +++++-- proto/ModelConfig.proto | 1 + python/paddle/trainer_config_helpers/layers.py | 10 ++++++++-- 3 files changed, 14 insertions(+), 4 deletions(-) diff --git a/paddle/gserver/layers/SwitchOrderLayer.cpp b/paddle/gserver/layers/SwitchOrderLayer.cpp index 92cd61cdd..d7eee6eaf 100644 --- a/paddle/gserver/layers/SwitchOrderLayer.cpp +++ b/paddle/gserver/layers/SwitchOrderLayer.cpp @@ -24,10 +24,12 @@ bool SwitchOrderLayer::init(const LayerMap& layerMap, /* Initialize the basic parent class */ Layer::init(layerMap, parameterMap); auto& img_conf = config_.inputs(0).image_conf(); + size_t inD = img_conf.img_size_z(); size_t inH = img_conf.has_img_size_y() ? img_conf.img_size_y() : img_conf.img_size(); size_t inW = img_conf.img_size(); size_t inC = img_conf.channels(); + inH = inH * inD; inDims_ = TensorShape({0, inC, inH, inW}); outDims_ = TensorShape(4); @@ -64,9 +66,10 @@ void SwitchOrderLayer::setInDims() { MatrixPtr input = inputLayers_[0]->getOutputValue(); size_t batchSize = input->getHeight(); inDims_.setDim(0, batchSize); - + int d = inputLayers_[0]->getOutput().getFrameDepth(); + d = (d == 0 ? 1 : d); int h = inputLayers_[0]->getOutput().getFrameHeight(); - if (h != 0) inDims_.setDim(2, h); + if (h != 0) inDims_.setDim(2, h * d); int w = inputLayers_[0]->getOutput().getFrameWidth(); if (w != 0) inDims_.setDim(3, w); int totalCount = input->getElementCnt(); diff --git a/proto/ModelConfig.proto b/proto/ModelConfig.proto index 7d7fc23a4..2b3a8d6dc 100644 --- a/proto/ModelConfig.proto +++ b/proto/ModelConfig.proto @@ -271,6 +271,7 @@ message ImageConfig { // The size of input feature map. 
required uint32 img_size = 8; optional uint32 img_size_y = 9; + optional uint32 img_size_z = 10 [ default = 1 ]; } message PriorBoxConfig { diff --git a/python/paddle/trainer_config_helpers/layers.py b/python/paddle/trainer_config_helpers/layers.py index cba45bd3a..9a43cb170 100644 --- a/python/paddle/trainer_config_helpers/layers.py +++ b/python/paddle/trainer_config_helpers/layers.py @@ -6410,7 +6410,7 @@ def gated_unit_layer(input, @wrap_name_default('switch_order') def switch_order_layer(input, name=None, - reshape=None, + reshape_axis=None, act=None, layer_attr=None): """ @@ -6421,8 +6421,9 @@ def switch_order_layer(input, The example usage is: .. code-block:: python + reshape_axis = 3 + switch = switch_order(input=layer, name='switch', reshape_axis=reshape_axis) reshape = {'height':[ 0, 1, 2], 'width':[3]} - switch = switch_order(input=layer, name='switch', reshape=reshape) :param input: The input layer. :type input: LayerOutput @@ -6434,6 +6435,11 @@ def switch_order_layer(input, :return: LayerOutput :rtype: LayerOutput """ assert isinstance(input, LayerOutput) + assert reshape_axis is not None and 0 < reshape_axis < 4 + height = [ele for ele in range(reshape_axis)] + width = [ele for ele in range(reshape_axis, 4)] + reshape = {'height': height, 'width': width} + l = Layer( name=name, inputs=input.name, -- GitLab