diff --git a/python/paddle/trainer/config_parser.py b/python/paddle/trainer/config_parser.py
index 32e31fe2c446fb5d5e2df0819749a60cb8afdfd2..57d30b088b873a94a11483aea536a9e4f6493129 100644
--- a/python/paddle/trainer/config_parser.py
+++ b/python/paddle/trainer/config_parser.py
@@ -138,14 +138,7 @@ def init_config_environment(
         g_root_submodel=None,
         g_submodel_map={},
         g_submodel_stack=[],
-        g_add_submodel_suffix=False,
-
-        # Whether current layer needs to pass the image height and width.
-        # Default value is true, but if it encounters recurrent_layer_group,
-        # it will be false. The reason is that image is converted to be sequence,
-        # image height will be sequence length, and image width will be feature
-        # length of each timestep.
-        g_pass_height_width=True, ):
+        g_add_submodel_suffix=False, ):
 
     for k, v in locals().iteritems():
         globals()[k] = copy.deepcopy(v)
@@ -1437,12 +1430,6 @@ class LayerBase(object):
 
         g_current_submodel.layer_names.append(self.config.name)
 
-        if self.config.type != 'data' and g_pass_height_width:
-            height = self.get_input_layer(0).height
-            width = self.get_input_layer(0).width
-            if height and width:
-                self.set_layer_height_width(height, width)
-
     def get_input_layer(self, input_index):
         return g_layer_map[self.config.inputs[input_index].input_layer_name]
 
@@ -3164,8 +3151,6 @@ class WarpCTCLayer(LayerBase):
 @config_layer('recurrent_layer_group')
 class RecurrentLayerGroup(LayerBase):
     def __init__(self, name, device=None):
-        global g_pass_height_width
-        g_pass_height_width = False
         super(RecurrentLayerGroup, self).__init__(
             name, 'recurrent_layer_group', 0, inputs=[], device=device)
 
diff --git a/python/paddle/trainer_config_helpers/tests/configs/protostr/test_bilinear_interp.protostr b/python/paddle/trainer_config_helpers/tests/configs/protostr/test_bilinear_interp.protostr
index 9fae596f281d44dc24c45cb3c750233266e95948..fd5224ca55cd1f642ca2f927f867a7cbf8a47cf6 100644
--- a/python/paddle/trainer_config_helpers/tests/configs/protostr/test_bilinear_interp.protostr
+++ b/python/paddle/trainer_config_helpers/tests/configs/protostr/test_bilinear_interp.protostr
@@ -90,8 +90,6 @@ layers {
     input_layer_name: "__pool_0__"
     input_parameter_name: "___fc_layer_0__.w0"
   }
-  height: 32
-  width: 32
 }
 parameters {
   name: "___conv_0__.w0"
diff --git a/python/paddle/trainer_config_helpers/tests/configs/protostr/test_maxout.protostr b/python/paddle/trainer_config_helpers/tests/configs/protostr/test_maxout.protostr
index c763a95f9d1aefa022f38e0beef6d1c86ebb360d..03f4f3a31d6c222d949f64341bb8ac4c2a56fc5a 100644
--- a/python/paddle/trainer_config_helpers/tests/configs/protostr/test_maxout.protostr
+++ b/python/paddle/trainer_config_helpers/tests/configs/protostr/test_maxout.protostr
@@ -153,8 +153,6 @@ layers {
       img_size_y: 0
     }
   }
-  height: 24
-  width: 24
 }
 layers {
   name: "__fc_layer_0__"
@@ -165,8 +163,6 @@ layers {
     input_layer_name: "__block_expand_layer_0__"
     input_parameter_name: "___fc_layer_0__.w0"
   }
-  height: 24
-  width: 24
 }
 parameters {
   name: "___conv_0__.w0"