Commit c31aed4d authored by qingqing01, committed by GitHub

Merge pull request #2102 from qingqing01/config_parse

Remove g_pass_height_width in config_parse.py
@@ -138,14 +138,7 @@ def init_config_environment(
         g_root_submodel=None,
         g_submodel_map={},
         g_submodel_stack=[],
-        g_add_submodel_suffix=False,
-        # Whether current layer needs to pass the image height and width.
-        # Default value is true, but if it encounters recurrent_layer_group,
-        # it will be false. The reason is that image is converted to be sequence,
-        # image height will be sequence length, and image width will be feature
-        # length of each timestep.
-        g_pass_height_width=True, ):
+        g_add_submodel_suffix=False, ):
     for k, v in locals().iteritems():
         globals()[k] = copy.deepcopy(v)
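For context, init_config_environment works by copying every keyword argument into the module's global namespace; the deepcopy ensures the shared mutable defaults cannot leak state between calls. A minimal runnable sketch of the same pattern (with hypothetical arguments, and Python 3's items() in place of the original iteritems()):

import copy

def init_config_environment(g_config=None, g_layer_map={}, g_submodel_stack=[]):
    # Publish each keyword argument as a module-level global. The deepcopy
    # means the mutable default objects are never shared, so every call
    # resets the globals to a fresh state.
    for k, v in locals().items():
        globals()[k] = copy.deepcopy(v)

init_config_environment()
g_layer_map['conv'] = object()   # mutate the global...
init_config_environment()
assert g_layer_map == {}         # ...and a re-init restores a clean copy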
@@ -1437,12 +1430,6 @@ class LayerBase(object):
         g_current_submodel.layer_names.append(self.config.name)
-        if self.config.type != 'data' and g_pass_height_width:
-            height = self.get_input_layer(0).height
-            width = self.get_input_layer(0).width
-            if height and width:
-                self.set_layer_height_width(height, width)

     def get_input_layer(self, input_index):
         return g_layer_map[self.config.inputs[input_index].input_layer_name]
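The block removed above was the core of the feature: every layer except a data layer inherited height and width from its first input whenever g_pass_height_width was set. A stripped-down re-creation of that propagation logic, using a hypothetical Layer class rather than the real LayerBase:

class Layer(object):
    def __init__(self, type, height=0, width=0, inputs=None):
        self.type = type
        self.height = height
        self.width = width
        self.inputs = inputs or []

    def set_layer_height_width(self, height, width):
        self.height = height
        self.width = width

data = Layer('data', height=32, width=32)
fc = Layer('fc', inputs=[data])

# The removed behavior: copy (height, width) from the first input layer.
if fc.type != 'data' and fc.inputs:
    h, w = fc.inputs[0].height, fc.inputs[0].width
    if h and w:
        fc.set_layer_height_width(h, w)

assert (fc.height, fc.width) == (32, 32)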
@@ -3164,8 +3151,6 @@ class WarpCTCLayer(LayerBase):
 @config_layer('recurrent_layer_group')
 class RecurrentLayerGroup(LayerBase):
     def __init__(self, name, device=None):
-        global g_pass_height_width
-        g_pass_height_width = False
         super(RecurrentLayerGroup, self).__init__(
             name, 'recurrent_layer_group', 0, inputs=[], device=device)
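With the flag gone, RecurrentLayerGroup no longer needs to disable it. The deleted comment in the first hunk gives the original rationale: inside a recurrent layer group the image is converted to a sequence, so the image height becomes the sequence length and the image width becomes the feature length of each timestep. A tiny sketch of that reinterpretation (the 32x28 shape is just an example):

image = [[0.0] * 28 for _ in range(32)]   # height=32, width=28

sequence = image                # unrolled: one timestep per image row
seq_len = len(sequence)         # 32 -- the former image height
feature_dim = len(sequence[0])  # 28 -- the former image width

assert (seq_len, feature_dim) == (32, 28)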
......
@@ -90,8 +90,6 @@ layers {
     input_layer_name: "__pool_0__"
     input_parameter_name: "___fc_layer_0__.w0"
   }
-  height: 32
-  width: 32
 }
 parameters {
   name: "___conv_0__.w0"
......
@@ -153,8 +153,6 @@ layers {
       img_size_y: 0
     }
   }
-  height: 24
-  width: 24
 }
 layers {
   name: "__fc_layer_0__"
@@ -165,8 +163,6 @@ layers {
     input_layer_name: "__block_expand_layer_0__"
     input_parameter_name: "___fc_layer_0__.w0"
   }
-  height: 24
-  width: 24
 }
 parameters {
   name: "___conv_0__.w0"
......