Unverified commit 35fe7f49, authored by Jason, committed by GitHub

Merge pull request #524 from SunAhong1993/develop

fix the parameter path string
......@@ -510,7 +510,7 @@ class PaddleGraph(object):
comment_list = list()
comment_list.append("# There are {} inputs.".format(len(self.inputs_info)))
for k, v in self.inputs_info.items():
comment_list.append("# {}: shape-{},type-{}。".format(k, v[0], v[1]))
comment_list.append("# {}: shape-{}, type-{}.".format(k, v[0], v[1]))
self.run_func.extend(
gen_codes(
comment_list,
......@@ -518,7 +518,7 @@ class PaddleGraph(object):
use_structured_name = False if self.source_type in ["tf"] else True
self.run_func.extend(
gen_codes(["paddle.disable_static()",
"params = paddle.load('{}/model.pdparams')".format(osp.abspath(code_dir)),
"params = paddle.load('{}')".format(osp.join(osp.abspath(code_dir), "model.pdparams")),
"model = {}()".format(self.name),
"model.set_dict(params, use_structured_name={})".format(use_structured_name),
"model.eval()",
......
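Note on the hunk above: the hard-coded `'{}/model.pdparams'` format string is replaced by an `osp.join` over the absolute code directory, so the path separator is no longer baked into the string. A minimal standalone sketch of the resulting path construction (the `code_dir` value here is illustrative, not taken from the PR):

```python
import os.path as osp

code_dir = "pd_model/code"  # illustrative directory, not from the PR
params_path = osp.join(osp.abspath(code_dir), "model.pdparams")
# The generated loader line then becomes:
print("params = paddle.load('{}')".format(params_path))
```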
......@@ -64,6 +64,5 @@ class TraceDecoder(Decoder):
print(e)
exit(0)
self.graph = self._optimize_graph(self.script.inlined_graph)
self.input_examples = input_examples
self.input_examples = input_examples
......@@ -2043,3 +2043,50 @@ class OpSet9():
inputs={"x": val_x.name},
outputs=layer_outputs,
**layer_attrs)
@print_mapping_info
def DepthToSpace(self, node):
val_x = self.graph.get_input_node(node, idx=0, copy=True)
blocksize = node.get_attr('blocksize')
mode = node.get_attr('mode', "DCR")
val_x_shape = val_x.out_shapes[0]
b, c, h, w = val_x_shape
if mode == "DCR":
self.paddle_graph.add_layer(
'paddle.reshape',
inputs={"x": val_x.name},
outputs=[node.name],
shape=[b, blocksize, blocksize, c // (blocksize**2), h, w]
)
self.paddle_graph.add_layer(
'paddle.transpose',
inputs={"x": node.name},
outputs=[node.name],
perm=[0, 3, 4, 1, 5, 2]
)
self.paddle_graph.add_layer(
'paddle.reshape',
inputs={"x": node.name},
outputs=[node.name],
shape=[b, c // (blocksize**2), h * blocksize, w * blocksize]
)
else:
self.paddle_graph.add_layer(
'paddle.reshape',
inputs={"x": val_x.name},
outputs=[node.name],
shape=[b, c // (blocksize ** 2), blocksize, blocksize, h, w]
)
self.paddle_graph.add_layer(
'paddle.transpose',
inputs={"x": node.name},
outputs=[node.name],
perm=[0, 1, 4, 2, 5, 3]
)
self.paddle_graph.add_layer(
'paddle.reshape',
inputs={"x": node.name},
outputs=[node.name],
shape=[b, c // (blocksize ** 2), h * blocksize, w * blocksize]
)
\ No newline at end of file
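The DepthToSpace mapping above follows the usual reshape → transpose → reshape decomposition, with the DCR and CRD modes differing only in the intermediate shape and permutation. A minimal NumPy sketch (shapes and blocksize are illustrative, not taken from the PR) showing the DCR branch producing the expected output layout:

```python
import numpy as np

b, c, h, w, bs = 1, 8, 2, 3, 2
x = np.arange(b * c * h * w).reshape(b, c, h, w)

# DCR mode: reshape -> transpose(0, 3, 4, 1, 5, 2) -> reshape
y = x.reshape(b, bs, bs, c // (bs ** 2), h, w)
y = y.transpose(0, 3, 4, 1, 5, 2)
y = y.reshape(b, c // (bs ** 2), h * bs, w * bs)
assert y.shape == (1, 2, 4, 6)  # (b, c / bs^2, h * bs, w * bs)
```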
......@@ -2174,12 +2174,16 @@ def aten_hardtanh_(mapper, graph, node):
# Process input 2, i.e. %66
layer_attrs["max"] = mapper.attrs[inputs_name[2]]
graph.add_layer(
'paddle.nn.Hardtanh',
inputs=layer_inputs,
outputs=layer_outputs,
scope_name=scope_name,
**layer_attrs)
if layer_attrs["min"] ==0 and layer_attrs["max"] == 6:
graph.add_layer(
"paddle.nn.ReLU6", inputs=layer_inputs, outputs=layer_outputs, scope_name=scope_name)
else:
graph.add_layer(
'paddle.nn.Hardtanh',
inputs=layer_inputs,
outputs=layer_outputs,
scope_name=scope_name,
**layer_attrs)
return current_inputs, current_outputs
......
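The change above special-cases `Hardtanh` with `min == 0` and `max == 6`, which is exactly `ReLU6`. A small sketch (assuming a local Paddle install; the tensor values are arbitrary) checking that the two layers agree element-wise:

```python
import paddle

x = paddle.uniform([4, 8], min=-10.0, max=10.0)
relu6_out = paddle.nn.ReLU6()(x)
hardtanh_out = paddle.nn.Hardtanh(min=0.0, max=6.0)(x)
assert paddle.allclose(relu6_out, hardtanh_out)
```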
......@@ -1526,3 +1526,21 @@ class TFOpMapper(OpMapper):
inputs=inputs,
outputs=[node.name],
**attr)
def ReverseV2(self, node):
x = self.graph.get_input_node(node, 0)
axis = self.graph.get_input_node(node, 1)
inputs = {"x": x.name}
attr = dict()
if axis.layer_type == 'Const':
axis = axis.value.tolist()
if not isinstance(axis, list):
axis = [axis]
attr['axis'] = axis
else:
inputs['axis'] = axis.name
self.paddle_graph.add_layer(
"paddle.flip",
inputs=inputs,
outputs=[node.name],
**attr)
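`ReverseV2` reverses a tensor along the given axes, which is what `paddle.flip` does; when the axis input is a constant it is folded into the `axis` attribute, otherwise it is passed through as a tensor input. A minimal usage sketch (assuming a local Paddle install; the data is arbitrary):

```python
import paddle

x = paddle.arange(6, dtype="float32").reshape([2, 3])
# Equivalent to tf.reverse(x, axis=[1]): reverses each row.
print(paddle.flip(x, axis=[1]))
```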
......@@ -1810,3 +1810,50 @@ class OpSet9():
inputs={"x": val_x.name},
outputs=[node.name],
**layer_attrs)
@print_mapping_info
def DepthToSpace(self, node):
val_x = self.graph.get_input_node(node, idx=0, copy=True)
blocksize = node.get_attr('blocksize')
mode = node.get_attr('mode', "DCR")
val_x_shape = val_x.out_shapes[0]
b, c, h, w = val_x_shape
if mode == "DCR":
self.paddle_graph.add_layer(
'paddle.reshape',
inputs={"x": val_x.name},
outputs=[node.name],
shape=[b, blocksize, blocksize, c // (blocksize**2), h, w]
)
self.paddle_graph.add_layer(
'paddle.transpose',
inputs={"x": node.name},
outputs=[node.name],
perm=[0, 3, 4, 1, 5, 2]
)
self.paddle_graph.add_layer(
'paddle.reshape',
inputs={"x": node.name},
outputs=[node.name],
shape=[b, c // (blocksize**2), h * blocksize, w * blocksize]
)
else:
self.paddle_graph.add_layer(
'paddle.reshape',
inputs={"x": val_x.name},
outputs=[node.name],
shape=[b, c // (blocksize ** 2), blocksize, blocksize, h, w]
)
self.paddle_graph.add_layer(
'paddle.transpose',
inputs={"x": node.name},
outputs=[node.name],
perm=[0, 1, 4, 2, 5, 3]
)
self.paddle_graph.add_layer(
'paddle.reshape',
inputs={"x": node.name},
outputs=[node.name],
shape=[b, c // (blocksize ** 2), h * blocksize, w * blocksize]
)
\ No newline at end of file
......@@ -1509,3 +1509,22 @@ class TFOpMapper(OpMapper):
inputs=inputs,
outputs=[node.name],
**attr)
def ReverseV2(self, node):
x = self.graph.get_input_node(node, 0)
axis = self.graph.get_input_node(node, 1)
inputs = {"x": x.name}
attr = dict()
if axis.layer_type == 'Const':
axis = axis.value.tolist()
if not isinstance(axis, list):
axis = [axis]
attr['axis'] = axis
else:
inputs['axis'] = axis.name
self.paddle_graph.add_layer(
"paddle.flip",
inputs=inputs,
outputs=[node.name],
**attr)
......@@ -336,8 +336,23 @@ class HierarchicalTree(Tree):
else:
module_name = module._get_name()
if module_name in module_name2sub_layers:
module_name2sub_layers[module_name].append(sub_layers)
module_name2sub_identifiers[module_name].append(sub_identifiers)
if len(sub_layers[list(sub_layers.keys())[-1]].outputs) != \
len(module_name2sub_layers[module_name][0][list(module_name2sub_layers[module_name][0].keys())[-1]].outputs):
while module_name in module_name2sub_layers:
module_name = module_name + "__tmp"
if module_name in module_name2sub_layers and \
len(sub_layers[list(sub_layers.keys())[-1]].outputs) == \
len(module_name2sub_layers[module_name][0][list(module_name2sub_layers[module_name][0].keys())[-1]].outputs):
break
if module_name not in module_name2sub_layers:
module_name2sub_layers[module_name] = [sub_layers]
module_name2sub_identifiers[module_name] = [sub_identifiers]
else:
module_name2sub_layers[module_name].append(sub_layers)
module_name2sub_identifiers[module_name].append(sub_identifiers)
else:
module_name2sub_layers[module_name].append(sub_layers)
module_name2sub_identifiers[module_name].append(sub_identifiers)
else:
module_name2sub_layers[module_name] = [sub_layers]
module_name2sub_identifiers[module_name] = [sub_identifiers]
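The added branch above handles module-name collisions: when a new group of sub-layers would share a name with an existing group whose last layer has a different number of outputs, the name gets a `__tmp` suffix (repeatedly) until it either matches a compatible group or is unused. A simplified standalone sketch of that strategy (the helper name and dictionary here are hypothetical, not from the PR):

```python
def dedup_module_name(name, name_to_output_count, num_outputs):
    # Keep appending "__tmp" while the name is taken by a group whose
    # output count differs from the new group's.
    while name in name_to_output_count and name_to_output_count[name] != num_outputs:
        name += "__tmp"
    return name

registered = {"Bottleneck": 1, "Bottleneck__tmp": 2}
print(dedup_module_name("Bottleneck", registered, 2))  # -> "Bottleneck__tmp"
print(dedup_module_name("Bottleneck", registered, 3))  # -> "Bottleneck__tmp__tmp"
```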
......@@ -385,10 +400,10 @@ class HierarchicalTree(Tree):
run_func_list.append("def main({}):".format(input_data_name))
run_func_list.append(" # There are {} inputs.".format(len(self.pd_graph.inputs_info)))
for k, v in self.pd_graph.inputs_info.items():
run_func_list.append(" # {}: shape-{},type-{}。".format(k, v[0], v[1]))
run_func_list.append(" # {}: shape-{}, type-{}.".format(k, v[0], v[1]))
run_func_list.extend(
[" paddle.disable_static()",
" params = paddle.load('{}/model.pdparams')".format(osp.abspath(save_dir)),
" params = paddle.load('{}')".format(osp.join(osp.abspath(save_dir), "model.pdparams")),
" model = {}()".format(self.pd_graph.name),
" model.set_dict(params)",
" model.eval()",
......
......@@ -351,10 +351,10 @@ class ModuleGraph(object):
run_func_list.append("def main({}):".format(input_data_name))
run_func_list.append(" # There are {} inputs.".format(len(self.pd_graph.inputs_info)))
for k, v in self.pd_graph.inputs_info.items():
run_func_list.append(" # {}: shape-{}type-{}.".format(k, v[0], v[1]))
run_func_list.append(" # {}: shape-{}, type-{}.".format(k, v[0], v[1]))
run_func_list.extend(
[" paddle.disable_static()",
" params = paddle.load('{}/model.pdparams')".format(osp.abspath(save_dir)),
" params = paddle.load('{}')".format(osp.join(osp.abspath(save_dir), "model.pdparams")),
" model = {}()".format(self.pd_graph.name),
" model.set_dict(params)",
" model.eval()",
......