未验证 提交 aa7ba3e4 编写于 作者: J Jason 提交者: GitHub

Merge pull request #437 from SunAhong1993/develop

add tf optimizer
......@@ -6,7 +6,6 @@ python tools/check_for_lite.py paddle_model/inference_model/__model__
```
> 附:check_for_lite工具并不能完全判断模型是否被支持,PaddleLite详细支持的算子请参考[PaddleLite支持算子集](https://github.com/PaddlePaddle/Paddle-Lite/blob/develop/docs/introduction/support_operation_list.md)
### 二、模型参数合并
X2Paddle转换后产出的路径下包括两个目录,
1. `model_with_code`: 包含保存的参数文件和模型python代码文件,供用户debug
......
......@@ -117,6 +117,7 @@ def tf2paddle(model_path,
from x2paddle.optimizer.tensorflow.bias import BiasOpt
from x2paddle.optimizer.tensorflow.transpose import TransposeOpt
from x2paddle.optimizer.tensorflow.batch_norm import BatchNormOpt
from x2paddle.optimizer.tensorflow.prelu import PReLUOpt
print("Now translating model from tensorflow to paddle.")
model = TFDecoder(model_path, define_input_shape=define_input_shape)
......@@ -125,8 +126,10 @@ def tf2paddle(model_path,
bias_opt = BiasOpt()
transpose_opt = TransposeOpt()
batch_norm_opt = BatchNormOpt()
prelu_opt = PReLUOpt()
bias_opt.run(program)
batch_norm_opt.run(program)
prelu_opt.run(program)
transpose_opt.run(program)
program.gen_model(save_dir)
......
......@@ -882,7 +882,7 @@ class TFOpMapper(OpMapper):
begin = self.graph.get_node(node.layer.input[1])
size = self.graph.get_node(node.layer.input[2])
inputs = {"x": input.name}
inputs = {"input": input.name}
attrs = {}
if begin.layer_type == "Const":
begin = begin.value.tolist()
......@@ -901,20 +901,30 @@ class TFOpMapper(OpMapper):
if size.layer_type == "Const":
size = size.value.tolist()
attrs['shape'] = size
shape = size
else:
shape = size.out_shapes[0]
reshape_name = gen_name("slice", "reshape")
program.add_layer(
kernel="fluid.layers.reshape",
inputs={"x": size.name},
outputs=[reshape_name],
shape=shape)
inputs['shape'] = reshape_name
# reshape_name = gen_name("slice", "reshape")
# program.add_layer(
# kernel="fluid.layers.reshape",
# inputs={"x": size.name},
# outputs=[reshape_name],
# shape=shape)
# inputs['shape'] = reshape_name
# inputs.pop('shape')
program.add_layer(
kernel="fluid.layers.crop_tensor",
kernel="fluid.layers.slice",
inputs=inputs,
outputs=[node.name],
**attrs)
axes=list(range(len(attrs['offsets']))),
starts=attrs['offsets'],
ends=[attrs['offsets'][i] + shape[i] for i in range(len(shape))])
# program.add_layer(
# kernel="fluid.layers.crop_tensor",
# inputs=inputs,
# outputs=[node.name],
# **attrs)
def ResizeNearestNeighbor(self, node):
input = self.graph.get_node(node.layer.input[0])
......
......@@ -20,10 +20,12 @@ class BatchNormOpt:
input_ids0 = graph.edges_in[layer_id]
mul_layer0 = graph.layers[input_ids0[0]]
sub_layer0 = graph.layers[input_ids0[1]]
if mul_layer0.kernel != "fluid.layers.elementwise_mul":
continue
if sub_layer0.kernel != "fluid.layers.elementwise_sub":
continue
axis = mul_layer0.attrs.get('axis', -1)
if axis != -1 and axis != 3:
continue
......@@ -116,7 +118,7 @@ class BatchNormOpt:
other = graph.layers[input_ids6[1]]
if variance.kernel != "fluid.layers.create_parameter":
continue
if other.kernel != "fluid.layers.create_parameter":
if other.kernel != "fluid.layers.fill_constant":
continue
if len(graph.edges_out.get(input_ids6[0], [])) != 1:
continue
......@@ -127,10 +129,6 @@ class BatchNormOpt:
variance_shape = graph.parameters[variance.outputs[0]].shape
if variance_shape != beta_shape:
continue
if other.outputs[0] not in graph.parameters:
continue
if graph.parameters[other.outputs[0]].size != 1:
continue
ids = set([
layer_id, mul_layer0.id, sub_layer0.id, mul_layer1.id, beta.id,
......@@ -163,7 +161,7 @@ class BatchNormOpt:
kernel="fluid.layers.batch_norm",
inputs={"input": "transpose_for_bn"},
outputs=layer.outputs,
epsilon=graph.parameters[other.outputs[0]],
epsilon=other.attrs["value"],
param_attr="'{}'".format(gamma.outputs[0]),
bias_attr="'{}'".format(beta.outputs[0]),
moving_mean_name="'{}'".format(mean.outputs[0]),
......
import copy
import numpy as np
from collections import OrderedDict
from x2paddle.core.program import PaddleLayer
from x2paddle.core.util import *
class PReLUOpt:
    """Graph-level fuser that collapses a TensorFlow-decomposed PReLU pattern
    into a single ``fluid.layers.prelu`` call.

    TF exports PReLU as elementary ops; this pass looks for the subgraph
        elementwise_add( relu(x), elementwise_mul(fill_constant,
                         elementwise_mul(alpha, elementwise_sub(x, abs(x)))) )
    (matched purely by kernel names and edge counts below) and replaces the
    matched ``elementwise_add`` with transpose -> prelu -> transpose, since
    ``fluid.layers.prelu`` with mode='channel' expects NCHW while the TF graph
    is NHWC.

    NOTE: indentation of this block was reconstructed from a whitespace-mangled
    web rendering; statement order and tokens are unchanged.
    """

    def __init__(self):
        # Stateless optimizer; all work happens in run().
        pass

    def run(self, graph):
        """Scan ``graph`` for PReLU subgraphs and fuse them in place.

        graph: x2paddle program graph exposing ``layers`` (OrderedDict),
        ``edges_in``/``edges_out`` (layer-id adjacency), ``parameters``
        (name -> ndarray) and ``build()``.  Mutates ``graph`` and returns None.
        """
        print("Optimize: PReLUOpt...")
        # Iterate over a deep copy so we can delete/rewrite graph.layers
        # while scanning.
        layers = copy.deepcopy(graph.layers)
        for layer_id, layer in layers.items():
            # --- match the root add: relu(x) + (slope * (x - |x|)) ---
            if layer.kernel != "fluid.layers.elementwise_add":
                continue
            # axis -1 or 3 == broadcast over the NHWC channel dimension.
            axis = layer.attrs.get('axis', -1)
            if axis != -1 and axis != 3:
                continue
            # NOTE(review): inputs are matched positionally — assumes
            # edges_in[layer_id][0] is the relu branch and [1] the mul branch.
            input_ids0 = graph.edges_in[layer_id]
            relu_layer0 = graph.layers[input_ids0[0]]
            mul_layer0 = graph.layers[input_ids0[1]]
            if relu_layer0.kernel != "fluid.layers.relu":
                continue
            if mul_layer0.kernel != "fluid.layers.elementwise_mul":
                continue
            axis = mul_layer0.attrs.get('axis', -1)
            if axis != -1 and axis != 3:
                continue
            # Each matched intermediate must feed only this pattern
            # (single consumer), otherwise fusing would break other users.
            if len(graph.edges_out.get(input_ids0[0], [])) != 1:
                continue
            if len(graph.edges_out.get(input_ids0[1], [])) != 1:
                continue
            # --- match mul0's producers: fill_constant (0.5?) and inner mul ---
            input_ids1_0 = graph.edges_in[input_ids0[0]]
            input_ids1_1 = graph.edges_in[input_ids0[1]]
            fill_layer = graph.layers[input_ids1_1[1]]
            mul_layer1 = graph.layers[input_ids1_1[0]]
            if fill_layer.kernel != "fluid.layers.fill_constant":
                continue
            if mul_layer1.kernel != "fluid.layers.elementwise_mul":
                continue
            axis = mul_layer1.attrs.get('axis', -1)
            if axis != -1 and axis != 0:
                continue
            if len(graph.edges_out.get(input_ids1_1[1], [])) != 1:
                continue
            # The shared input x fans out to exactly 3 consumers
            # (relu, abs, sub) in the decomposed PReLU.
            if len(graph.edges_out.get(input_ids1_0[0], [])) != 3:
                continue
            # --- match inner mul's producers: alpha parameter and x - |x| ---
            input_ids2 = graph.edges_in[input_ids1_1[0]]
            alpha = graph.layers[input_ids2[0]]
            sub_layer = graph.layers[input_ids2[1]]
            if alpha.kernel != "fluid.layers.create_parameter":
                continue
            if sub_layer.kernel != "fluid.layers.elementwise_sub":
                continue
            axis = sub_layer.attrs.get('axis', -1)
            if axis != -1 and axis != 3:
                continue
            if len(graph.edges_out.get(input_ids2[0], [])) != 1:
                continue
            if len(graph.edges_out.get(input_ids2[1], [])) != 1:
                continue
            # alpha must be a materialized weight we can hand to prelu.
            if alpha.outputs[0] not in graph.parameters:
                continue
            # --- match sub's producers: the upstream layer and abs(x) ---
            input_ids3 = graph.edges_in[input_ids2[1]]
            add_layer = graph.layers[input_ids3[0]]
            abs_layer = graph.layers[input_ids3[1]]
            if abs_layer.kernel != "fluid.layers.abs":
                continue
            if len(graph.edges_out.get(input_ids3[1], [])) != 1:
                continue

            # Every layer of the matched subgraph except the upstream
            # producer (add_layer) gets removed.
            ids = set([
                layer.id, relu_layer0.id, mul_layer0.id, fill_layer.id, mul_layer1.id, alpha.id,
                sub_layer.id, abs_layer.id])

            for id in ids:
                del graph.layers[id]
                if id in graph.edges_in:
                    del graph.edges_in[id]
                if id in graph.edges_out:
                    del graph.edges_out[id]

            # Rebuild graph.layers, splicing the fused ops in right after
            # the upstream producer to preserve topological order.
            copy_layers = copy.deepcopy(graph.layers)
            graph.layers = OrderedDict()
            for k, v in copy_layers.items():
                if k != add_layer.id:
                    graph.layers[k] = v
                    continue
                graph.layers[k] = v
                # NHWC -> NCHW: prelu mode='channel' reads channels at dim 1.
                transpose0 = PaddleLayer(
                    id='{}_1'.format(k),
                    kernel="fluid.layers.transpose",
                    inputs={"x": v.outputs[0]},
                    outputs=["transpose_for_prelu"],
                    perm=[0, 3, 1, 2])
                prelu = PaddleLayer(
                    id='{}_2'.format(k),
                    kernel="fluid.layers.prelu",
                    inputs={"x": "transpose_for_prelu"},
                    outputs=layer.outputs,
                    mode=string("channel"),
                    param_attr="'{}'".format(alpha.outputs[0]))
                # NCHW -> NHWC, reusing the original add's output name so
                # downstream consumers keep resolving.
                # NOTE(review): prelu and transpose1 both write layer.outputs
                # and transpose1 reads it — relies on id ordering to
                # disambiguate; confirm against graph.build() semantics.
                transpose1 = PaddleLayer(
                    id=layer_id,
                    kernel="fluid.layers.transpose",
                    inputs={"x": layer.outputs[0]},
                    outputs=layer.outputs,
                    perm=[0, 2, 3, 1])
                graph.layers[transpose0.id] = transpose0
                graph.layers[prelu.id] = prelu
                graph.layers[transpose1.id] = transpose1
            # Prepend a leading axis to the alpha weight; presumably to match
            # the shape prelu expects for a per-channel slope — TODO confirm.
            graph.parameters[alpha.outputs[0]] = np.expand_dims(graph.parameters[alpha.outputs[0]], 0)
            # Recompute edges after the structural rewrite.
            graph.build()
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册