Commit b670c23e authored by jiangjiajun

modify program

Parent 842108b5
@@ -8,10 +8,10 @@ name_counter = dict()
 def gen_name(op_name, var_name):
-    name = "{}.{}".format(op_name, var_name)
+    name = "{}_{}".format(op_name, var_name)
     if name not in name_counter:
         name_counter[name] = 0
     else:
         name_counter[name] += 1
-    name = name + "." + str(name_counter[name])
+    name = name + "_" + str(name_counter[name])
     return name
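For reference, a minimal standalone sketch of how the renamed helper behaves (the surrounding module is not shown here, so the counter dict is re-declared locally):

name_counter = dict()

def gen_name(op_name, var_name):
    # Repeated (op, var) pairs get an increasing "_<n>" suffix.
    name = "{}_{}".format(op_name, var_name)
    if name not in name_counter:
        name_counter[name] = 0
    else:
        name_counter[name] += 1
    name = name + "_" + str(name_counter[name])
    return name

print(gen_name("conv2d", "output"))  # conv2d_output_0
print(gen_name("conv2d", "output"))  # conv2d_output_1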
@@ -13,6 +13,7 @@
 # limitations under the License.
 from six import text_type as _text_type
+from x2paddle import program
 import argparse
 import sys
@@ -120,29 +121,10 @@ def tf2paddle(model_path,
     print("Now translating model from tensorflow to paddle.")
     model = TFDecoder(model_path, define_input_shape=define_input_shape)
-    if not without_data_format_optimization:
-        mapper = TFOpMapper(model)
-        optimizer = TFOptimizer(mapper)
-        # neccesary optimization
-        optimizer.delete_redundance_code()
-        # optimizer below is experimental
-        optimizer.optimize_elementwise_op()
-        optimizer.merge_activation()
-        optimizer.merge_bias()
-        optimizer.optimize_sub_graph()
-        # optimizer.merge_batch_norm()
-        # optimizer.merge_prelu()
-    else:
-        mapper = TFOpMapperNHWC(model)
-        optimizer = TFOptimizer(mapper)
-        optimizer.delete_redundance_code()
-        optimizer.strip_graph()
-        optimizer.merge_activation()
-        optimizer.merge_bias()
-        optimizer.make_nchw_input_output()
-        optimizer.remove_transpose()
-    mapper.save_inference_model(save_dir, params_merge)
+    mapper = TFOpMapperNHWC(model)
+    program.build()
+    program.gen_model(save_dir)


 def caffe2paddle(proto, weight, save_dir, caffe_proto, params_merge=False):
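With this change the TensorFlow path no longer calls mapper.save_inference_model; instead gen_model (see the PaddleProgram hunks below) writes both the generated code and an inference model under save_dir. A small usage sketch of the resulting artifact (my own sketch, not part of the commit; the directory name comes from gen_model below):

import os
import paddle.fluid as fluid

save_dir = "output"  # whatever directory was passed to tf2paddle / gen_model
exe = fluid.Executor(fluid.CPUPlace())
# gen_model saves the converted model under <save_dir>/inference_model.
program, feed_names, fetch_targets = fluid.io.load_inference_model(
    dirname=os.path.join(save_dir, "inference_model"), executor=exe)
print(feed_names)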
@@ -14,8 +14,13 @@
 from __future__ import print_function
 from __future__ import division
+import paddle.fluid as fluid
+from paddle.fluid.proto import framework_pb2
+import numpy
 import collections
+import sys
 import os
+import six


 class PaddleLayer(object):
@@ -25,7 +30,21 @@ class PaddleLayer(object):
             dict), "parameter 'inputs' for PaddleLayer should be type of dict"
         assert isinstance(
             outputs,
-            list), "parameter, 'outputs' for PaddleLayer should be type of list"
+            list), "parameter 'outputs' for PaddleLayer should be type of list"
+        for k, v in inputs.items():
+            if isinstance(v, list):
+                for i in v:
+                    assert isinstance(
+                        i, six.string_types
+                    ), "value in inputs should be type of string or list of string"
+            else:
+                assert isinstance(v, six.string_types) or isinstance(
+                    v, list
+                ), "value in inputs should be type of string or list of string"
+        for v in outputs:
+            assert isinstance(
+                v, six.string_types
+            ), "elements in outputs should be type of string"
         self.kernel = kernel
         self.inputs = inputs
         self.outputs = outputs
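The tightened checks accept either a string or a list of strings per input key. A quick sketch of what now passes and what raises (my own example; the import path and kernel strings are assumptions, and build/gen_code never execute the kernel strings anyway):

from x2paddle.program import PaddleLayer  # module path guessed from the convert.py import above

# Accepted: scalar string values and lists of strings.
PaddleLayer("fluid.layers.concat", inputs={"input": ["x0", "x1"]}, outputs=["concat_0"], axis=1)
PaddleLayer("fluid.layers.relu", inputs={"x": "conv_0"}, outputs=["relu_0"])

# Rejected: non-string values now trip the new assertion.
try:
    PaddleLayer("fluid.layers.relu", inputs={"x": 3}, outputs=["relu_0"])
except AssertionError as e:
    print(e)  # value in inputs should be type of string or list of string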
@@ -41,29 +60,40 @@ class PaddleProgram(object):
         self.outputs = list()
         self.parameters = dict()

+    def clear(self):
+        self.layers = list()
+        self.edges_out = dict()
+        self.edges_in = dict()
+        self.inputs = list()
+        self.outputs = list()
+        self.parameters = dict()
+
     def add_layer(self, kernel, inputs, outputs, **kwargs):
         layer = PaddleLayer(kernel, inputs, outputs, **kwargs)
+        index = len(self.layers)
         self.layers.append(layer)
+        return index

     def build(self):
-        outputs = dict()
-        for i in range(len(self.layers)):
-            layer = self.layers[i]
+        outputs_from_nodes = dict()
+        for i, layer in enumerate(self.layers):
+            for input_key, input_var in layer.inputs.items():
+                vs = input_var
+                if not isinstance(vs, list):
+                    vs = [vs]
+                for v in vs:
+                    assert v in outputs_from_nodes, "Couldn't find {} in previous layers, the layers should be make by topological sort".format(
+                        v)
+                    in_layer_index = outputs_from_nodes[v]
+                    if in_layer_index not in self.edges_out:
+                        self.edges_out[in_layer_index] = list()
+                    self.edges_out[in_layer_index].append(i)
+                    if i not in self.edges_in:
+                        self.edges_in[i] = list()
+                    self.edges_in[i].append(in_layer_index)
             for output in layer.outputs:
-                outputs[output] = i
-            for k, v in layer.inputs.items():
-                assert v in outputs, "Couldn't find {} in previous layers, the layers should be make by topological sort".format(
-                    v)
-                in_layer_index = outputs[v]
-                if in_layer_index not in self.edges_out:
-                    self.edges_out[in_layer_index] = list()
-                self.edges_out[in_layer_index].append(i)
-                if i not in self.edges_in:
-                    self.edges_in[i] = list()
-                self.edges_in[i].append(in_layer_index)
+                outputs_from_nodes[output] = i

     def get_layer_outputs(self, i):
         return self.edges_out[i]
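As an illustration of how the reworked add_layer/build pair fits together, here is a minimal sketch (my own; the import path is an assumption and the kernel strings are only placeholders, since build() never executes them):

from x2paddle.program import PaddleProgram  # module path guessed from the convert.py import above

prog = PaddleProgram()
# add_layer now returns the index of the appended layer.
idx_data = prog.add_layer(
    "fluid.data", inputs={}, outputs=["x"], name="'x'", shape=[-1, 3, 224, 224])
idx_relu = prog.add_layer("fluid.layers.relu", inputs={"x": "x"}, outputs=["relu_0"])

prog.build()
print(prog.edges_in)   # {1: [0]}  -> layer 1 consumes the output of layer 0
print(prog.edges_out)  # {0: [1]}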
@@ -80,7 +110,9 @@ class PaddleProgram(object):
                 else:
                     f.write(indent_blank + code_line + '\n')

-        f = open(os.path.join(code_dir, 'model.py'), 'w')
+        if not os.path.exists(code_dir):
+            os.makedirs(code_dir)
+        f = open(os.path.join(code_dir, 'x2paddle_model.py'), 'w')
         write_code(
             f, [
@@ -90,9 +122,10 @@
                 "", "def x2paddle_net():"
             ],
             indent=0)

         for i, layer in enumerate(self.layers):
-            if self.edges_in.get(i, 0) == 0 and self.edges_out.get(i, 0) == 0:
+            edges_in = self.edges_in.get(i, [])
+            edges_out = self.edges_out.get(i, [])
+            if len(edges_in) == 0 and len(edges_out) == 0:
                 continue

             line = ""
@@ -106,16 +139,87 @@
             line += " = {}(".format(layer.kernel)
             for k, v in layer.inputs.items():
-                line += "{}={}, ".format(k, v)
+                if isinstance(v, list):
+                    line += "{}=[{}], ".format(k, ", ".join(v))
+                else:
+                    line += "{}={}, ".format(k, v)
             for k, v in layer.attrs.items():
                 line += "{}={}, ".format(k, v)
             line = line.strip(", ")
             line += ")"
             write_code(f, [line], indent=1)
-        f.close()
-
-    def gen_parameters(self, code_dir):
-        pass
+
+        write_code(
+            f, [
+                "return [{}], [{}]".format(", ".join(self.inputs),
+                                           ", ".join(self.outputs))
+            ],
+            indent=1)
+        f.close()
-    def gen_inference_model(self, model_dir):
-        pass
+    def gen_model(self, save_dir):
+        code_dir = os.path.join(save_dir, 'model_with_code')
+        infer_dir = os.path.join(save_dir, 'inference_model')
+        self.gen_code(code_dir)
+        sys.path.append(code_dir)
+        import x2paddle_model
+        scope = fluid.Scope()
+        startup_program = fluid.Program()
+        main_program = fluid.Program()
+        with fluid.scope_guard(scope):
+            with fluid.program_guard(main_program, startup_program):
+                inputs, outputs = x2paddle_model.x2paddle_net()
+                exe = fluid.Executor(fluid.CPUPlace())
+                exe.run(startup_program)
+
+                param_dir = os.path.join(code_dir, 'weights')
+                for k, v in self.parameters.items():
+                    if scope.find_var(k):
+                        self.dump_parameter(k, v, param_dir)
+
+                def if_exist(var):
+                    b = os.path.exists(
+                        os.path.join(os.path.join(param_dir, var.name)))
+                    return b
+
+                fluid.io.load_vars(
+                    exe, param_dir, main_program, predicate=if_exist)
+                fluid.io.save_inference_model(
+                    dirname=infer_dir,
+                    feeded_var_names=[i.name for i in inputs],
+                    target_vars=outputs,
+                    executor=exe)
+
+    def dump_parameter(self, param_name, param, save_dir):
+        if not os.path.exists(save_dir):
+            os.makedirs(save_dir)
+        dtype_map = {
+            "int16": [framework_pb2.VarType.INT16, 'h'],
+            "int32": [framework_pb2.VarType.INT32, 'i'],
+            "int64": [framework_pb2.VarType.INT64, 'q'],
+            "float16": [framework_pb2.VarType.FP16, 'e'],
+            "float32": [framework_pb2.VarType.FP32, 'f'],
+            "float64": [framework_pb2.VarType.FP64, 'd'],
+            "bool": [framework_pb2.VarType.BOOL, None]
+        }
+        shape = param.shape
+        if str(param.dtype) in ['uint8', 'uint_8', 'bool']:
+            param = param.astype('int64')
+        if len(shape) == 0:
+            assert param.size == 1, "Unexpected situation happend!"
+            shape = [1]
+        assert str(
+            param.dtype) in dtype_map, "Unknown dtype {} of params: {}.".format(
+                str(param.dtype), param_name)
+        fp = open(os.path.join(save_dir, param_name), 'wb')
+        numpy.array([0], dtype='int32').tofile(fp)
+        numpy.array([0], dtype='int64').tofile(fp)
+        numpy.array([0], dtype='int32').tofile(fp)
+        tensor_desc = framework_pb2.VarType.TensorDesc()
+        tensor_desc.data_type = dtype_map[str(param.dtype)][0]
+        tensor_desc.dims.extend(shape)
+        desc_size = tensor_desc.ByteSize()
+        numpy.array([desc_size], dtype='int32').tofile(fp)
+        fp.write(tensor_desc.SerializeToString())
+        param.tofile(fp)
+        fp.close()
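dump_parameter writes a small binary header followed by the raw tensor bytes: three zero fields (int32, int64, int32), an int32 descriptor size, the serialized VarType.TensorDesc, then the data. As a reading-side illustration (my own sketch, not part of this commit), the following parses such a file back; the numpy dtype is supplied by the caller rather than mapped back from the descriptor enum:

import numpy as np
from paddle.fluid.proto import framework_pb2

def read_dumped_parameter(path, np_dtype):
    with open(path, 'rb') as fp:
        # Skip the three leading zero fields written by dump_parameter.
        np.fromfile(fp, dtype='int32', count=1)
        np.fromfile(fp, dtype='int64', count=1)
        np.fromfile(fp, dtype='int32', count=1)
        desc_size = int(np.fromfile(fp, dtype='int32', count=1)[0])
        desc = framework_pb2.VarType.TensorDesc()
        desc.ParseFromString(fp.read(desc_size))
        data = np.fromfile(fp, dtype=np_dtype)
    return data.reshape(list(desc.dims))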
@@ -89,6 +89,16 @@ class TFGraphNode(GraphNode):
         field = getattr(attr, attr.WhichOneof('value'))
         return tensor_util.MakeNdarray(field)

+    @property
+    def name(self):
+        multi_out_ops = ['Split', 'SplitV', 'IteratorV2']
+        if self.layer_type in multi_out_ops:
+            if self.layer_name.count(':') > 0:
+                return self.layer_name.replace(':', '_p')
+            else:
+                return "{}_p0".format(self.layer_name)
+        return self.layer_name
+
     def get_attr(self, name):
         if name not in self.layer.attr:
             return None
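To illustrate the new name property, here is a standalone sketch of the same mapping (my own hypothetical helper, since constructing a real TFGraphNode needs a TensorFlow NodeDef):

def tf_name_to_paddle_name(layer_name, layer_type):
    # Mirrors TFGraphNode.name above: tensors of multi-output ops such as
    # Split keep their output index as a "_p<i>" suffix.
    multi_out_ops = ['Split', 'SplitV', 'IteratorV2']
    if layer_type in multi_out_ops:
        if layer_name.count(':') > 0:
            return layer_name.replace(':', '_p')
        return "{}_p0".format(layer_name)
    return layer_name

print(tf_name_to_paddle_name("split:1", "Split"))        # split_p1
print(tf_name_to_paddle_name("split", "Split"))          # split_p0
print(tf_name_to_paddle_name("conv/Conv2D", "Conv2D"))   # conv/Conv2D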