Commit c0dabbac authored by J jiangjiajun

add save inference model func

Parent 63a3bd17
@@ -58,8 +58,7 @@ def tf2paddle(model_path, save_dir):
     print("Now translating model from tensorflow to paddle.")
     model = TFDecoder(model_path)
     mapper = TFOpMapper(model)
-    mapper.run()
-    mapper.save_python_model(save_dir)
+    mapper.save_inference_model(save_dir)
 
 
 def caffe2paddle(proto, weight, save_dir, caffe_proto):
@@ -69,8 +68,7 @@ def caffe2paddle(proto, weight, save_dir, caffe_proto):
     print("Now translating model from caffe to paddle.")
     model = CaffeDecoder(proto, weight, caffe_proto)
     mapper = CaffeOpMapper(model)
-    mapper.run()
-    mapper.save_python_model(save_dir)
+    mapper.save_inference_model(save_dir)
 
 
 def main():
...
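Note: with this change each converter entry point produces the final inference model in a single call instead of stopping at generated Python code. A minimal usage sketch; the import path and file names below are illustrative, not confirmed by this diff:

```python
# Hedged sketch: "x2paddle.convert" and the file names are assumptions.
from x2paddle.convert import tf2paddle

tf2paddle("frozen_model.pb", "save_dir")
# Expected outputs after this commit:
#   save_dir/model_with_code/model.py  -> generated x2paddle_net()/run_net()
#   save_dir/inference_model/          -> fluid inference model ("__params__")
```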
@@ -113,6 +113,13 @@ class Graph(object):
         idx = self.topo_sort.index(node_name)
         del self.topo_sort[idx]
 
+        if node_name in self.input_nodes:
+            idx = self.input_nodes.index(node_name)
+            del self.input_nodes[idx]
+        if node_name in self.output_nodes:
+            idx = self.output_nodes.index(node_name)
+            del self.output_nodes[idx]
+
     def print(self):
         for i, tmp in enumerate(self.topo_sort):
             print(tmp, self.node_map[tmp].layer_type, self.node_map[tmp].inputs,
...
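Note: remove_node previously pruned only topo_sort, so a removed node's name could linger in input_nodes or output_nodes and later leak into the generated `return [inputs], [outputs]` line of model.py. An illustrative check; the Graph instance and node name are hypothetical:

```python
# Hypothetical: `graph` is a Graph built elsewhere, with a node "dropout_1"
# that also appears in its output_nodes list.
graph.remove_node("dropout_1")
assert "dropout_1" not in graph.topo_sort
assert "dropout_1" not in graph.input_nodes   # pruned as of this commit
assert "dropout_1" not in graph.output_nodes  # pruned as of this commit
```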
@@ -23,6 +23,8 @@ class OpMapper(object):
         self.tab = "    "
         self.net_code = list()
         self.weights = dict()
+        self.inputs = list()
+        self.outputs = list()
 
     def op_checker(self):
         unsupported_ops = set()
@@ -56,16 +58,83 @@ class OpMapper(object):
         self.add_codes("import paddle.fluid as fluid")
         self.add_codes("")
 
-    def save_inference_model(self):
-        print("Not Implement")
+    def save_inference_model(self, save_dir):
+        self.save_python_model(save_dir)
+
+        import sys
+        import paddle.fluid as fluid
+        py_code_dir = os.path.join(save_dir, "model_with_code")
+        sys.path.append(py_code_dir)
+        import model
+        try:
+            inputs, outputs = model.x2paddle_net()
+            input_names = [input.name for input in inputs]
+            exe = fluid.Executor(fluid.CPUPlace())
+            exe.run(fluid.default_startup_program())
+
+            def if_exist(var):
+                b = os.path.exists(
+                    os.path.join(os.path.join(save_dir, var.name)))
+                return b
+
+            fluid.io.load_vars(exe,
+                               save_dir,
+                               fluid.default_main_program(),
+                               predicate=if_exist)
+            fluid.io.save_inference_model(dirname=os.path.join(
+                save_dir, "inference_model"),
+                                          feeded_var_names=input_names,
+                                          target_vars=outputs,
+                                          executor=exe,
+                                          params_filename="__params__")
+        except:
+            raise Exception(
+                "Paddle code was saved in {}/model.py, but an error occurred while running it; please check model.py manually."
+                .format(py_code_dir))
 
     def save_python_model(self, save_dir):
+        if not os.path.exists(save_dir):
+            os.makedirs(save_dir)
+        py_code_dir = os.path.join(save_dir, "model_with_code")
+        if not os.path.exists(py_code_dir):
+            os.makedirs(py_code_dir)
         for name, param in self.weights.items():
-            export_paddle_param(param, name, save_dir)
+            export_paddle_param(param, name, py_code_dir)
         self.add_heads()
-        self.add_codes(self.net_code)
-        self.add_codes("")
-        self.add_codes(inspect.getsourcelines(init_net)[0])
-        fp = open(os.path.join(save_dir, "model.py"), 'w')
+
+        if hasattr(self, "used_custom_layers"):
+            for _, layer_code in self.used_custom_layers.items():
+                self.add_codes(layer_code, 0)
+
+        self.add_codes("\ndef x2paddle_net():", 0)
+        for i in range(len(self.graph.topo_sort)):
+            node_name = self.graph.topo_sort[i]
+            if hasattr(self, "omit_nodes") and node_name in self.omit_nodes:
+                continue
+            node = self.graph.get_node(node_name)
+            self.add_codes(node.fluid_code.gen_codes(), 1)
+        self.add_codes("", 0)
+
+        input_str = "["
+        for name in self.graph.input_nodes:
+            input_str += (name + ", ")
+        input_str = input_str.strip(", ") + "]"
+        output_str = "["
+        for name in self.graph.output_nodes:
+            output_str += (name + ", ")
+        output_str = output_str.strip(", ") + "]"
+
+        return_code = "return {}, {}".format(input_str, output_str)
+        self.add_codes(return_code, 1)
+        self.add_codes("", 0)
+
+        self.add_codes(inspect.getsourcelines(run_net)[0])
+        fp = open(os.path.join(py_code_dir, "model.py"), 'w')
         fp.write(self.paddle_codes)
         fp.close()
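Note: the names written above (`inference_model` directory, `__params__` params file) are exactly what a consumer needs to reload the result. A minimal loading sketch against the Paddle 1.x fluid API; the save_dir prefix and input shape are assumptions:

```python
import numpy as np
import paddle.fluid as fluid

exe = fluid.Executor(fluid.CPUPlace())
# "inference_model" and "__params__" match what save_inference_model writes;
# "save_dir" stands in for whatever directory was passed to it.
program, feed_names, fetch_targets = fluid.io.load_inference_model(
    dirname="save_dir/inference_model",
    executor=exe,
    params_filename="__params__")

data = np.random.rand(1, 3, 224, 224).astype("float32")  # hypothetical shape
results = exe.run(program,
                  feed={feed_names[0]: data},
                  fetch_list=fetch_targets)
```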
@@ -55,8 +55,6 @@ def export_paddle_param(param, param_name, dir):
         assert param.size == 1, "Unexpected situation happened!"
         shape = [1]
     assert str(param.dtype) in dtype_map, "Unknown dtype of params."
-    if not os.path.exists(dir):
-        os.makedirs(dir)
     fp = open(os.path.join(dir, param_name), 'wb')
     numpy.array([0], dtype='int32').tofile(fp)
@@ -72,8 +70,9 @@ def export_paddle_param(param, param_name, dir):
     fp.close()
 
 
-def init_net(param_dir="./"):
+def run_net(param_dir="./"):
     import os
+    inputs, outputs = x2paddle_net()
     exe = fluid.Executor(fluid.CUDAPlace(0))
     exe.run(fluid.default_startup_program())
@@ -85,3 +84,9 @@ def init_net(param_dir="./"):
                        param_dir,
                        fluid.default_main_program(),
                        predicate=if_exist)
+    fluid.io.save_inference_model(dirname='inference_model',
+                                  feeded_var_names=[i.name for i in inputs],
+                                  target_vars=outputs,
+                                  executor=exe,
+                                  params_filename="__params__")
+
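Note: run_net is copied verbatim into the generated model.py (via inspect.getsourcelines above), so the exported code can rebuild and export itself without x2paddle installed. A usage sketch with illustrative paths; run_net as written uses fluid.CUDAPlace(0), so it assumes a GPU, unlike OpMapper.save_inference_model, which runs on CPU:

```python
import sys

sys.path.append("save_dir/model_with_code")  # illustrative path
import model

# Rebuilds the network via x2paddle_net(), loads the exported weights from
# param_dir, then writes ./inference_model/ in the working directory.
model.run_net(param_dir="save_dir/model_with_code")
```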
@@ -163,6 +163,10 @@ class TFGraph(Graph):
         idx = self.topo_sort.index(node_name)
         del self.topo_sort[idx]
 
+        if node_name in self.output_nodes:
+            idx = self.output_nodes.index(node_name)
+            self.output_nodes[idx] = input_node.layer_name
+
 
 class TFDecoder(object):
     def __init__(self, pb_model):
...
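Note: unlike the base Graph, which simply drops the name, TFGraph remaps an output node to its upstream node when it is removed, so the generated return statement still references a live variable (`input_node` is defined by the enclosing method, which this hunk does not show). A self-contained simulation of the remapping; the node names are hypothetical:

```python
# Simulates the list surgery above with hypothetical names: "Identity_1" is
# an output node being removed; "conv5" is its upstream node's layer_name.
output_nodes = ["Identity_1"]
node_name, upstream_layer_name = "Identity_1", "conv5"

if node_name in output_nodes:
    idx = output_nodes.index(node_name)
    output_nodes[idx] = upstream_layer_name

assert output_nodes == ["conv5"]  # model.py will return conv5 instead
```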
@@ -26,7 +26,7 @@ class CaffeOpMapper(OpMapper):
         self.graph = decoder.caffe_graph
         self.weights = dict()
         resolver = decoder.resolver
-        self.mylayers = {}
+        self.used_custom_layers = {}
         self.inputs = self.graph.input_nodes
         self.outputs = self.graph.output_nodes
         if resolver.has_pycaffe():
@@ -67,8 +67,8 @@ class CaffeOpMapper(OpMapper):
                 self.deal_custom_layer(node)
             else:
                 raise Exception("Model is not supported yet.")
-        for key in self.mylayers:
-            self.net_code.append(self.mylayers[key])
+        for key in self.used_custom_layers:
+            self.net_code.append(self.used_custom_layers[key])
 
         for i in range(len(self.graph.topo_sort)):
             node_name = self.graph.topo_sort[i]
@@ -1050,5 +1050,5 @@ class CaffeOpMapper(OpMapper):
             output=node,
             param_attr=kwargs,
             is_custom_layer=True)
-        if op not in self.mylayers:
-            self.mylayers[op] = custom_code
+        if op not in self.used_custom_layers:
+            self.used_custom_layers[op] = custom_code
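Note: the rename from mylayers to used_custom_layers is not cosmetic: the base OpMapper.save_python_model (earlier in this commit) emits custom-layer source into model.py only when the attribute carries exactly this name:

```python
# From save_python_model above -- the duck-typed hook the rename satisfies.
if hasattr(self, "used_custom_layers"):
    for _, layer_code in self.used_custom_layers.items():
        self.add_codes(layer_code, 0)
```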
@@ -51,12 +51,15 @@ class TFOpMapper(OpMapper):
         self.weights = dict()
         self.omit_nodes = list()
 
-    def run(self):
-        print("Total nodes: {}".format(len(self.graph.topo_sort)))
-
-        # check if ops in model are all supported
-        # TODO
-
+        not_placeholder = list()
+        for name in self.graph.input_nodes:
+            if self.graph.get_node(name).layer_type != "Placeholder":
+                not_placeholder.append(name)
+        for name in not_placeholder:
+            idx = self.graph.input_nodes.index(name)
+            del self.graph.input_nodes[idx]
+
+        print("Total nodes: {}".format(len(self.graph.topo_sort)))
         for node_name in self.graph.topo_sort:
             node = self.graph.get_node(node_name)
             op = node.layer_type
@@ -70,13 +73,6 @@ class TFOpMapper(OpMapper):
             else:
                 raise Exception("OP: [{}] not supported yet".format(op))
 
-        for i in range(len(self.graph.topo_sort)):
-            node_name = self.graph.topo_sort[i]
-            if node_name in self.omit_nodes:
-                continue
-            node = self.graph.get_node(node_name)
-            self.net_code += node.fluid_code.gen_codes()
-
     def directly_map(self, node):
         assert node.layer_type in self.directly_map_ops
         op_info = self.directly_map_ops[node.layer_type]
...
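Note: since the generated x2paddle_net() returns the variables named in input_nodes, and save_inference_model feeds exactly those names, any detected input that is not a real Placeholder (e.g. a frozen variable) must be pruned up front. A self-contained simulation of the filter; node names and types are hypothetical:

```python
# Hypothetical stand-ins for self.graph.input_nodes and node layer types.
input_nodes = ["image", "keep_prob", "global_step"]
layer_type = {"image": "Placeholder",
              "keep_prob": "Placeholder",
              "global_step": "VariableV2"}

not_placeholder = [n for n in input_nodes if layer_type[n] != "Placeholder"]
for name in not_placeholder:
    del input_nodes[input_nodes.index(name)]

assert input_nodes == ["image", "keep_prob"]
```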