Commit 6391ec6e authored by J jiangjiajun

fix bug in saving inference model

Parent ac001819
@@ -22,7 +22,7 @@ class Layer(object):
self.param_attr = dict()
self.inputs = dict()
self.output = None
self.is_new = False
self.is_custom_layer = False
def get_code(self):
layer_code = ""
@@ -32,6 +32,9 @@ class Layer(object):
else:
layer_code = self.output.layer_name + " = "
if self.is_custom_layer:
layer_code = layer_code + self.op + "("
else:
layer_code = layer_code + "fluid.layers." + self.op + "("
if isinstance(self.inputs, list):
@@ -83,38 +86,6 @@ class Layer(object):
return layer_code + ")"
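To see what the new is_custom_layer branch changes in the emitted code, here is a standalone sketch of the string-building logic, with hypothetical op and variable names:

# Standalone sketch of get_code()'s branch (names are hypothetical)
op = "conv2d"
output_name = "conv1"
is_custom_layer = False

layer_code = output_name + " = "
if is_custom_layer:
    layer_code += op + "("          # custom layers are emitted as bare calls, e.g. "out = PriorBox("
else:
    layer_code += "fluid.layers." + op + "("

print(layer_code + "input=data)")   # conv1 = fluid.layers.conv2d(input=data)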
def get_custom_code(self):
layer_code = ""
if self.output is not None:
if isinstance(self.output, str):
layer_code = self.output + " = "
else:
layer_code = self.output.layer_name + " = "
layer_code = layer_code + self.op + "("
if isinstance(self.inputs, list):
in_list = "["
for input in self.inputs:
assert isinstance(
input, GraphNode), "Type of input should be GraphNode"
if hasattr(input, "index"):
in_list += (input.layer_name + "[{}]".format(input.index) +
", ")
else:
in_list += (input.layer_name + ", ")
in_list = in_list.strip(", ") + "], "
layer_code += in_list
else:
raise Exception("Unknown type of inputs.")
param_attr = collections.OrderedDict(self.param_attr)
for key, value in param_attr.items():
layer_code = layer_code + key + "={}, ".format(value)
layer_code = layer_code.strip(", ")
return layer_code + ")"
class FluidCode(object):
def __init__(self):
@@ -147,9 +118,6 @@ class FluidCode(object):
codes = list()
for layer in self.layers:
if isinstance(layer, Layer):
if layer.is_custom_layer:
codes.append(layer.get_custom_code())
else:
codes.append(layer.get_code())
elif isinstance(layer, str):
codes.append(layer)
......
@@ -17,6 +17,35 @@ import inspect
import os
def export_paddle_param(param, param_name, dir):
dtype_map = {
"int16": [framework_pb2.VarType.INT16, 'h'],
"int32": [framework_pb2.VarType.INT32, 'i'],
"int64": [framework_pb2.VarType.INT64, 'q'],
"float16": [framework_pb2.VarType.FP16, 'e'],
"float32": [framework_pb2.VarType.FP32, 'f'],
"float64": [framework_pb2.VarType.FP64, 'd']
}
shape = param.shape
if len(shape) == 0:
assert param.size == 1, "Unexpected situation happened!"
shape = [1]
assert str(param.dtype) in dtype_map, "Unknown dtype of params."
fp = open(os.path.join(dir, param_name), 'wb')
numpy.array([0], dtype='int32').tofile(fp)  # LoDTensor version header
numpy.array([0], dtype='int64').tofile(fp)  # LoD information (none for parameters)
numpy.array([0], dtype='int32').tofile(fp)  # tensor version
tensor_desc = framework_pb2.VarType.TensorDesc()
tensor_desc.data_type = dtype_map[str(param.dtype)][0]
tensor_desc.dims.extend(shape)
desc_size = tensor_desc.ByteSize()
numpy.array([desc_size], dtype='int32').tofile(fp)  # byte length of the serialized TensorDesc
fp.write(tensor_desc.SerializeToString())
param.tofile(fp)
fp.close()
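The on-disk layout written above is therefore: an int32 version, an int64 LoD record, an int32 tensor version, the int32 byte length of the TensorDesc, the serialized TensorDesc, and finally the raw parameter data. A minimal usage sketch (the parameter values and target directory are hypothetical, and the directory is assumed to exist):

import numpy

weight = numpy.random.rand(64, 3, 7, 7).astype('float32')  # hypothetical conv weight
export_paddle_param(weight, 'conv1_weights', './inference_model')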
class OpMapper(object):
def __init__(self):
self.paddle_codes = ""
@@ -74,11 +103,11 @@ class OpMapper(object):
def if_exist(var):
b = os.path.exists(
os.path.join(os.path.join(save_dir, var.name)))
os.path.join(os.path.join(py_code_dir, var.name)))
return b
fluid.io.load_vars(exe,
save_dir,
py_code_dir,
fluid.default_main_program(),
predicate=if_exist)
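For context, the fix above matters because the exported parameter files live next to the generated model.py under py_code_dir, not under save_dir, so both the predicate and load_vars must look there. A condensed sketch of the load-then-save flow (exe, inputs, outputs, and the two directories are assumed to be set up by the surrounding method):

import os
import paddle.fluid as fluid

def if_exist(var):
    # only load variables whose parameter file was actually exported
    return os.path.exists(os.path.join(py_code_dir, var.name))

fluid.io.load_vars(exe, py_code_dir,
                   fluid.default_main_program(),
                   predicate=if_exist)
fluid.io.save_inference_model(dirname=save_dir,
                              feeded_var_names=[i.name for i in inputs],
                              target_vars=outputs,
                              executor=exe,
                              params_filename="__params__")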
@@ -88,7 +117,6 @@ class OpMapper(object):
target_vars=outputs,
executor=exe,
params_filename="__params__")
except:
raise Exception(
"Paddle code was saved in {}/model.py, but seems there's wrong exist, please check model.py manually."
@@ -109,6 +137,7 @@ class OpMapper(object):
if hasattr(self, "used_custom_layers"):
for _, layer_code in self.used_custom_layers.items():
self.add_codes(layer_code, 0)
self.add_codes("", 0)
self.add_codes("\ndef x2paddle_net():", 0)
for i in range(len(self.graph.topo_sort)):
......
@@ -32,48 +32,11 @@ def color_log(log_str):
print(log_str)
def get_same_padding(in_size, kernel_size, stride):
new_size = int(math.ceil(in_size * 1.0 / stride))
pad_size = (new_size - 1) * stride + kernel_size - in_size
pad0 = int(pad_size / 2)
pad1 = pad_size - pad0
return [pad0, pad1]
def export_paddle_param(param, param_name, dir):
dtype_map = {
"int16": [framework_pb2.VarType.INT16, 'h'],
"int32": [framework_pb2.VarType.INT32, 'i'],
"int64": [framework_pb2.VarType.INT64, 'q'],
"float16": [framework_pb2.VarType.FP16, 'e'],
"float32": [framework_pb2.VarType.FP32, 'f'],
"float64": [framework_pb2.VarType.FP64, 'd']
}
shape = param.shape
if len(shape) == 0:
assert param.size == 1, "Unexpected situation happened!"
shape = [1]
assert str(param.dtype) in dtype_map, "Unknown dtype of params."
fp = open(os.path.join(dir, param_name), 'wb')
numpy.array([0], dtype='int32').tofile(fp)
numpy.array([0], dtype='int64').tofile(fp)
numpy.array([0], dtype='int32').tofile(fp)
tensor_desc = framework_pb2.VarType.TensorDesc()
tensor_desc.data_type = dtype_map[str(param.dtype)][0]
tensor_desc.dims.extend(shape)
desc_size = tensor_desc.ByteSize()
numpy.array([desc_size], dtype='int32').tofile(fp)
fp.write(tensor_desc.SerializeToString())
param.tofile(fp)
fp.close()
# This func will copy to generate code file
def run_net(param_dir="./"):
import os
inputs, outputs = x2paddle_net()
exe = fluid.Executor(fluid.CUDAPlace(0))
exe = fluid.Executor(fluid.CPUPlace())
exe.run(fluid.default_startup_program())
def if_exist(var):
......
@@ -15,11 +15,20 @@
from x2paddle.decoder.tf_decoder import TFGraph
from x2paddle.core.op_mapper import OpMapper
from x2paddle.core.util import *
import inspect
import numpy
class TFOpMapper(OpMapper):
# Compute padding sizes for TensorFlow's 'SAME' padding mode
def get_same_padding(in_size, kernel_size, stride):
new_size = int(math.ceil(in_size * 1.0 / stride))
pad_size = (new_size - 1) * stride + kernel_size - in_size
pad0 = int(pad_size / 2)
pad1 = pad_size - pad0
return [pad0, pad1]
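A quick worked example of the arithmetic above: for in_size=224, kernel_size=3, stride=2, the output size rounds up to ceil(224/2) = 112, so the total padding is (112-1)*2 + 3 - 224 = 1, split as [0, 1]:

assert get_same_padding(224, 3, 2) == [0, 1]   # odd total padding goes to the trailing side
assert get_same_padding(224, 3, 1) == [1, 1]   # stride 1 pads (kernel-1)/2 on each side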
class TFOpMapper(OpMapper):
directly_map_ops = {
'Relu': ['relu'],
'Relu6': ['relu6'],
@@ -50,6 +59,7 @@ class TFOpMapper(OpMapper):
self.graph = decoder.tf_graph
self.weights = dict()
self.omit_nodes = list()
self.used_custom_layers = dict()
not_placeholder = list()
for name in self.graph.input_nodes:
@@ -572,7 +582,7 @@ class TFOpMapper(OpMapper):
paddings = self.graph.get_node(node.layer.input[1], copy=True)
assert paddings.layer_type == "Const", "Padding should be Const"
self.omit_nodes.append(paddings.layer_name)
attr = {"paddings": paddings.value.tolist()}
attr = {"paddings": paddings.value.flatten().tolist()}
node.fluid_code.add_layer("pad",
inputs=input,
output=node,
......
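On the flatten() fix above: TensorFlow's Pad op stores paddings as an (n, 2) array of (before, after) pairs, one pair per dimension, while fluid.layers.pad expects a single flat list, so tolist() alone would emit a malformed nested list. A small illustration (the padding values are hypothetical):

import numpy

paddings = numpy.array([[0, 0], [0, 0], [1, 1], [1, 1]])  # one (before, after) pair per dim
print(paddings.tolist())            # [[0, 0], [0, 0], [1, 1], [1, 1]] -- nested, wrong shape for pad
print(paddings.flatten().tolist())  # [0, 0, 0, 0, 1, 1, 1, 1] -- the flat list fluid.layers.pad expects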