Commit ba689267 authored by S SunAhong1993

add tf ops

Parent 18cc4a40
@@ -69,13 +69,19 @@ class TFOpMapper(OpMapper):
         'Add': 'paddle.add',
         'AddV2': 'paddle.add',
         'RealDiv': 'paddle.divide',
+        'DivNoNan': 'paddle.divide',
         'Sub': 'fluid.layers.elementwise_sub',
         'Maximum': 'paddle.maximum',
         'Minimum': 'paddle.minimum',
         'LessEqual': 'paddle.less_equal',
         'GreaterEqual': 'paddle.greater_equal',
+        'Greater': 'paddle.greater_than',
+        'NotEqual': 'paddle.not_equal',
+        'Equal': 'paddle.equal',
         'Mul': 'paddle.multiply',
-        'FloorDiv': 'paddle.floor_divide'
+        'FloorDiv': 'paddle.floor_divide',
+        'FloorMod': 'paddle.floor_mod',
+        'LogicalAnd': 'logical_and',
     }

     def __init__(self, decoder):
@@ -185,16 +191,6 @@ class TFOpMapper(OpMapper):
             outputs=[node.name])
         self.paddle_graph.layers[layer_id].input_shapes = {"x": x_shape, "y": y_shape}

-    def NotEqual(self, node):
-        x = self.graph.get_input_node(node, 0)
-        y = self.graph.get_input_node(node, 1)
-        self.paddle_graph.add_layer(
-            kernel="paddle.not_equal",
-            inputs={"x": x.name,
-                    "y": y.name},
-            outputs=[node.name])
-
     def Placeholder(self, node):
         shape = node.out_shapes[0]
         assert len(shape) != 0, "Unknown shape of input nodes[{}].".format(
@@ -249,6 +245,24 @@ class TFOpMapper(OpMapper):
             outputs=[node.name],
             perm=perm)

+    def Where(self, node):
+        if len(node.layer.input) == 1:
+            cond = self.graph.get_input_node(node, 0)
+            self.paddle_graph.add_layer(
+                "paddle.nonzero",
+                inputs={"x": cond.name},
+                outputs=[node.name])
+        else:
+            cond = self.graph.get_input_node(node, 0)
+            x = self.graph.get_input_node(node, 1)
+            y = self.graph.get_input_node(node, 2)
+            self.paddle_graph.add_layer(
+                "paddle.where",
+                inputs={"condition": cond.name,
+                        "x": x.name,
+                        "y": y.name},
+                outputs=[node.name])
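
Note: TF's Where is overloaded, which is why the branch above is needed. A minimal dygraph sketch of the two behaviors being mapped (values are illustrative only):

```python
import paddle

cond = paddle.to_tensor([[True, False], [False, True]])

# One-input tf.where(cond): coordinates of True entries -> paddle.nonzero.
print(paddle.nonzero(cond))      # [[0, 0], [1, 1]]

# Three-input tf.where(cond, x, y): elementwise select -> paddle.where.
x = paddle.full([2, 2], 1.0)
y = paddle.full([2, 2], 0.0)
print(paddle.where(cond, x, y))  # [[1., 0.], [0., 1.]]
```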
     def Neg(self, node):
         input = self.graph.get_input_node(node, 0)

@@ -437,6 +451,71 @@ class TFOpMapper(OpMapper):
             inputs={"x": node.name},
             outputs=[node.name],
             perm=[0, 2, 3, 1])
+    def Conv3D(self, node):
+        op_name = name_generator("conv", self.nn_name2id)
+        output_name = node.name
+        layer_outputs = [op_name, output_name]
+        input = self.graph.get_input_node(node, 0)
+        kernel = self.graph.get_input_node(node, 1)
+        k_size = kernel.out_shapes[0]
+        strides = node.get_attr("strides")
+        dilations = node.get_attr("dilations")
+        data_format = node.get_attr("data_format").decode()
+        pad_mode = node.get_attr("padding").decode()
+        if data_format == "NDHWC":
+            n, d, h, w, c = input.out_shapes[0]
+        else:
+            n, c, d, h, w = input.out_shapes[0]
+
+        if kernel.layer_type == 'Const':
+            kernel_value = kernel.value
+        else:
+            kernel_value = self.decoder.infer_tensor(kernel, use_diff_inputs=False)
+        kernel_weight_name = op_name + ".weight"
+        # TF kernel layout is [kd, kh, kw, in_c, out_c]; Paddle expects [out_c, in_c, kd, kh, kw].
+        self.params[kernel_weight_name] = numpy.transpose(kernel_value,
+                                                          (4, 3, 0, 1, 2))
+
+        input_name = input.name
+        if data_format == "NDHWC":
+            strides = [strides[i] for i in [0, 4, 1, 2, 3]]
+            dilations = [dilations[i] for i in [0, 4, 1, 2, 3]]
+            transpose_name = gen_name("conv3d", "transpose")
+            self.paddle_graph.add_layer(
+                kernel="paddle.transpose",
+                inputs={"x": input.name},
+                outputs=[transpose_name],
+                perm=[0, 4, 1, 2, 3])
+            input_name = transpose_name
+
+        if c == -1:
+            # Restore the statically known channel count; k_size[3] is in_c.
+            self.paddle_graph.add_layer(
+                kernel="paddle.reshape",
+                inputs={"x": input_name},
+                outputs=[input_name],
+                shape=[0, k_size[3], 0, 0, 0])
+
+        self.paddle_graph.add_layer(
+            kernel="paddle.nn.Conv3D",
+            inputs={"input": input_name},
+            outputs=layer_outputs,
+            weight_attr=string(kernel_weight_name),
+            bias_attr=False,
+            in_channels=k_size[3],
+            out_channels=k_size[4],
+            kernel_size=k_size[0:3],
+            stride=strides[2:5],
+            dilation=dilations[2:5],
+            padding=string(pad_mode))
+
+        if data_format == "NDHWC":
+            self.paddle_graph.add_layer(
+                kernel="paddle.transpose",
+                inputs={"x": node.name},
+                outputs=[node.name],
+                perm=[0, 2, 3, 4, 1])
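
Note: the transposes above are pure layout bookkeeping. A numpy-only sketch of the shape reasoning (shapes are illustrative assumptions):

```python
import numpy as np

x = np.zeros([1, 8, 16, 16, 3])              # NDHWC input
w = np.zeros([3, 3, 3, 3, 64])               # TF kernel: [kd, kh, kw, in_c, out_c]

x_ncdhw = np.transpose(x, (0, 4, 1, 2, 3))   # what the inserted input transpose produces
w_paddle = np.transpose(w, (4, 3, 0, 1, 2))  # [out_c, in_c, kd, kh, kw], Paddle's layout

assert x_ncdhw.shape == (1, 3, 8, 16, 16)
assert w_paddle.shape == (64, 3, 3, 3, 3)
# strides/dilations get the same [0, 4, 1, 2, 3] reorder; [2:5] then selects the D/H/W entries.
```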
     def BiasAdd(self, node):
         input = self.graph.get_input_node(node, 0)

@@ -575,6 +654,33 @@ class TFOpMapper(OpMapper):
             inputs={"x": input.name},
             outputs=[node.name],
             pad=paddings)
+    def MirrorPad(self, node):
+        op_name = name_generator("pad", self.nn_name2id)
+        output_name = node.name
+        layer_outputs = [op_name, output_name]
+        input = self.graph.get_input_node(node, 0)
+        paddings = self.graph.get_input_node(node, 1)
+        assert paddings.layer_type == "Const", "Padding should be Const"
+        # Flip NHWC row order so the flattened list reads [c0, c1, w0, w1, h0, h1, n0, n1].
+        paddings = np.flip(paddings.value, 0).flatten().tolist()
+        # Spatial rank only: drop the N and C pairs (assumed zero for MirrorPad on NHWC).
+        dim = int(len(paddings) / 2) - 2
+        new_padding = paddings[2:2 + dim * 2]
+        transpose_name = gen_name("pad", "transpose")
+        self.paddle_graph.add_layer(
+            kernel="paddle.transpose",
+            inputs={"x": input.name},
+            outputs=[transpose_name],
+            perm=[0, 3, 1, 2])
+        self.paddle_graph.add_layer(
+            kernel="paddle.nn.Pad{}D".format(dim),
+            inputs={"x": transpose_name},
+            outputs=layer_outputs,
+            pad=new_padding)
+        self.paddle_graph.add_layer(
+            kernel="paddle.transpose",
+            inputs={"x": node.name},
+            outputs=[node.name],
+            perm=[0, 2, 3, 1])
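
Note: the `new_padding` slice works because `np.flip` reverses the NHWC rows before flattening. A quick check of the index arithmetic (values are illustrative):

```python
import numpy as np

tf_paddings = np.array([[0, 0],    # N
                        [1, 2],    # H
                        [3, 4],    # W
                        [0, 0]])   # C
flipped = np.flip(tf_paddings, 0).flatten().tolist()
# -> [0, 0, 3, 4, 1, 2, 0, 0] == [c0, c1, w0, w1, h0, h1, n0, n1]
assert flipped[2:6] == [3, 4, 1, 2]  # Pad2D order: [left, right, top, bottom]
```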
     def Squeeze(self, node):
         input = self.graph.get_input_node(node, 0)

@@ -592,6 +698,25 @@ class TFOpMapper(OpMapper):
             kernel="paddle.shape",
             inputs={"input": input_name},
             outputs=[node.name])
+    def Size(self, node):
+        input = self.graph.get_input_node(node, 0)
+        input_name = input.name
+        self.paddle_graph.add_layer(
+            kernel="paddle.shape",
+            inputs={"input": input_name},
+            outputs=[node.name])
+        self.paddle_graph.add_layer(
+            kernel="paddle.prod",
+            inputs={"x": node.name},
+            outputs=[node.name])
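
Note: this composes two kernels because tf.size is being expressed as the product of the shape vector; a minimal dygraph sketch:

```python
import paddle

x = paddle.zeros([2, 3, 4])
n = paddle.prod(paddle.shape(x))  # 24, matching tf.size(x)
```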
+    def Ceil(self, node):
+        input = self.graph.get_input_node(node, 0)
+        self.paddle_graph.add_layer(
+            kernel="paddle.ceil",
+            inputs={"x": input.name},
+            outputs=[node.name])
     def ArgMax(self, node):
         input = self.graph.get_input_node(node, 0)

@@ -603,6 +728,19 @@ class TFOpMapper(OpMapper):
             inputs={"x": input.name},
             outputs=[node.name],
             axis=axis)
+    def TopKV2(self, node):
+        input = self.graph.get_input_node(node, 0)
+        k = self.graph.get_input_node(node, 1)
+        assert k.layer_type == "Const", "TopKV2 only supports Const parameter[k]"
+        k = k.value
+        sort = node.get_attr('sorted')
+        self.paddle_graph.add_layer(
+            kernel="paddle.topk",
+            inputs={"x": input.name},
+            outputs=[node.name],
+            k=k,
+            sorted=sort)
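
Note: paddle.topk mirrors tf.math.top_k's (values, indices) pair; a minimal sketch:

```python
import paddle

x = paddle.to_tensor([1.0, 4.0, 2.0, 3.0])
values, indices = paddle.topk(x, k=2, sorted=True)
# values -> [4., 3.], indices -> [1, 3]
```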
     def MatMul(self, node):
         x = self.graph.get_input_node(node, 0)

@@ -765,10 +903,13 @@ class TFOpMapper(OpMapper):
             axis = 1
         else:
             raise Exception("Unexpected situation happened in Unpack OP")
+        layer_outputs = ["{}_p{}".format(node.layer_name, i) for i in range(num)]
+        if len(layer_outputs) == 1:
+            layer_outputs[0] = "[{}]".format(node.layer_name)
         self.paddle_graph.add_layer(
             kernel="paddle.unstack",
             inputs={"x": input_name},
-            outputs=["{}_p{}".format(node.layer_name, i) for i in range(num)],
+            outputs=layer_outputs,
             axis=axis,
             num=num)
@@ -776,7 +917,6 @@ class TFOpMapper(OpMapper):
         inputs_list = list()
         for i in range(len(node.inputs) - 1):
             inputs_list.append(self.graph.get_input_node(node, i))
-        # inputs_list = [self.graph.get_node(name) for name in node.layer.input[:-1]]
         axis = self.graph.get_input_node(node, -1)
         assert axis.layer_type == "Const", "axis for ConcatV2 must be type Const"
         axis = axis.value
@@ -789,6 +929,17 @@ class TFOpMapper(OpMapper):
             inputs={"x": input_names},
             outputs=[node.name],
             axis=axis)
+    def AddN(self, node):
+        inputs_list = list()
+        # AddN sums all of its inputs; unlike ConcatV2 there is no trailing axis input.
+        for i in range(len(node.inputs)):
+            inputs_list.append(self.graph.get_input_node(node, i))
+        input_names = [i.name for i in inputs_list]
+        self.paddle_graph.add_layer(
+            kernel="paddle.add_n",
+            inputs={"inputs": input_names},
+            outputs=[node.name])
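
Note: paddle.add_n takes the whole input list and sums elementwise, which is why every input index must be collected:

```python
import paddle

a = paddle.to_tensor([1.0, 2.0])
b = paddle.to_tensor([3.0, 4.0])
c = paddle.to_tensor([5.0, 6.0])
print(paddle.add_n([a, b, c]))  # [9., 12.]
```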
     def StridedSlice(self, node):
         input = self.graph.get_input_node(node, 0)

@@ -894,6 +1045,20 @@ class TFOpMapper(OpMapper):
             inputs={"x": node.name},
             outputs=[node.name],
             axis=shrink_axes)
+    def Prod(self, node):
+        input = self.graph.get_input_node(node, 0)
+        reduction_indices = self.graph.get_input_node(node, 1)
+        assert reduction_indices.layer_type == "Const"
+        keep_dims = node.get_attr('keep_dims')
+        axis = reduction_indices.value
+        self.paddle_graph.add_layer(
+            kernel="paddle.prod",
+            inputs={"x": input.name},
+            outputs=[node.layer_name],
+            keepdim=keep_dims,
+            axis=axis)
     def Split(self, node):
         dim = self.graph.get_input_node(node, 0)

@@ -1177,15 +1342,15 @@ class TFOpMapper(OpMapper):
     def Tile(self, node):
         input = self.graph.get_input_node(node, 0)
-        expand_times = self.graph.get_input_node(node, 1)
+        repeat_times = self.graph.get_input_node(node, 1)
         inputs = {"x": input.name}
         attr = dict()
         in_shape = input.out_shapes[0]
-        if expand_times.layer_type == "Const":
-            expand_times = expand_times.value.tolist()
-            attr["repeat_times"] = expand_times
+        if repeat_times.layer_type == "Const":
+            repeat_times = repeat_times.value.tolist()
+            attr["repeat_times"] = repeat_times
         else:
-            inputs["repeat_times"] = expand_times.name
+            inputs["repeat_times"] = repeat_times.name

         self.paddle_graph.add_layer(
             kernel="paddle.tile",
@@ -1206,6 +1371,7 @@ class TFOpMapper(OpMapper):
         if start.layer_type == "Const":
             attr["start"] = start.value
         else:
             inputs["start"] = start.name
+
         if limit.dtype.startswith('float'):
             dtype = limit.dtype
@@ -1309,8 +1475,7 @@ class TFOpMapper(OpMapper):
         index = self.graph.get_input_node(node, 1)
         axis = self.graph.get_input_node(node, 2)
         assert axis.layer_type == 'Const', "Only support Const parameter[axis]"
-        axis = axis.value.tolist()
-        assert axis == 0, "Only support axis=0 in GatherV2 OP"
+        axis = axis.value
         index_name = index.name
         if len(index.out_shapes[0]) != 1:
             reshape_name = gen_name("gather", "reshape")
@@ -1324,7 +1489,8 @@ class TFOpMapper(OpMapper):
         self.paddle_graph.add_layer(
             "paddle.gather",
             inputs=inputs,
-            outputs=[node.name])
+            outputs=[node.name],
+            axis=axis)
         if len(index.out_shapes[0]) != 1:
             out_shape = node.out_shapes[0]
             self.paddle_graph.add_layer(

@@ -1332,6 +1498,15 @@ class TFOpMapper(OpMapper):
             inputs={"x": node.name},
             outputs=[node.name],
             shape=out_shape)
+    def GatherNd(self, node):
+        x = self.graph.get_input_node(node, 0)
+        index = self.graph.get_input_node(node, 1)
+        inputs = {'x': x.name, 'index': index.name}
+        self.paddle_graph.add_layer(
+            "paddle.gather_nd",
+            inputs=inputs,
+            outputs=[node.name])
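
Note: the contrast with GatherV2 above: paddle.gather indexes along a single axis, while paddle.gather_nd consumes coordinate rows. A minimal sketch:

```python
import paddle

x = paddle.to_tensor([[1., 2.], [3., 4.], [5., 6.]])
print(paddle.gather(x, paddle.to_tensor([2, 0]), axis=0))       # rows 2 and 0
print(paddle.gather_nd(x, paddle.to_tensor([[0, 1], [2, 0]])))  # [x[0, 1], x[2, 0]] -> [2., 5.]
```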
     def ExpandDims(self, node):
         x = self.graph.get_input_node(node, 0, copy=True)
...
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from x2paddle.decoder.tf_decoder import TFGraph
+from x2paddle.decoder.tf_decoder import TFGraph, TFGraphNode
 from x2paddle.core.program import PaddleGraph
 from x2paddle.core.op_mapper import OpMapper
 from x2paddle.core.util import *
@@ -67,22 +67,30 @@ class TFOpMapper(OpMapper):
         'Square': ['square']
     }
     elementwise_ops = {
-        'Add': 'elementwise_add',
-        'AddV2': 'elementwise_add',
-        'RealDiv': 'elementwise_div',
-        'Sub': 'elementwise_sub',
-        'Maximum': 'elementwise_max',
-        'Minimum': 'elementwise_min',
-        'LessEqual': 'less_equal',
-        'GreaterEqual': 'greater_equal',
-        'Mul': 'elementwise_mul',
-        'FloorDiv': 'elementwise_floordiv'
+        'Add': 'paddle.add',
+        'AddV2': 'paddle.add',
+        'RealDiv': 'paddle.divide',
+        'DivNoNan': 'paddle.divide',
+        'Sub': 'fluid.layers.elementwise_sub',
+        'Maximum': 'paddle.maximum',
+        'Minimum': 'paddle.minimum',
+        'LessEqual': 'paddle.less_equal',
+        'GreaterEqual': 'paddle.greater_equal',
+        'Greater': 'paddle.greater_than',
+        'NotEqual': 'paddle.not_equal',
+        'Equal': 'paddle.equal',
+        'Mul': 'paddle.multiply',
+        'FloorDiv': 'paddle.floor_divide',
+        'FloorMod': 'paddle.floor_mod',
+        'LogicalAnd': 'logical_and',
     }

     def __init__(self, decoder):
         super(TFOpMapper, self).__init__()
         self.decoder = decoder
         self.graph = decoder.tf_graph
+        if not self.op_checker():
+            raise Exception("Model is not supported yet.")
         self.params = dict()
         self.paddle_graph = PaddleGraph(parent_layer=None, graph_type="static", source_type="tf")
@@ -101,40 +109,45 @@ class TFOpMapper(OpMapper):
         self.paddle_graph.inputs = self.graph.input_nodes
         self.paddle_graph.outputs = self.graph.output_nodes

-        unsupported_ops = set()
-        sys.stderr.write("Total nodes: {}\n".format(len(self.graph.topo_sort)))
-        for i, node_name in enumerate(self.graph.topo_sort):
-            sys.stderr.write("\rConverting node {} ... ".format(i + 1))
-            node = self.graph.get_node(node_name)
-            op = node.layer_type
-            if op in self.directly_map_ops:
-                if len(unsupported_ops) > 0:
-                    continue
-                self.directly_map(node)
-            elif op in self.elementwise_ops:
-                if len(unsupported_ops) > 0:
-                    continue
-                self.elementwise_map(node)
-            elif hasattr(self, op):
-                if len(unsupported_ops) > 0:
-                    continue
-                func = getattr(self, op)
-                try:
-                    func(node)
-                except Exception as e:
-                    unsupported_ops.add(op)
-                    print("\n{}\n".format(traceback.format_exc()))
-            else:
-                unsupported_ops.add(op)
-        if len(unsupported_ops) > 0:
-            print("\n========= {} OPs are not supported yet ===========".format(
-                len(unsupported_ops)))
-            for op in unsupported_ops:
-                print("========== {} ============".format(op))
-            sys.exit(-1)
-        sys.stderr.write("\nDone!\n")
-        self.paddle_graph.set_name(self.graph.graph_name)
-        self.paddle_graph.set_parameters(self.params)
+        print("Total nodes: {}".format(
+            sum([
+                isinstance(node, TFGraphNode)
+                for name, node in self.graph.node_map.items()
+            ])))
+        print("Nodes converting ...")
+        for i, node_name in enumerate(self.graph.topo_sort):
+            sys.stderr.write("\rConverting node {} ... ".format(i + 1))
+            node = self.graph.get_node(node_name)
+            op = node.layer_type
+            if op in self.directly_map_ops:
+                self.directly_map(node)
+            elif op in self.elementwise_ops:
+                self.elementwise_map(node)
+            elif hasattr(self, op):
+                func = getattr(self, op)
+                func(node)
+        print("\nNodes converted.")
+        self.paddle_graph.set_name(self.graph.graph_name)
+        self.paddle_graph.set_parameters(self.params)
+
+    def op_checker(self):
+        unsupported_ops = set()
+        for node_name in self.graph.topo_sort:
+            node = self.graph.get_node(node_name)
+            op = node.layer_type
+            if not hasattr(self, op) and \
+               op not in self.directly_map_ops and \
+               op not in self.elementwise_ops:
+                unsupported_ops.add(op)
+        if len(unsupported_ops) == 0:
+            return True
+        else:
+            print("\n========= {} OPs are not supported yet ===========".format(
+                len(unsupported_ops)))
+            for op in unsupported_ops:
+                print("========== {} ============".format(op))
+            return False
     def directly_map(self, node):
         assert node.layer_type in self.directly_map_ops

@@ -161,22 +174,12 @@ class TFOpMapper(OpMapper):
         x_shape = x.out_shapes[0]
         y_shape = y.out_shapes[0]
         layer_id = self.paddle_graph.add_layer(
-            kernel="fluid.layers.{}".format(op_type),
+            kernel=op_type,
             inputs={"x": x.name,
                     "y": y.name},
             outputs=[node.name])
         self.paddle_graph.layers[layer_id].input_shapes = {"x": x_shape, "y": y_shape}

-    def NotEqual(self, node):
-        x = self.graph.get_node(node.layer.input[0])
-        y = self.graph.get_node(node.layer.input[1])
-        self.paddle_graph.add_layer(
-            kernel="fluid.layers.not_equal",
-            inputs={"x": x.name,
-                    "y": y.name},
-            outputs=[node.name])
-
     def Placeholder(self, node):
         shape = node.out_shapes[0]
         assert len(shape) != 0, "Unknown shape of input nodes[{}].".format(
@@ -249,6 +252,12 @@ class TFOpMapper(OpMapper):
             inputs=inputs,
             outputs=[node.name],
             **attr)
+        if dims.layer_type != "Const":
+            self.paddle_graph.add_layer(
+                "paddle.reshape",
+                inputs={"x": node.name},
+                outputs=[node.name],
+                shape=node.out_shapes[0])

     def DepthToSpace(self, node):
         input = self.graph.get_node(node.layer.input[0])
@@ -305,6 +314,24 @@ class TFOpMapper(OpMapper):
             outputs=[node.name],
             perm=[0, 2, 3, 1])

+    def Where(self, node):
+        if len(node.layer.input) == 1:
+            cond = self.graph.get_input_node(node, 0)
+            self.paddle_graph.add_layer(
+                "paddle.nonzero",
+                inputs={"x": cond.name},
+                outputs=[node.name])
+        else:
+            cond = self.graph.get_input_node(node, 0)
+            x = self.graph.get_input_node(node, 1)
+            y = self.graph.get_input_node(node, 2)
+            self.paddle_graph.add_layer(
+                "paddle.where",
+                inputs={"condition": cond.name,
+                        "x": x.name,
+                        "y": y.name},
+                outputs=[node.name])

     def Neg(self, node):
         input = self.graph.get_input_node(node, 0)

@@ -417,6 +444,83 @@ class TFOpMapper(OpMapper):
             inputs={"x": node.name},
             outputs=[node.name],
             perm=[0, 2, 3, 1])
+    def Conv3D(self, node):
+        input = self.graph.get_input_node(node, 0)
+        kernel = self.graph.get_input_node(node, 1)
+        k_size = kernel.out_shapes[0]
+        strides = node.get_attr("strides")
+        dilations = node.get_attr("dilations")
+        data_format = node.get_attr("data_format").decode()
+        pad_mode = node.get_attr("padding").decode()
+        if data_format == "NDHWC":
+            n, d, h, w, c = input.out_shapes[0]
+        else:
+            n, c, d, h, w = input.out_shapes[0]
+
+        if kernel.layer_type == 'Const':
+            kernel_value = kernel.value
+            kernel_weight_name = kernel.name.replace('/', '_')
+            # Store the transposed weight first so create_parameter sees the final shape.
+            self.params[kernel_weight_name] = numpy.transpose(kernel_value,
+                                                              (4, 3, 0, 1, 2))
+            self.paddle_graph.add_layer(
+                kernel="paddle.static.nn.create_parameter",
+                inputs={},
+                outputs=[kernel_weight_name],
+                shape=self.params[kernel_weight_name].shape,
+                dtype=string(str(self.params[kernel_weight_name].dtype)),
+                name=string(kernel_weight_name))
+        else:
+            kernel_value = self.decoder.infer_tensor(kernel, use_diff_inputs=False)
+            if kernel.layer_type == 'Split':
+                kernel_weight_name = "{}_{}_kernel".format(node.name,
+                                                           kernel.name)
+            else:
+                kernel_weight_name = kernel.name.replace('/', '_')
+            self.paddle_graph.add_layer(
+                kernel="paddle.transpose",
+                inputs={"x": kernel_weight_name},
+                outputs=[kernel_weight_name],
+                perm=[4, 3, 0, 1, 2])

+        input_name = input.name
+        if data_format == "NDHWC":
+            strides = [strides[i] for i in [0, 4, 1, 2, 3]]
+            dilations = [dilations[i] for i in [0, 4, 1, 2, 3]]
+            transpose_name = gen_name("conv3d", "transpose")
+            self.paddle_graph.add_layer(
+                kernel="paddle.transpose",
+                inputs={"x": input.name},
+                outputs=[transpose_name],
+                perm=[0, 4, 1, 2, 3])
+            input_name = transpose_name

+        if c == -1:
+            # Restore the statically known channel count; k_size[3] is in_c.
+            self.paddle_graph.add_layer(
+                kernel="paddle.reshape",
+                inputs={"x": input_name},
+                outputs=[input_name],
+                shape=[0, k_size[3], 0, 0, 0])

+        self.paddle_graph.add_layer(
+            kernel="paddle.nn.functional.conv3d",
+            inputs={"x": input_name},
+            outputs=[node.name],
+            weight=kernel_weight_name,
+            bias=None,
+            stride=strides[2:5],
+            dilation=dilations[2:5],
+            padding=string(pad_mode))

+        if data_format == "NDHWC":
+            self.paddle_graph.add_layer(
+                kernel="paddle.transpose",
+                inputs={"x": node.name},
+                outputs=[node.name],
+                perm=[0, 2, 3, 4, 1])

     def BiasAdd(self, node):
         input = self.graph.get_node(node.layer.input[0])
@@ -476,36 +580,28 @@ class TFOpMapper(OpMapper):
         keep_dims = node.get_attr("keep_dims")

         self.paddle_graph.add_layer(
-            kernel="fluid.layers.reduce_mean",
-            inputs={"input": input.name},
+            kernel="paddle.mean",
+            inputs={"x": input.name},
             outputs=[node.name],
-            dim=dims,
-            keep_dim=keep_dims)
+            axis=dims,
+            keepdim=keep_dims)

     def Reshape(self, node):
-        input = self.graph.get_node(node.layer.input[0])
-        param = self.graph.get_node(node.layer.input[1])
+        input = self.graph.get_input_node(node, 0)
+        param = self.graph.get_input_node(node, 1)
         input_name = input.name
-        if input.dtype == 'bool':
-            cast_name = gen_name('reshape', 'cast')
-            self.paddle_graph.add_layer(
-                kernel="fluid.layers.cast",
-                inputs={"x": input_name},
-                outputs=[cast_name],
-                dtype="'int32'")
-            input_name = cast_name

         if param.layer_type == "Const":
             shape = param.value.tolist()
             self.paddle_graph.add_layer(
-                kernel="fluid.layers.reshape",
+                kernel="paddle.reshape",
                 inputs={"x": input_name},
                 outputs=[node.name],
                 shape=shape)
         else:
             self.paddle_graph.add_layer(
-                kernel="fluid.layers.reshape",
+                kernel="paddle.reshape",
                 inputs={"x": input_name,
                         "shape": param.name},
                 outputs=[node.name])
@@ -514,17 +610,52 @@ class TFOpMapper(OpMapper):
             if (out_shape > 0).any():
                 out_shape[out_shape < 0] = 0
                 self.paddle_graph.add_layer(
-                    kernel="fluid.layers.reshape",
+                    kernel="paddle.reshape",
                     inputs={"x": node.name},
                     outputs=[node.name],
                     shape=out_shape.tolist())
-        if input.dtype == 'bool':
-            self.paddle_graph.add_layer(
-                kernel="fluid.layers.cast",
-                inputs={"x": node.name},
-                outputs=[node.name],
-                dtype="'bool'")
+        # input = self.graph.get_node(node.layer.input[0])
+        # param = self.graph.get_node(node.layer.input[1])
+        # input_name = input.name
+        # if input.dtype == 'bool':
+        #     cast_name = gen_name('reshape', 'cast')
+        #     self.paddle_graph.add_layer(
+        #         kernel="fluid.layers.cast",
+        #         inputs={"x": input_name},
+        #         outputs=[cast_name],
+        #         dtype="'int32'")
+        #     input_name = cast_name
+        # if param.layer_type == "Const":
+        #     shape = param.value.tolist()
+        #     self.paddle_graph.add_layer(
+        #         kernel="fluid.layers.reshape",
+        #         inputs={"x": input_name},
+        #         outputs=[node.name],
+        #         shape=shape)
+        # else:
+        #     self.paddle_graph.add_layer(
+        #         kernel="fluid.layers.reshape",
+        #         inputs={"x": input_name,
+        #                 "shape": param.name},
+        #         outputs=[node.name])
+        # if param.layer_type != "Const":
+        #     out_shape = numpy.array(node.out_shapes[0])
+        #     if (out_shape > 0).any():
+        #         out_shape[out_shape < 0] = 0
+        #         self.paddle_graph.add_layer(
+        #             kernel="fluid.layers.reshape",
+        #             inputs={"x": node.name},
+        #             outputs=[node.name],
+        #             shape=out_shape.tolist())
+        # if input.dtype == 'bool':
+        #     self.paddle_graph.add_layer(
+        #         kernel="fluid.layers.cast",
+        #         inputs={"x": node.name},
+        #         outputs=[node.name],
+        #         dtype="'bool'")

     def Pad(self, node):
         input = self.graph.get_node(node.layer.input[0])
@@ -558,6 +689,32 @@ class TFOpMapper(OpMapper):
             inputs={"x": input.name},
             outputs=[node.name],
             paddings=paddings)

+    def MirrorPad(self, node):
+        op_name = name_generator("pad", self.nn_name2id)
+        output_name = node.name
+        layer_outputs = [op_name, output_name]
+        input = self.graph.get_input_node(node, 0)
+        paddings = self.graph.get_input_node(node, 1)
+        assert paddings.layer_type == "Const", "Padding should be Const"
+        # Flip NHWC row order so the flattened list reads [c0, c1, w0, w1, h0, h1, n0, n1].
+        paddings = np.flip(paddings.value, 0).flatten().tolist()
+        # Spatial rank only: drop the N and C pairs (assumed zero for MirrorPad on NHWC).
+        dim = int(len(paddings) / 2) - 2
+        new_padding = paddings[2:2 + dim * 2]
+        transpose_name = gen_name("pad", "transpose")
+        self.paddle_graph.add_layer(
+            kernel="paddle.transpose",
+            inputs={"x": input.name},
+            outputs=[transpose_name],
+            perm=[0, 3, 1, 2])
+        self.paddle_graph.add_layer(
+            kernel="paddle.nn.Pad{}D".format(dim),
+            inputs={"x": transpose_name},
+            outputs=layer_outputs,
+            pad=new_padding)
+        self.paddle_graph.add_layer(
+            kernel="paddle.transpose",
+            inputs={"x": node.name},
+            outputs=[node.name],
+            perm=[0, 2, 3, 1])

     def Squeeze(self, node):
         input = self.graph.get_node(node.layer.input[0])
@@ -578,21 +735,36 @@ class TFOpMapper(OpMapper):
             axis=axis)

     def Shape(self, node):
-        input = self.graph.get_node(node.layer.input[0])
+        input = self.graph.get_input_node(node, 0)
         input_name = input.name
-        if input.dtype == 'bool':
-            cast_name = gen_name('shape', 'cast')
-            self.paddle_graph.add_layer(
-                kernel="fluid.layers.cast",
-                inputs={"x": input.name},
-                outputs=[cast_name],
-                dtype="'int32'")
-            input_name = cast_name
         self.paddle_graph.add_layer(
-            kernel="fluid.layers.shape",
+            kernel="paddle.shape",
             inputs={"input": input_name},
             outputs=[node.name])

+    def Size(self, node):
+        input = self.graph.get_input_node(node, 0)
+        input_name = input.name
+        self.paddle_graph.add_layer(
+            kernel="fluid.layers.size",
+            inputs={"input": input_name},
+            outputs=[node.name])
+        # self.paddle_graph.add_layer(
+        #     kernel="paddle.shape",
+        #     inputs={"input": input_name},
+        #     outputs=[node.name])
+        # self.paddle_graph.add_layer(
+        #     kernel="paddle.prod",
+        #     inputs={"x": node.name},
+        #     outputs=[node.name])

+    def Ceil(self, node):
+        input = self.graph.get_input_node(node, 0)
+        self.paddle_graph.add_layer(
+            kernel="paddle.ceil",
+            inputs={"x": input.name},
+            outputs=[node.name])

     def ArgMax(self, node):
         input = self.graph.get_node(node.layer.input[0])
         axis = self.graph.get_node(node.layer.input[1])
@@ -603,6 +775,19 @@ class TFOpMapper(OpMapper):
             inputs={"x": input.name},
             outputs=[node.name],
             axis=axis)

+    def TopKV2(self, node):
+        input = self.graph.get_input_node(node, 0)
+        k = self.graph.get_input_node(node, 1)
+        assert k.layer_type == "Const", "TopKV2 only supports Const parameter[k]"
+        k = k.value
+        sort = node.get_attr('sorted')
+        self.paddle_graph.add_layer(
+            kernel="paddle.topk",
+            inputs={"x": input.name},
+            outputs=[node.name],
+            k=k,
+            sorted=sort)

     def MatMul(self, node):
         x = self.graph.get_node(node.layer.input[0])
@@ -744,10 +929,13 @@ class TFOpMapper(OpMapper):
             axis = 1
         else:
             raise Exception("Unexpected situation happened in Unpack OP")
+        layer_outputs = ["{}_p{}".format(node.layer_name, i) for i in range(num)]
+        if len(layer_outputs) == 1:
+            layer_outputs[0] = "[{}]".format(node.layer_name)
         self.paddle_graph.add_layer(
             kernel="fluid.layers.unstack",
             inputs={"x": input_name},
-            outputs=["{}_p{}".format(node.layer_name, i) for i in range(num)],
+            outputs=layer_outputs,
             axis=axis,
             num=num)
@@ -780,6 +968,17 @@ class TFOpMapper(OpMapper):
             inputs={"x": node.name},
             outputs=[node.name],
             dtype="'bool'")

+    def AddN(self, node):
+        inputs_list = list()
+        # AddN sums all of its inputs; unlike ConcatV2 there is no trailing axis input.
+        for i in range(len(node.inputs)):
+            inputs_list.append(self.graph.get_input_node(node, i))
+        input_names = [i.name for i in inputs_list]
+        self.paddle_graph.add_layer(
+            kernel="paddle.add_n",
+            inputs={"inputs": input_names},
+            outputs=[node.name])

     def StridedSlice(self, node):
         input = self.graph.get_node(node.layer.input[0])
@@ -870,6 +1069,20 @@ class TFOpMapper(OpMapper):
             inputs={"input": node.name},
             outputs=[node.name],
             axes=shrink_axes)

+    def Prod(self, node):
+        input = self.graph.get_input_node(node, 0)
+        reduction_indices = self.graph.get_input_node(node, 1)
+        assert reduction_indices.layer_type == "Const"
+        keep_dims = node.get_attr('keep_dims')
+        axis = reduction_indices.value
+        self.paddle_graph.add_layer(
+            kernel="paddle.prod",
+            inputs={"x": input.name},
+            outputs=[node.layer_name],
+            keepdim=keep_dims,
+            axis=axis)

     def Split(self, node):
         dim = self.graph.get_node(node.layer.input[0])
@@ -1128,20 +1341,27 @@ class TFOpMapper(OpMapper):
     def Tile(self, node):
         input = self.graph.get_node(node.layer.input[0])
-        expand_times = self.graph.get_node(node.layer.input[1])
+        repeat_times = self.graph.get_node(node.layer.input[1])
         inputs = {"x": input.name}
         attr = dict()
-        if expand_times.layer_type == "Const":
-            expand_times = expand_times.value.tolist()
-            attr["expand_times"] = expand_times
+        if repeat_times.layer_type == "Const":
+            repeat_times = repeat_times.value.tolist()
+            attr["repeat_times"] = repeat_times
         else:
-            inputs["expand_times"] = expand_times.name
+            inputs["repeat_times"] = repeat_times.name

         self.paddle_graph.add_layer(
-            kernel="fluid.layers.expand",
+            kernel="paddle.tile",
             inputs=inputs,
             outputs=[node.name],
             **attr)

+        if not isinstance(repeat_times, list) and repeat_times.layer_type != "Const":
+            self.paddle_graph.add_layer(
+                kernel="paddle.reshape",
+                inputs={"x": node.name},
+                outputs=[node.name],
+                shape=node.out_shapes[0])
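
Note: tf.tile's multiples argument maps directly onto paddle.tile's repeat_times; a minimal sketch:

```python
import paddle

x = paddle.to_tensor([[1, 2], [3, 4]])
print(paddle.tile(x, repeat_times=[2, 1]))  # [[1, 2], [3, 4], [1, 2], [3, 4]]
```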

     def Range(self, node):
         start = self.graph.get_node(node.layer.input[0])
@@ -1173,10 +1393,18 @@ class TFOpMapper(OpMapper):
         attr["dtype"] = string(node.dtype)

         self.paddle_graph.add_layer(
-            kernel="fluid.layers.range",
+            kernel="paddle.arange",
             inputs=inputs,
             outputs=[node.name],
             **attr)

+        if start.layer_type != "Const" or \
+           limit.layer_type != "Const" or \
+           delta.layer_type != "Const":
+            self.paddle_graph.add_layer(
+                kernel="paddle.reshape",
+                inputs={"x": node.name},
+                outputs=[node.name],
+                shape=node.out_shapes[0])

     def SquaredDifference(self, node):
         x = self.graph.get_node(node.layer.input[0])
@@ -1259,7 +1487,7 @@ class TFOpMapper(OpMapper):
         index = self.graph.get_node(node.layer.input[1])
         axis = self.graph.get_node(node.layer.input[2])
         assert axis.layer_type == 'Const', "Only support Const parameter[axis]"
-        axis = axis.value.tolist()
+        axis = axis.value
         assert axis == 0, "Only support axis=0 in GatherV2 OP"
         index_name = index.name
         if len(index.out_shapes[0]) != 1:
@@ -1283,6 +1511,15 @@ class TFOpMapper(OpMapper):
             inputs={"x": node.name},
             outputs=[node.name],
             shape=out_shape)

+    def GatherNd(self, node):
+        x = self.graph.get_input_node(node, 0)
+        index = self.graph.get_input_node(node, 1)
+        inputs = {'x': x.name, 'index': index.name}
+        self.paddle_graph.add_layer(
+            "paddle.gather_nd",
+            inputs=inputs,
+            outputs=[node.name])

     def ExpandDims(self, node):
         x = self.graph.get_node(node.layer.input[0], copy=True)
...