Commit ba689267 authored by SunAhong1993

add tf ops

Parent 18cc4a40
@@ -69,13 +69,19 @@ class TFOpMapper(OpMapper):
'Add': 'paddle.add',
'AddV2': 'paddle.add',
'RealDiv': 'paddle.divide',
'DivNoNan': 'paddle.divide',
'Sub': 'fluid.layers.elementwise_sub',
'Maximum': 'paddle.maximum',
'Minimum': 'paddle.minimum',
'LessEqual': 'paddle.less_equal',
'GreaterEqual': 'paddle.greater_equal',
'Greater': 'paddle.greater_than',
'NotEqual': 'paddle.not_equal',
'Equal': 'paddle.equal',
'Mul': 'paddle.multiply',
'FloorDiv': 'paddle.floor_divide',
'FloorMod': 'paddle.floor_mod',
'LogicalAnd': 'paddle.logical_and',
}
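# The table above routes these TF ops through the shared elementwise handler,
# so each op only needs its Paddle equivalent listed (inferred from the class
# structure; only the kernel name differs per entry).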
def __init__(self, decoder):
@@ -185,16 +191,6 @@ class TFOpMapper(OpMapper):
outputs=[node.name])
self.paddle_graph.layers[layer_id].input_shapes = {"x": x_shape, "y": y_shape}
def NotEqual(self, node):
x = self.graph.get_input_node(node, 0)
y = self.graph.get_input_node(node, 1)
self.paddle_graph.add_layer(
kernel="paddle.not_equal",
inputs={"x": x.name,
"y": y.name},
outputs=[node.name])
def Placeholder(self, node):
shape = node.out_shapes[0]
assert len(shape) != 0, "Unknown shape of input nodes[{}].".format(
@@ -249,6 +245,24 @@ class TFOpMapper(OpMapper):
outputs=[node.name],
perm=perm)
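# tf.where comes in two arities; roughly, the generated code is:
#   out = tf.where(cond)        ->  out = paddle.nonzero(cond)
#   out = tf.where(cond, x, y)  ->  out = paddle.where(cond, x, y)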
def Where(self, node):
if len(node.layer.input) == 1:
cond = self.graph.get_input_node(node, 0)
self.paddle_graph.add_layer(
"paddle.nonzero",
inputs={"x": cond.name},
outputs=[node.name])
else:
cond = self.graph.get_input_node(node, 0)
x = self.graph.get_input_node(node, 1)
y = self.graph.get_input_node(node, 2)
self.paddle_graph.add_layer(
"paddle.where",
inputs={"condition": cond.name,
"x": x.name,
"y": y.name},
outputs=[node.name])
def Neg(self, node):
input = self.graph.get_input_node(node, 0)
@@ -437,6 +451,71 @@ class TFOpMapper(OpMapper):
inputs={"x": node.name},
outputs=[node.name],
perm=[0, 2, 3, 1])
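# Conv3D: TF stores 3-D kernels as DHWIO (depth, height, width, in, out)
# while paddle.nn.Conv3D expects OIDHW, hence the (4, 3, 0, 1, 2) transpose
# below; NDHWC activations are moved to NCDHW and back around the conv.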
def Conv3D(self, node):
op_name = name_generator("conv", self.nn_name2id)
output_name = node.name
layer_outputs = [op_name, output_name]
input = self.graph.get_input_node(node, 0)
kernel = self.graph.get_input_node(node, 1)
k_size = kernel.out_shapes[0]
strides = node.get_attr("strides")
dilations = node.get_attr("dilations")
data_format = node.get_attr("data_format").decode()
pad_mode = node.get_attr("padding").decode()
if data_format == "NDHWC":
n, d, h, w, c = input.out_shapes[0]
else:
n, c, d, h, w = input.out_shapes[0]
if kernel.layer_type == 'Const':
kernel_value = kernel.value
else:
kernel_value = self.decoder.infer_tensor(kernel, use_diff_inputs=False)
kernel_weight_name = op_name + ".weight"
self.params[kernel_weight_name] = numpy.transpose(kernel_value,
(4, 3, 0, 1, 2))
input_name = input.name
if data_format == "NDHWC":
strides = [strides[i] for i in [0, 4, 1, 2, 3]]
dilations = [dilations[i] for i in [0, 4, 1, 2, 3]]
transpose_name = gen_name("conv3d", "transpose")
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": input.name},
outputs=[transpose_name],
perm=[0, 4, 1, 2, 3])
input_name = transpose_name
if c == -1:
# Channel count is unknown at conversion time; recover it from the
# kernel (k_size[3] is in_channels for a DHWIO kernel).
self.paddle_graph.add_layer(
kernel="paddle.reshape",
inputs={"x": input_name},
outputs=[input_name],
shape=[0, k_size[3], 0, 0, 0])
self.paddle_graph.add_layer(
kernel="paddle.nn.Conv3D",
inputs={"input": input_name},
outputs=layer_outputs,
weight_attr=string(kernel_weight_name),
bias_attr=False,
in_channels=k_size[3],
out_channels=k_size[4],
kernel_size=k_size[0:3],
stride=strides[2:5],
dilation=dilations[2:5],
padding=string(pad_mode))
if data_format == "NDHWC":
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": node.name},
outputs=[node.name],
perm=[0, 2, 3, 4, 1])
def BiasAdd(self, node):
input = self.graph.get_input_node(node, 0)
@@ -575,6 +654,33 @@ class TFOpMapper(OpMapper):
inputs={"x": input.name},
outputs=[node.name],
pad=paddings)
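# MirrorPad: TF supplies paddings as an [ndim, 2] tensor in axis order,
# while Paddle's PadXD layers take (before, after) pairs for the spatial
# dims only, innermost axis first, on an NCHW tensor -- hence the flip,
# slice and transposes below (assumes a 4-D NHWC input, as the fixed
# perm implies).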
def MirrorPad(self, node):
op_name = name_generator("pad", self.nn_name2id)
output_name = node.name
layer_outputs = [op_name, output_name]
input = self.graph.get_input_node(node, 0)
paddings = self.graph.get_input_node(node, 1)
assert paddings.layer_type == "Const", "Padding should be Const"
paddings = np.flip(paddings.value, 0).flatten().tolist()
# After the flip the pairs are ordered [channel, spatial..., batch];
# keep only the spatial (before, after) pairs for paddle.nn.Pad{dim}D.
dim = int(len(paddings) / 2) - 2
new_padding = paddings[2: 2 + dim * 2]
transpose_name = gen_name("pad", "transpose")
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": input.name},
outputs=[transpose_name],
perm=[0, 3, 1, 2])
self.paddle_graph.add_layer(
kernel="paddle.nn.Pad{}D".format(dim),
inputs={"x": transpose_name},
outputs=layer_outputs,
# MirrorPad mirrors at the border; assumes the common REFLECT mode.
mode=string("reflect"),
pad=new_padding)
self.paddle_graph.add_layer(
kernel="paddle.transpose",
inputs={"x": node.name},
outputs=[node.name],
perm=[0, 2, 3, 1])
def Squeeze(self, node):
input = self.graph.get_input_node(node, 0)
@@ -592,6 +698,25 @@ class TFOpMapper(OpMapper):
kernel="paddle.shape",
inputs={"input": input_name},
outputs=[node.name])
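# Size (the element count) is emulated as prod(shape(x)),
# e.g. shape [2, 3, 4] -> 2 * 3 * 4 = 24.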
def Size(self, node):
input = self.graph.get_input_node(node, 0)
input_name = input.name
self.paddle_graph.add_layer(
kernel="paddle.shape",
inputs={"input": input_name},
outputs=[node.name])
self.paddle_graph.add_layer(
kernel="paddle.prod",
inputs={"x": node.name},
outputs=[node.name])
def Ceil(self, node):
input = self.graph.get_input_node(node, 0)
self.paddle_graph.add_layer(
kernel="paddle.ceil",
inputs={"x": input.name},
outputs=[node.name])
def ArgMax(self, node):
input = self.graph.get_input_node(node, 0)
@@ -603,6 +728,19 @@ class TFOpMapper(OpMapper):
inputs={"x": input.name},
outputs=[node.name],
axis=axis)
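# TopKV2: k is baked into the generated paddle.topk call as a Python
# constant, which is why a non-Const k is rejected below.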
def TopKV2(self, node):
input = self.graph.get_input_node(node, 0)
k = self.graph.get_input_node(node, 1)
assert k.layer_type == "Const", "TopKV2 only supports a Const k"
k = k.value
sort = node.get_attr('sorted')
self.paddle_graph.add_layer(
kernel="paddle.topk",
inputs={"x": input.name},
outputs=[node.name],
k=k,
sorted=sort)
def MatMul(self, node):
x = self.graph.get_input_node(node, 0)
@@ -765,10 +903,13 @@ class TFOpMapper(OpMapper):
axis = 1
else:
raise Exception("Unexpected situation happened in Unpack OP")
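# paddle.unstack returns a list; for a single output, emitting "[name]"
# makes the generated assignment unpack the returned one-element list.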
layer_outputs = ["{}_p{}".format(node.layer_name, i) for i in range(num)]
if len(layer_outputs) == 1:
layer_outputs[0] = "[{}]".format(node.layer_name)
self.paddle_graph.add_layer(
kernel="paddle.unstack",
inputs={"x": input_name},
outputs=["{}_p{}".format(node.layer_name, i) for i in range(num)],
outputs=layer_outputs,
axis=axis,
num=num)
@@ -776,7 +917,6 @@ class TFOpMapper(OpMapper):
inputs_list = list()
for i in range(len(node.inputs) - 1):
inputs_list.append(self.graph.get_input_node(node, i))
axis = self.graph.get_input_node(node, -1)
assert axis.layer_type == "Const", "axis for ConcatV2 must be type Const"
axis = axis.value
@@ -789,6 +929,17 @@ class TFOpMapper(OpMapper):
inputs={"x": input_names},
outputs=[node.name],
axis=axis)
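# AddN sums an arbitrary number of tensors; paddle.add_n takes the whole
# list at once, e.g. out = paddle.add_n([a, b, c]).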
def AddN(self, node):
inputs_list = list()
# AddN has no trailing axis input (unlike ConcatV2), so every input is an addend.
for i in range(len(node.inputs)):
inputs_list.append(self.graph.get_input_node(node, i))
input_names = [i.name for i in inputs_list]
self.paddle_graph.add_layer(
kernel="paddle.add_n",
inputs={"inputs": input_names},
outputs=[node.name])
def StridedSlice(self, node):
input = self.graph.get_input_node(node, 0)
@@ -894,6 +1045,20 @@ class TFOpMapper(OpMapper):
inputs={"x": node.name},
outputs=[node.name],
axis=shrink_axes)
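# Prod reduces over constant reduction_indices; TF's keep_dims maps
# directly onto paddle.prod's keepdim flag.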
def Prod(self, node):
input = self.graph.get_input_node(node, 0)
reduction_indices = self.graph.get_input_node(node, 1)
assert reduction_indices.layer_type == "Const", "reduction_indices for Prod must be Const"
keep_dims = node.get_attr('keep_dims')
axis = reduction_indices.value
self.paddle_graph.add_layer(
kernel="paddle.prod",
inputs={"x": input.name},
outputs=[node.layer_name],
keepdim=keep_dims,
axis=axis)
def Split(self, node):
dim = self.graph.get_input_node(node, 0)
@@ -1177,15 +1342,15 @@ class TFOpMapper(OpMapper):
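# Tile: constant repeat factors are inlined as an attribute; dynamic
# ones are wired in as a tensor input instead.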
def Tile(self, node):
input = self.graph.get_input_node(node, 0)
repeat_times = self.graph.get_input_node(node, 1)
inputs = {"x": input.name}
attr = dict()
in_shape = input.out_shapes[0]
if repeat_times.layer_type == "Const":
repeat_times = repeat_times.value.tolist()
attr["repeat_times"] = repeat_times
else:
inputs["repeat_times"] = expand_times.name
inputs["repeat_times"] = repeat_times.name
self.paddle_graph.add_layer(
kernel="paddle.tile",
@@ -1206,6 +1371,7 @@ class TFOpMapper(OpMapper):
if start.layer_type == "Const":
attr["start"] = start.value
else:
inputs["start"] = start.name
if limit.dtype.startswith('float'):
dtype = limit.dtype
@@ -1309,8 +1475,7 @@ class TFOpMapper(OpMapper):
index = self.graph.get_input_node(node, 1)
axis = self.graph.get_input_node(node, 2)
assert axis.layer_type == 'Const', "Only support Const parameter[axis]"
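# paddle.gather accepts an axis argument, so the old axis == 0
# restriction can be dropped and the value passed through.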
axis = axis.value
index_name = index.name
if len(index.out_shapes[0]) != 1:
reshape_name = gen_name("gather", "reshape")
@@ -1324,7 +1489,8 @@ class TFOpMapper(OpMapper):
self.paddle_graph.add_layer(
"paddle.gather",
inputs=inputs,
outputs=[node.name],
axis=axis)
if len(index.out_shapes[0]) != 1:
out_shape = node.out_shapes[0]
self.paddle_graph.add_layer(
@@ -1332,6 +1498,15 @@ class TFOpMapper(OpMapper):
inputs={"x": node.name},
outputs=[node.name],
shape=out_shape)
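# GatherNd maps one-to-one onto paddle.gather_nd, e.g. x of shape
# [2, 3] with index [[0, 1], [1, 2]] gathers x[0, 1] and x[1, 2].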
def GatherNd(self, node):
x = self.graph.get_input_node(node, 0)
index = self.graph.get_input_node(node, 1)
inputs = {'x': x.name, 'index': index.name}
self.paddle_graph.add_layer(
"paddle.gather_nd",
inputs=inputs,
outputs=[node.name])
def ExpandDims(self, node):
x = self.graph.get_input_node(node, 0, copy=True)
......