Commit 264c0c85 authored by SunAhong1993

for structure

Parent 92bd03a5
@@ -18,6 +18,7 @@ from google.protobuf import text_format
 import numpy as np
 from x2paddle.core.graph import GraphNode, Graph
 from x2paddle.core.fluid_code import FluidCode
+from x2paddle.decoder import caffe_shape_inference


 class CaffeResolver(object):
@@ -60,6 +61,28 @@ class CaffeGraphNode(GraphNode):
     def set_params(self, params):
         self.data = params

+    @property
+    def name(self):
+        if hasattr(self, 'index'):
+            return "{}_p{}".format(self.layer_name, self.index)
+        return self.layer_name
+
+    @property
+    def out_shapes(self):
+        return self._out_shapes
+
+    @out_shapes.setter
+    def out_shapes(self, value):
+        self._out_shapes = value
+
+    @property
+    def in_shapes(self):
+        return self._in_shapes
+
+    @in_shapes.setter
+    def in_shapes(self, value):
+        self._in_shapes = value


 class CaffeGraph(Graph):
     def __init__(self, model, params, caffe_pb):
@@ -226,8 +249,11 @@ class CaffeGraph(Graph):
                 layer_name)

         super(CaffeGraph, self).build()
+        for i, node_name in enumerate(self.topo_sort):
+            node = self.get_node(node_name)
+            self.set_node_shape(node)

-    def get_bottom_node(self, node, idx=0, copy=False):
+    def get_input_node(self, node, idx=0, copy=False):
         input_node_name = node.inputs[idx]
         assert input_node_name in self.node_map, 'The {} isn\'t a valid node'.format(
             name)
@@ -239,6 +265,19 @@ class CaffeGraph(Graph):
             name = input_node_name
         return self.get_node(name, copy=copy)

+    def set_node_shape(self, node):
+        inputs = node.inputs
+        input_shape = []
+        for i, nm in enumerate(inputs):
+            last_node = self.get_node(nm)
+            tmp = node.layer.bottom[i]
+            idx = list(last_node.layer.top).index(tmp)
+            input_shape.append(last_node.out_shapes[idx])
+        node.in_shapes = input_shape
+        func_name = 'shape_' + node.layer_type.lower()
+        node.out_shapes = getattr(caffe_shape_inference, func_name)(node.layer,
+                                                                    input_shape)
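The new set_node_shape resolves a per-layer shape function by name. A minimal sketch of that dispatch, assuming caffe_shape_inference exposes one helper per layer type following the 'shape_' + layer_type.lower() pattern above (the concrete helper names, e.g. shape_convolution, are an assumption):

    # Sketch only: how set_node_shape picks its shape-inference helper.
    # A 'Convolution' node resolves to caffe_shape_inference.shape_convolution
    # (assumed name, derived from the pattern in the diff above).
    func_name = 'shape_' + node.layer_type.lower()
    infer_func = getattr(caffe_shape_inference, func_name)
    # in_shapes was collected from each producer's out_shapes above.
    node.out_shapes = infer_func(node.layer, node.in_shapes)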

 class CaffeDecoder(object):
     def __init__(self, proto_path, model_path, caffe_proto):
...
@@ -65,6 +65,12 @@ class ONNXGraphNode(GraphNode):
             return None
         return self.attr_map['value']

+    @property
+    def name(self):
+        if hasattr(self, 'index'):
+            return "{}_p{}".format(self.layer_name, self.index)
+        return self.layer_name
+
     def get_attribute_value(self, attr):
         """
         get_attribute_value enhanced
@@ -119,6 +125,10 @@ class ONNXGraphDataNode(GraphNode):
                 out_shapes.append(values)
             return out_shapes

+    @property
+    def name(self):
+        return self.layer_name
+
     @property
     def dtype(self):
         if isinstance(self.layer, ValueInfoProto):
@@ -309,6 +319,7 @@ class ONNXGraph(Graph):
             ipt_node.index = node.which_child[ipt_node.layer_name]
         return ipt_node

     def graph_weights(self):
         """
         generator for weights
...
@@ -190,6 +190,10 @@ class TFGraph(Graph):
             node.index = 0
         return node

+    def get_input_node(self, node, idx=0, copy=False):
+        input_node_name = node.inputs[idx]
+        return self.get_node(input_node_name, copy)
+
     def remove_node(self, node_name):
         if node_name not in self.node_map:
             raise Exception("Node[{}] not in graph".format(node_name))
@@ -316,7 +320,7 @@ class TFDecoder(object):
             self.sess = tf.compat.v1.Session()
         except:
             self.sess = tf.Session()
-        self.input_info = dict()
+        self.inputs_info = dict()
         self.define_input_shape = define_input_shape
         with open(pb_model, 'rb') as f:
             try:
@@ -426,50 +430,40 @@
                 input_map["{}:0".format(layer.name)] = x2paddle_input
                 if shape.count(None) > 0:
                     shape[shape.index(None)] = -1
-                self.input_info["x2paddle_{}".format(layer.name)] = (shape,
+                self.inputs_info["x2paddle_{}".format(layer.name)] = (shape,
                                                                      dtype)
             else:
                 value = graph_node.layer.attr["shape"].shape
                 shape = [dim.size for dim in value.dim]
-                self.input_info[layer.name] = (shape, dtype)
+                self.inputs_info[layer.name] = (shape, dtype)

         return input_map

     # trick method
     # should be removed after PaddlePaddle V1.6 been released
-    def infer_tensor(self, graph_node):
-        if hasattr(graph_node, "index"):
-            tensor_name = graph_node.layer.name + ":{}".format(graph_node.index)
-        else:
-            tensor_name = graph_node.layer.name + ":0"
-        feed = dict()
-        for input_name, info in self.input_info.items():
-            (shape, dtype) = cp.deepcopy(info)
-            input_tensor = self.sess.graph.get_tensor_by_name(input_name + ":0")
-            if shape.count(-1) > 0:
-                shape[shape.index(-1)] = 2
-            feed[input_tensor] = numpy.random.random_sample(shape)
-        output_tensor = self.sess.graph.get_tensor_by_name(tensor_name)
-        return self.sess.run([output_tensor], feed)[0]
-
-    def infer_shape_tensor(self, graph_node, out_shape=None):
+    def infer_tensor(self, graph_node, out_shape=None, use_diff_inputs=True):
         if hasattr(graph_node, "index"):
             tensor_name = graph_node.layer.name + ":{}".format(graph_node.index)
         else:
             tensor_name = graph_node.layer.name + ":0"
         feed = dict()
-        batch_size = [2, 3, 5]
+        if use_diff_inputs:
+            batch_size = [2, 3, 5]
+        else:
+            batch_size = [2]
         results = list()
         for b in batch_size:
-            for input_name, info in self.input_info.items():
+            for input_name, info in self.inputs_info.items():
                 (shape, dtype) = cp.deepcopy(info)
-                input_tensor = self.sess.graph.get_tensor_by_name(input_name +
-                                                                  ":0")
+                input_tensor = self.sess.graph.get_tensor_by_name(input_name + ":0")
                 if shape.count(-1) > 0:
                     shape[shape.index(-1)] = b
                 feed[input_tensor] = numpy.random.random_sample(shape)
             output_tensor = self.sess.graph.get_tensor_by_name(tensor_name)
-            results.append(self.sess.run([output_tensor], feed)[0].flatten())
+            if use_diff_inputs:
+                results.append(self.sess.run([output_tensor], feed)[0].flatten())
+            else:
+                return self.sess.run([output_tensor], feed)[0]

         compare01 = (results[0] == results[1])
         compare12 = (results[1] == results[2])
@@ -494,38 +488,3 @@
             return results[0].tolist()
         else:
             raise Exception("Couldn't infer a stable shape shape tensor value")
-
-    def infer_tensor_shape(self, graph_node):
-        if hasattr(graph_node, "index"):
-            tensor_name = graph_node.layer.name + ":{}".format(graph_node.index)
-        else:
-            tensor_name = graph_node.layer.name + ":0"
-        feed = dict()
-        batch_size = [2, 3, 5]
-        shapes = list()
-        for b in batch_size:
-            for input_name, info in self.input_info.items():
-                (shape, dtype) = cp.deepcopy(info)
-                input_tensor = self.sess.graph.get_tensor_by_name(input_name +
-                                                                  ":0")
-                if shape.count(-1) > 0:
-                    shape[shape.index(-1)] = b
-                feed[input_tensor] = numpy.random.random_sample(shape)
-            output_tensor = self.sess.graph.get_tensor_by_name(tensor_name)
-            shape = self.sess.run([output_tensor], feed)[0].shape
-            shapes.append(numpy.array(shape))
-        compare01 = (shapes[0] == shapes[1])
-        compare12 = (shapes[1] == shapes[2])
-        if compare01.all() and compare12.all():
-            return shape[0].tolist()
-        if (compare01 == compare12).all():
-            index = numpy.argwhere(compare01 == False).flatten()
-            if index.shape[0] != 1:
-                raise Exception("There's not only one unstable dimension")
-            if index[0] != 0:
-                raise Exception("Batch size not in the first dimension")
-            shapes[0][0] = -1
-            return shapes[0].tolist()
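The merged infer_tensor above absorbs both removed helpers behind the new use_diff_inputs flag. A hedged usage sketch (the decoder and node objects are illustrative):

    # use_diff_inputs=False: one pass with batch size 2, returns the raw
    # tensor value (the old infer_tensor behavior).
    value = decoder.infer_tensor(node, use_diff_inputs=False)

    # use_diff_inputs=True: three passes with batch sizes 2, 3 and 5; the
    # flattened results are compared across runs to spot batch-dependent
    # entries (the old infer_shape_tensor behavior).
    shape_value = decoder.infer_tensor(node, use_diff_inputs=True)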
@@ -12,81 +12,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import sys
 import numbers
 import numpy as np
 from x2paddle.core.op_mapper import OpMapper
 from x2paddle.core.util import *
-from x2paddle.op_mapper.dygraph.caffe2paddle import caffe_shape
 from x2paddle.core.program import PaddleGraph
+from x2paddle.decoder.caffe_decoder import CaffeGraphNode

-class CaffeOpMapper(OpMapper):
-    directly_map_ops = {
-        'Sigmoid': 'paddle.nn.layer.Sigmoid',
-        'TanH': 'paddle.nn.Tanh',
-    }
-
-    def __init__(self, decoder):
-        super(CaffeOpMapper, self).__init__()
-        self.graph = decoder.caffe_graph
-        self.params = dict()
-        self.paddle_graph = PaddleGraph(parent_layer=None, graph_type="dygraph", source_type="caffe")
-        self.paddle_graph.outputs = self.graph.output_nodes
-        self.input_index = 0
-        self.inputs_info = {}
-        self.nn_name2id = {}
-        print("Total nodes: {}".format(len(self.graph.topo_sort)))
-        for node_name in self.graph.topo_sort:
-            node = self.graph.get_node(node_name)
-            if node.layer_type == 'DepthwiseConvolution':
-                node.layer_type = 'ConvolutionDepthwise'
-            op = node.layer_type
-            if hasattr(self, op):
-                self.set_node_shape(node)
-                func = getattr(self, op)
-                func(node)
-            elif op in self.directly_map_ops:
-                self.set_node_shape(node)
-                self.directly_map(node)
-            else:
-                raise Exception(
-                    "The op {} in model is not supported yet.".format(op))
-        self.paddle_graph.set_name(self.graph.graph_name)
-        self.paddle_graph.set_parameters(self.params)
-        self.paddle_graph.set_inputs_info(self.inputs_info)
-
-    def op_checker(self):
-        unsupported_ops = set()
-        for node_name in self.graph.topo_sort:
-            node = self.graph.get_node(node_name)
-            op = node.layer_type
-            if not hasattr(self, op) and op not in custom_layers:
-                unsupported_ops.add(op)
-        if len(unsupported_ops) == 0:
-            return True
-        else:
-            print("There are {} ops not supported yet, list as below".format(
-                len(unsupported_ops)))
-            for op in unsupported_ops:
-                print(op)
-            return False
-
-    def set_node_shape(self, node):
-        inputs = node.inputs
-        input_shape = []
-        for i, nm in enumerate(inputs):
-            last_node = self.graph.get_node(nm)
-            tmp = node.layer.bottom[i]
-            idx = list(last_node.layer.top).index(tmp)
-            input_shape.append(last_node.output_shape[idx])
-        node.input_shape = input_shape
-        func_name = 'shape_' + node.layer_type.lower()
-        node.output_shape = getattr(caffe_shape, func_name)(node.layer,
-                                                            input_shape)
-
-    def adjust_parameters(self, node):
+
+def _adjust_parameters(node):
     data = node.data
     # When using the protobuf-backend, each parameter initially has four dimensions.
     # In certain cases (like FC layers), we want to eliminate the singleton dimensions.
@@ -122,7 +57,7 @@ class CaffeOpMapper(OpMapper):
             shape_new = data[idx].shape
     return data

-    def get_kernel_parameters(self, kind, params):
+def _get_kernel_parameters(kind, params):
     assert kind in ["Convolution", "Pooling", "Deconvolution", "ConvolutionDepthwise"]
     [k_h, k_w] = [1, 1]
     if isinstance(params.kernel_size, numbers.Number):
@@ -178,11 +113,81 @@ class CaffeOpMapper(OpMapper):
     dilation = [dila_h, dila_w]
     return c_o, kernel, stride, pad, dilation, group

-    def get_input_name(self, node):
-        if hasattr(node, "index"):
-            return "{}_{}".format(node.layer_name, node.index)
-        else:
-            return node.layer_name
+
+class CaffeOpMapper(OpMapper):
+    directly_map_ops = {
+        'Sigmoid': ['paddle.nn.layer.Sigmoid'],
+        'TanH': ['paddle.nn.Tanh'],
+    }
+
+    def __init__(self, decoder):
+        super(CaffeOpMapper, self).__init__()
+        self.graph = decoder.caffe_graph
+        if not self.op_checker():
+            raise Exception("Model is not supported yet.")
+        self.params = dict()
+        self.paddle_graph = PaddleGraph(parent_layer=None, graph_type="dygraph", source_type="caffe")
+        self.paddle_graph.outputs = self.graph.output_nodes
+        self.input_index = 0
+        self.inputs_info = {}
+        self.nn_name2id = {}
+        print("Total nodes: {}".format(
+            sum([
+                isinstance(node, CaffeGraphNode)
+                for name, node in self.graph.node_map.items()
+            ])))
+        print("Nodes converting ...")
+        for i, node_name in enumerate(self.graph.topo_sort):
+            sys.stderr.write("\rConverting node {} ... ".format(i + 1))
+            node = self.graph.get_node(node_name)
+            op = node.layer_type
+            if hasattr(self, op):
+                func = getattr(self, op)
+                func(node)
+            elif op in self.directly_map_ops:
+                self.directly_map(node)
+        print("\nNodes converted.")
+        self.paddle_graph.set_name(self.graph.graph_name)
+        self.paddle_graph.set_parameters(self.params)
+        self.paddle_graph.set_inputs_info(self.inputs_info)
+
+    def op_checker(self):
+        unsupported_ops = set()
+        for node_name in self.graph.topo_sort:
+            node = self.graph.get_node(node_name)
+            op = node.layer_type
+            if not hasattr(self, op) and op not in self.directly_map_ops:
+                unsupported_ops.add(op)
+        if len(unsupported_ops) == 0:
+            return True
+        else:
+            if len(unsupported_ops) > 0:
+                print("\n========= {} OPs are not supported yet ===========".format(
+                    len(unsupported_ops)))
+            for op in unsupported_ops:
+                print("========== {} ============".format(op))
+            return False

+    def directly_map(self, node):
+        inputs = node.layer.input
+        assert len(inputs) == 1, 'directly_map error with multi inputs'
+        op_info = self.directly_map_ops[node.layer_type]
+        input = self.graph.get_input_node(node, 0)
+        paddle_op = op_info[0]
+        if paddle_op.startswith("paddle.nn"):
+            op_name = paddle_op[10:].lower()
+            op_name = name_generator(op_name, self.nn_name2id)
+            output_name = node.name
+            layer_outputs = [op_name, output_name]
+            self.paddle_graph.add_layer(
+                kernel=paddle_op,
+                inputs={"x": input.name},
+                outputs=layer_outputs)
+        else:
+            self.paddle_graph.add_layer(
+                kernel=paddle_op,
+                inputs={"x": input.name},
+                outputs=[node.name])
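For reference, a sketch of how a directly_map_ops entry flows through directly_map, assuming name_generator (presumably brought in by the wildcard import of x2paddle.core.util) appends a per-kernel counter to make layer names unique; the exact generated names are an assumption:

    # 'Sigmoid' maps to ['paddle.nn.layer.Sigmoid']; op_info[0] is the kernel.
    paddle_op = CaffeOpMapper.directly_map_ops['Sigmoid'][0]
    # Strip the 'paddle.nn.' prefix to derive the sub-layer name.
    op_name = paddle_op[10:].lower()   # -> 'layer.sigmoid'
    # name_generator disambiguates repeated ops via the shared nn_name2id dict
    # on the mapper instance (e.g. 'layer.sigmoid0', assumed suffixing scheme).
    op_name = name_generator(op_name, mapper.nn_name2id)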

     def Input(self, node):
         self.paddle_graph.add_layer(
@@ -200,7 +205,7 @@ class CaffeOpMapper(OpMapper):
         layer_outputs = [conv2d_name, output_name]
         data = node.data
         params = node.layer.convolution_param
-        out_channel, kernel, stride, pad, dilation, group = self.get_kernel_parameters(
+        out_channel, kernel, stride, pad, dilation, group = _get_kernel_parameters(
             node.layer_type, params)
         if data is None:
             data = []
@@ -208,19 +213,19 @@ class CaffeOpMapper(OpMapper):
                 "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                 .format(node.layer_name, node.layer_type))
             data.append(
-                np.zeros([out_channel, node.input_shape[0][1], kernel[0], kernel[1]]).astype(
+                np.zeros([out_channel, node.in_shapes[0][1], kernel[0], kernel[1]]).astype(
                     'float32'))
             data.append(np.zeros([out_channel, ]).astype('float32'))
         else:
-            data = self.adjust_parameters(node)
+            data = _adjust_parameters(node)
         self.params[conv2d_name + ".weight"] = data[0]
         if len(data) == 2:
             self.params[conv2d_name + ".bias"] = data[1]
         assert len(node.inputs
                    ) == 1, "The count of Convolution node\'s input is not 1."
-        input = self.graph.get_bottom_node(node, idx=0, copy=True)
+        input = self.graph.get_input_node(node, idx=0, copy=True)
         layer_attrs = {
-            "in_channels": node.input_shape[0][1],
+            "in_channels": node.in_shapes[0][1],
             "out_channels": out_channel,
             "kernel_size": kernel,
             "stride": stride,
@@ -232,17 +237,21 @@ class CaffeOpMapper(OpMapper):
             layer_attrs["bias_attr"] = False
         self.paddle_graph.add_layer(
             "paddle.nn.Conv2D",
-            inputs={"input": self.get_input_name(input)},
+            inputs={"input": input.name},
             outputs=layer_outputs,
             **layer_attrs)

+    def DepthwiseConvolution(self, node):
+        node.layer_type = "ConvolutionDepthwise"
+        self.ConvolutionDepthwise(node)
+
     def Deconvolution(self, node):
         conv2d_name = name_generator("conv", self.nn_name2id)
         output_name = node.layer_name
         layer_outputs = [conv2d_name, output_name]
         data = node.data
         params = node.layer.convolution_param
-        out_channel, kernel, stride, pad, dilation, group = self.get_kernel_parameters(
+        out_channel, kernel, stride, pad, dilation, group = _get_kernel_parameters(
             node.layer_type, params)
         if data is None:
             data = []
@@ -250,19 +259,19 @@ class CaffeOpMapper(OpMapper):
                 "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                 .format(node.layer_name, node.layer_type))
             data.append(
-                np.zeros([out_channel, node.input_shape[0][1], kernel[0], kernel[1]]).astype(
+                np.zeros([out_channel, node.in_shapes[0][1], kernel[0], kernel[1]]).astype(
                     'float32'))
             data.append(np.zeros([out_channel, ]).astype('float32'))
         else:
-            data = self.adjust_parameters(node)
+            data = _adjust_parameters(node)
         self.params[conv2d_name + ".weight"] = data[0]
         if len(data) == 2:
             self.params[conv2d_name + ".bias"] = data[1]
         assert len(node.inputs
                    ) == 1, "The count of Deconvolution node\'s input is not 1."
-        input = self.graph.get_bottom_node(node, idx=0, copy=True)
+        input = self.graph.get_input_node(node, idx=0, copy=True)
         layer_attrs = {
-            "in_channels": node.input_shape[0][1],
+            "in_channels": node.in_shapes[0][1],
             "out_channels": out_channel,
             "kernel_size": kernel,
             "stride": stride,
@@ -274,7 +283,7 @@ class CaffeOpMapper(OpMapper):
             layer_attrs["bias_attr"] = False
         self.paddle_graph.add_layer(
             "paddle.nn.Conv2DTranspose",
-            inputs={"input": self.get_input_name(input)},
+            inputs={"input": input.name},
             outputs=layer_outputs,
             **layer_attrs)
@@ -284,10 +293,10 @@ class CaffeOpMapper(OpMapper):
         layer_outputs = [conv2d_name, output_name]
         data = node.data
         params = node.layer.convolution_param
-        out_channel, kernel, stride, pad, dilation, group = self.get_kernel_parameters(
+        out_channel, kernel, stride, pad, dilation, group = _get_kernel_parameters(
             node.layer_type, params)
-        out_channel = params.num_output if params.num_output is not None else node.input_shape[0][1]
-        in_channel = node.input_shape[0][1]
+        out_channel = params.num_output if params.num_output is not None else node.in_shapes[0][1]
+        in_channel = node.in_shapes[0][1]
         group = int(in_channel / (in_channel / out_channel)) if in_channel > out_channel else int(in_channel /
                                                                                                   (out_channel / in_channel))
         if data is None:
@@ -296,17 +305,17 @@ class CaffeOpMapper(OpMapper):
                 "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                 .format(node.layer_name, node.layer_type))
             data.append(
-                np.zeros([out_channel, node.input_shape[0][1], kernel[0], kernel[1]]).astype(
+                np.zeros([out_channel, node.in_shapes[0][1], kernel[0], kernel[1]]).astype(
                     'float32'))
             data.append(np.zeros([out_channel, ]).astype('float32'))
         else:
-            data = self.adjust_parameters(node)
+            data = _adjust_parameters(node)
         self.params[conv2d_name + ".weight"] = data[0]
         if len(data) == 2:
             self.params[conv2d_name + ".bias"] = data[1]
         assert len(node.inputs
                    ) == 1, "The count of Deconvolution node\'s input is not 1."
-        input = self.graph.get_bottom_node(node, idx=0, copy=True)
+        input = self.graph.get_input_node(node, idx=0, copy=True)
         layer_attrs = {
             "in_channels": in_channel,
             "out_channels": out_channel,
@@ -320,7 +329,7 @@ class CaffeOpMapper(OpMapper):
             layer_attrs["bias_attr"] = False
         self.paddle_graph.add_layer(
             "paddle.nn.Conv2D",
-            inputs={"input": self.get_input_name(input)},
+            inputs={"input": input.name},
             outputs=layer_outputs,
             **layer_attrs)
@@ -332,7 +341,7 @@ class CaffeOpMapper(OpMapper):
         ceil_mode = getattr(params, "ceil_mod", True)
         global_pool = getattr(params, "global_pooling", False)
         kernel_default = [1, 1]
-        channel, kernel, stride, pad, dilation, group = self.get_kernel_parameters(
+        channel, kernel, stride, pad, dilation, group = _get_kernel_parameters(
             node.layer_type, params)
         if params.pool == 0:
             pool_type = "max"
@@ -340,20 +349,20 @@ class CaffeOpMapper(OpMapper):
             pool_type = "avg"
         assert len(
             node.inputs) == 1, "The count of Pooling node\'s input is not 1."
-        input = self.graph.get_bottom_node(node, idx=0, copy=True)
+        input = self.graph.get_input_node(node, idx=0, copy=True)
         if global_pool:
             if kernel[0] == 0:
                 kernel = [1, 1]
             if params.pool == 0:
                 self.paddle_graph.add_layer(
                     "paddle.nn.AdaptiveMaxPool2D",
-                    inputs={"input": self.get_input_name(input)},
+                    inputs={"input": input.name},
                     outputs=layer_outputs,
                     output_size=kernel)
             else:
                 self.paddle_graph.add_layer(
                     "paddle.nn.AdaptiveAvgPool2D",
-                    inputs={"input": self.get_input_name(input)},
+                    inputs={"input": input.name},
                     outputs=layer_outputs,
                     output_size=kernel)
         else:
@@ -368,7 +377,7 @@ class CaffeOpMapper(OpMapper):
             }
             self.paddle_graph.add_layer(
                 "paddle.fluid.dygraph.Pool2D",
-                inputs={"input": self.get_input_name(input)},
+                inputs={"input": input.name},
                 outputs=layer_outputs,
                 **layer_attrs)
             # layer_attrs = {
@@ -380,20 +389,20 @@ class CaffeOpMapper(OpMapper):
             # if params.pool == 0:
             #     self.paddle_graph.add_layer(
             #         "paddle.nn.MaxPool2D",
-            #         inputs={"input": self.get_input_name(input)},
+            #         inputs={"input": input.name},
             #         outputs=layer_outputs,
             #         **layer_attrs)
             # else:
             #     layer_attrs["count_include_pad"] = True
             #     self.paddle_graph.add_layer(
             #         "paddle.nn.AvgPool2D",
-            #         inputs={"input": self.get_input_name(input)},
+            #         inputs={"input": input.name},
             #         outputs=layer_outputs,
             #         **layer_attrs)

     def LRN(self, node):
         assert len(node.inputs) == 1, "The count of LRN node\'s input is not 1."
-        input = self.graph.get_bottom_node(node, idx=0, copy=True)
+        input = self.graph.get_input_node(node, idx=0, copy=True)
         params = node.layer.lrn_param
         assert params.local_size % 2 == 1
         alpha = params.alpha / float(params.local_size)
@@ -405,7 +414,7 @@ class CaffeOpMapper(OpMapper):
         }
         self.paddle_graph.add_layer(
             "fluid.layers.lrn",
-            inputs={"input": self.get_input_name(input)},
+            inputs={"input": input.name},
             outputs=[node.layer_name],
             **layer_attrs)
@@ -414,7 +423,7 @@ class CaffeOpMapper(OpMapper):
         output_name = node.layer_name
         layer_outputs = [linear_name, output_name]
         data = node.data
-        input = self.graph.get_bottom_node(node, idx=0, copy=True)
+        input = self.graph.get_input_node(node, idx=0, copy=True)
         params = node.layer.inner_product_param
         if data is None:
             print(
@@ -422,12 +431,12 @@ class CaffeOpMapper(OpMapper):
                 .format(node.layer_name, node.layer_type))
             data = []
             data.append(
-                np.zeros([node.input_shape[0][1], params.num_output]).astype("float32").astype(
+                np.zeros([node.in_shapes[0][1], params.num_output]).astype("float32").astype(
                     "float32"))
             data.append(
                 np.zeros([params.num_output]).astype("float32").astype("float32"))
         else:
-            data = self.adjust_parameters(node)
+            data = _adjust_parameters(node)
             # Reshape the parameters to Paddle's ordering
             transpose_order = (1, 0)
             w = data[0]
@@ -450,10 +459,10 @@ class CaffeOpMapper(OpMapper):
         }
         if len(data) == 1:
             layer_attrs["bias"] = False
-        if node.input_shape[0][-1] != data[0].shape[0]:
+        if node.in_shapes[0][-1] != data[0].shape[0]:
             self.paddle_graph.add_layer(
                 "paddle.reshape",
-                inputs={"x": self.get_input_name(input)},
+                inputs={"x": input.name},
                 outputs=[output_name],
                 shape=[-1, data[0].shape[0]])
             self.paddle_graph.add_layer(
@@ -464,7 +473,7 @@ class CaffeOpMapper(OpMapper):
         else:
             self.paddle_graph.add_layer(
                 "paddle.nn.Linear",
-                inputs={"input": self.get_input_name(input)},
+                inputs={"input": input.name},
                 outputs=layer_outputs,
                 **layer_attrs)
@@ -472,10 +481,10 @@ class CaffeOpMapper(OpMapper):
         assert len(
             node.inputs
         ) >= 1, "The count of AbsVal node\'s input is not more than 1."
-        input = self.graph.get_bottom_node(node, idx=0, copy=True)
+        input = self.graph.get_input_node(node, idx=0, copy=True)
         self.paddle_graph.add_layer(
             "paddle.abs",
-            inputs={"input": self.get_input_name(input)},
+            inputs={"input": input.name},
             outputs=[node.layer_name])

     def Softmax(self, node):
@@ -484,42 +493,42 @@ class CaffeOpMapper(OpMapper):
         layer_outputs = [softmax_name, output_name]
         assert len(
             node.inputs) == 1, "The count of Softmax node\'s input is not 1."
-        input = self.graph.get_bottom_node(node, idx=0, copy=True)
+        input = self.graph.get_input_node(node, idx=0, copy=True)
         params = node.layer.softmax_param
         axis = params.axis
-        shape = node.input_shape[0]
+        shape = node.in_shapes[0]
         dims = len(shape)
         axis = axis + dims if axis < 0 else axis
         layer_attrs = {'axis': axis}
         self.paddle_graph.add_layer(
             "paddle.nn.Softmax",
-            inputs={"input": self.get_input_name(input)},
+            inputs={"input": input.name},
             outputs=layer_outputs,
             **layer_attrs)

     def Slice(self, node):
         assert len(
             node.inputs) == 1, "The count of Slice node\'s input is not 1."
-        input = self.graph.get_bottom_node(node, idx=0, copy=True)
+        input = self.graph.get_input_node(node, idx=0, copy=True)
         top_len = len(node.layer.top)
         params = node.layer.slice_param
         axis = params.axis
         slice_dim = params.slice_dim
         if slice_dim != 1 and axis == 1:
             axis = slice_dim
-        output_shape = node.output_shape
+        output_shape = node.out_shapes
         sections_list = list()
         outputs_list = list()
         for i, s in enumerate(output_shape):
             sections_list.append(s[axis])
-            outputs_list.append("{}_{}".format(node.layer_name, i))
+            outputs_list.append("{}_p{}".format(node.layer_name, i))
         layer_attrs = {
             'num_or_sections': sections_list,
             'axis': axis,
         }
         self.paddle_graph.add_layer(
             "paddle.split",
-            inputs={"x": self.get_input_name(input)},
+            inputs={"x": input.name},
             outputs=outputs_list,
             **layer_attrs)
@@ -529,8 +538,8 @@ class CaffeOpMapper(OpMapper):
         ) >= 1, "The count of Concat node\'s input is not more than 1."
         inputs_list = list()
         for i in range(len(node.inputs)):
-            input = self.graph.get_bottom_node(node, idx=i, copy=True)
-            inputs_list.append(self.get_input_name(input))
+            input = self.graph.get_input_node(node, idx=i, copy=True)
+            inputs_list.append(input.name)
         params = node.layer.concat_param
         axis = params.axis
         layer_attrs = {'axis': axis}
@@ -546,7 +555,7 @@ class CaffeOpMapper(OpMapper):
         layer_outputs = [relu_name, output_name]
         assert len(
             node.inputs) == 1, "The count of RelU node\'s input is not 1."
-        input = self.graph.get_bottom_node(node, idx=0, copy=True)
+        input = self.graph.get_input_node(node, idx=0, copy=True)
         params = node.layer.relu_param
         if params.HasField('negative_slope') and params.negative_slope != 0:
             negative_slope = float(params.negative_slope)
@@ -554,13 +563,13 @@ class CaffeOpMapper(OpMapper):
             layer_attrs = {'alpha': negative_slope}
             self.paddle_graph.add_layer(
                 "paddle.nn.LeakyReLU",
-                inputs={"input": self.get_input_name(input)},
+                inputs={"input": input.name},
                 outputs=layer_outputs,
                 **layer_attrs)
         else:
             self.paddle_graph.add_layer(
                 "paddle.nn.ReLU",
-                inputs={"input": self.get_input_name(input)},
+                inputs={"input": input.name},
                 outputs=layer_outputs)

     def PReLU(self, node):
@@ -569,10 +578,10 @@ class CaffeOpMapper(OpMapper):
         layer_outputs = [prelu_name, output_name]
         assert len(
             node.inputs) == 1, "The count of PReLU node\'s input is not 1."
-        input = self.graph.get_bottom_node(node, idx=0, copy=True)
+        input = self.graph.get_input_node(node, idx=0, copy=True)
         params = node.layer.prelu_param
         mode_bool = params.channel_shared
-        output_shape = node.output_shape[0]
+        output_shape = node.out_shapes[0]
         if mode_bool:
             num_parameters = 1
         else:
@@ -583,7 +592,7 @@ class CaffeOpMapper(OpMapper):
             node.layer_name, node.layer_type)
         self.paddle_graph.add_layer(
             "paddle.nn.PReLU",
-            inputs={"input": self.get_input_name(input)},
+            inputs={"input": input.name},
             outputs=layer_outputs,
             num_parameters=num_parameters)
@@ -593,10 +602,10 @@ class CaffeOpMapper(OpMapper):
         params = node.layer.eltwise_param
         mode = params.operation
         inputs = []
-        input0 = self.graph.get_bottom_node(node, idx=0, copy=True)
-        input1 = self.graph.get_bottom_node(node, idx=1, copy=True)
-        input0_name = self.get_input_name(input0)
-        input1_name = self.get_input_name(input1)
+        input0 = self.graph.get_input_node(node, idx=0, copy=True)
+        input1 = self.graph.get_input_node(node, idx=1, copy=True)
+        input0_name = input0.name
+        input1_name = input1.name
         if mode == 0:
             inputs_dict = {}
             inputs_dict['x'] = input0_name
@@ -648,7 +657,7 @@ class CaffeOpMapper(OpMapper):
         layer_outputs = [batchnorm_name, output_name]
         assert len(
             node.inputs) == 1, "The count of BatchNorm node\'s input is not 1."
-        input = self.graph.get_bottom_node(node, idx=0, copy=True)
+        input = self.graph.get_input_node(node, idx=0, copy=True)
         params = node.layer.batch_norm_param
         if hasattr(params, "eps"):
             eps = params.eps
@@ -658,8 +667,8 @@ class CaffeOpMapper(OpMapper):
             print(
                 "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                 .format(node.layer_name, node.layer_type))
-            mean = np.zeros([node.input_shape[0][1], ]).astype("float32")
-            variance = np.zeros([node.input_shape[0][1], ]).astype("float32")
+            mean = np.zeros([node.in_shapes[0][1], ]).astype("float32")
+            variance = np.zeros([node.in_shapes[0][1], ]).astype("float32")
             scale = 0
         else:
@@ -672,14 +681,14 @@ class CaffeOpMapper(OpMapper):
         self.params[batchnorm_name + "._mean"] = mean
         self.params[batchnorm_name + '._variance'] = variance
         layer_attrs = {
-            "num_features": node.input_shape[0][1],
+            "num_features": node.in_shapes[0][1],
             "epsilon": eps,
             "weight_attr": False,
             "bias_attr": False,
         }
         self.paddle_graph.add_layer(
             "paddle.nn.BatchNorm2D",
-            inputs={"input": self.get_input_name(input)},
+            inputs={"input": input.name},
             outputs=layer_outputs,
             **layer_attrs)
@@ -689,10 +698,10 @@ class CaffeOpMapper(OpMapper):
                 "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                 .format(node.layer_name, node.layer_type))
             self.params[node.layer_name + "_cparam1"] = np.zeros([
-                node.input_shape[0][1],
+                node.in_shapes[0][1],
             ]).astype("float32")
             self.params[node.layer_name + "_cparam2"] = np.zeros([
-                node.input_shape[0][1],
+                node.in_shapes[0][1],
             ]).astype("float32")
         else:
             self.params[node.layer_name + "_cparam1"] = np.squeeze(node.data[
@@ -703,10 +712,10 @@ class CaffeOpMapper(OpMapper):
         axis = params.axis
         inputs = []
         if len(node.inputs) == 2:
-            input0 = self.graph.get_bottom_node(node, idx=0, copy=True)
-            input1 = self.graph.get_bottom_node(node, idx=1, copy=True)
-            input0_name = self.get_input_name(input0)
-            input1_name = self.get_input_name(input1)
+            input0 = self.graph.get_input_node(node, idx=0, copy=True)
+            input1 = self.graph.get_input_node(node, idx=1, copy=True)
+            input0_name = input0.name
+            input1_name = input1.name
             inputs_dict = {}
             inputs_dict['x'] = input0_name
             inputs_dict['y'] = input1_name
@@ -722,8 +731,8 @@ class CaffeOpMapper(OpMapper):
                 outputs=[node.layer_name + "_cparam1"],
                 shape=self.params[node.layer_name + "_cparam1"].shape,
                 attr=string(node.layer_name + "_cparam1"))
-            input0 = self.graph.get_bottom_node(node, idx=0, copy=True)
-            input0_name = self.get_input_name(input0)
+            input0 = self.graph.get_input_node(node, idx=0, copy=True)
+            input0_name = input0.name
             inputs_dict = {}
             inputs_dict['x'] = input0_name
             inputs_dict['y'] = node.layer_name + "_cparam1"
@@ -741,7 +750,7 @@ class CaffeOpMapper(OpMapper):
         inputs_dict = {}
         inputs_dict['x'] = node.layer_name + "_mul"
         inputs_dict['y'] = node.layer_name + "_cparam2"
-        output_shape = node.output_shape[0]
+        output_shape = node.out_shapes[0]
         if axis == -1:
             self.paddle_graph.add_layer(
                 "paddle.add",
@@ -765,11 +774,11 @@ class CaffeOpMapper(OpMapper):
             outputs=[node.layer_name])

     def Reshape(self, node):
-        input = self.graph.get_bottom_node(node, idx=0, copy=True)
-        output_shape = node.output_shape[0]
+        input = self.graph.get_input_node(node, idx=0, copy=True)
+        output_shape = node.out_shapes[0]
         self.paddle_graph.add_layer(
             "paddle.reshape",
-            inputs={"x": self.get_input_name(input)},
+            inputs={"x": input.name},
             outputs=[node.layer_name],
             shape=output_shape)
@@ -778,8 +787,8 @@ class CaffeOpMapper(OpMapper):
         assert len(node.inputs) == 1 and len(
             node.outputs
         ) == 1, "The count of ArgMax node\'s input and output is not 1."
-        input = self.graph.get_bottom_node(node, idx=0, copy=True)
-        input_shape = node.input_shape[0]
+        input = self.graph.get_input_node(node, idx=0, copy=True)
+        input_shape = node.in_shapes[0]
         params = node.layer.argmax_param
         out_max_val = params.out_max_val if hasattr(params,
                                                     out_max_val) else False
@@ -790,7 +799,7 @@ class CaffeOpMapper(OpMapper):
         if out_max_val is True:
             self.paddle_graph.add_layer(
                 "paddle.topk",
-                inputs={"x": self.get_input_name(input)},
+                inputs={"x": input.name},
                 outputs=[node.layer_name + "_topk_var", node.layer_name + "_index_var"],
                 k=top_k)
             self.paddle_graph.add_layer(
@@ -806,7 +815,7 @@ class CaffeOpMapper(OpMapper):
         else:
             self.paddle_graph.add_layer(
                 "paddle.topk",
-                inputs={"x": self.get_input_name(input)},
+                inputs={"x": input.name},
                 outputs=["_", node.layer_name],
                 k=top_k)
@@ -814,14 +823,14 @@ class CaffeOpMapper(OpMapper):
         assert len(node.inputs) == 1 and len(
             node.outputs
         ) == 1, "The count of Axpy node\'s input and output is not 1."
-        input = self.graph.get_bottom_node(node, idx=0, copy=True)
+        input = self.graph.get_input_node(node, idx=0, copy=True)
         params = node.layer.axpy_param
-        input0 = self.graph.get_bottom_node(node, idx=0, copy=True)
-        input1 = self.graph.get_bottom_node(node, idx=1, copy=True)
-        input2 = self.graph.get_bottom_node(node, idx=2, copy=True)
-        input0_name = self.get_input_name(input0)
-        input1_name = self.get_input_name(input1)
-        input2_name = self.get_input_name(input2)
+        input0 = self.graph.get_input_node(node, idx=0, copy=True)
+        input1 = self.graph.get_input_node(node, idx=1, copy=True)
+        input2 = self.graph.get_input_node(node, idx=2, copy=True)
+        input0_name = input0.name
+        input1_name = input1.name
+        input2_name = input2.name
         inputs_dict = {}
         inputs_dict['x'] = input1_name
         inputs_dict['y'] = input0_name
@@ -842,11 +851,11 @@ class CaffeOpMapper(OpMapper):
     def Crop(self, node):
         assert len(
             node.inputs) == 2, "The count of Crop node\'s input is not 2."
-        input = self.graph.get_bottom_node(node, idx=0, copy=True)
-        example = self.graph.get_bottom_node(node, idx=1, copy=True)
+        input = self.graph.get_input_node(node, idx=0, copy=True)
+        example = self.graph.get_input_node(node, idx=1, copy=True)
         params = node.layer.crop_param
         axis = params.axis
-        input_shape = node.input_shape[0]
+        input_shape = node.in_shapes[0]
         if axis < 0:
             axis += len(input_shape)
         offset_real = [0] * len(input_shape)
@@ -858,26 +867,26 @@ class CaffeOpMapper(OpMapper):
             offset_real = [0] * axis + offset
         self.paddle_graph.add_layer(
             "paddle.crop",
-            inputs={"x": self.get_input_name(input)},
+            inputs={"x": input.name},
             outputs=[node.layer_name],
-            shape=node.input_shape[1],
+            shape=node.in_shapes[1],
             offsets=list(offset_real))

     def Flatten(self, node):
         assert len(
             node.
             inputs) == 1, "The count of DetectionOutput node\'s input is not 1."
-        input = self.graph.get_bottom_node(node, idx=0, copy=True)
+        input = self.graph.get_input_node(node, idx=0, copy=True)
         self.paddle_graph.add_layer(
             "paddle.reshape",
-            inputs={"x": self.get_input_name(input)},
+            inputs={"x": input.name},
             outputs=[node.layer_name],
-            shape=node.output_shape[0])
+            shape=node.out_shapes[0])

     def Power(self, node):
         assert len(
             node.inputs) == 1, "The count of Permute node\'s input is not 1."
-        input = self.graph.get_bottom_node(node, idx=0, copy=True)
+        input = self.graph.get_input_node(node, idx=0, copy=True)
         params = node.layer.power_param
         layer_attrs = {
             'scale': params.scale,
@@ -886,7 +895,7 @@ class CaffeOpMapper(OpMapper):
         }
         self.paddle_graph.add_layer(
             "paddle.scale",
-            inputs={"x": self.get_input_name(input)},
+            inputs={"x": input.name},
             outputs=[node.layer_name],
             **layer_attrs)
         self.paddle_graph.add_layer(
@@ -898,14 +907,14 @@ class CaffeOpMapper(OpMapper):
     def Reduction(self, node):
         assert len(
             node.inputs) == 1, "The count of Reduction node\'s input is not 1."
-        input = self.graph.get_bottom_node(node, idx=0, copy=True)
+        input = self.graph.get_input_node(node, idx=0, copy=True)
         params = node.layer.reduction_param
         operation = params.operation
         axis = params.axis
         coeff = params.coeff
         assert operation >= 1 and operation <= 4, "reduction reduction [%s] error" % (
             operation)
-        input_len = len(node.input_shape[0])
+        input_len = len(node.in_shapes[0])
         if axis < 0:
             axis += input_len + 1
         dim = list(range(input_len))
@@ -917,14 +926,14 @@ class CaffeOpMapper(OpMapper):
             }
             self.paddle_graph.add_layer(
                 "paddle.sum",
-                inputs={"input": self.get_input_name(input)},
+                inputs={"input": input.name},
                 outputs=[node.layer_name],
                 **layer_attrs)
         # operation = ASUM
         elif operation == 2:
             self.paddle_graph.add_layer(
                 "paddle.abs",
-                inputs={"x": self.get_input_name(input)},
+                inputs={"x": input.name},
                 outputs=[node.layer_name])
             layer_attrs = {
                 "dim": dim[axis:],
@@ -939,7 +948,7 @@ class CaffeOpMapper(OpMapper):
         elif operation == 3:
             self.paddle_graph.add_layer(
                 "paddle.pow",
-                inputs={"x": self.get_input_name(input)},
+                inputs={"x": input.name},
                 outputs=[node.layer_name],
                 exponent=2.0)
             layer_attrs = {
@@ -959,7 +968,7 @@ class CaffeOpMapper(OpMapper):
             }
             self.paddle_graph.add_layer(
                 "paddle.mean",
-                inputs={"input": self.get_input_name(input)},
+                inputs={"input": input.name},
                 outputs=[node.layer_name],
                 **layer_attrs)
         self.paddle_graph.add_layer(
@@ -976,16 +985,16 @@
             node.inputs) == 3, "The count of DetectionOutput node\'s input is not 3."
         inputs_dict = dict()
         for i in range(len(node.inputs)):
-            input = self.graph.get_bottom_node(node, idx=i, copy=True)
+            input = self.graph.get_input_node(node, idx=i, copy=True)
             if i == 1:
-                input = self.graph.get_bottom_node(node, idx=i, copy=True)
+                input = self.graph.get_input_node(node, idx=i, copy=True)
                 while input is not None \
                         and input.layer_type != 'Softmax' \
                         and input.layer_type != 'Sigmoid':
-                    input = self.graph.get_bottom_node(input, idx=0, copy=True)
+                    input = self.graph.get_input_node(input, idx=0, copy=True)
                 assert input is not None, 'This kind of DetectionOutput is not supported!'
-                input = self.graph.get_bottom_node(input, idx=0, copy=True)
-            inputs_dict["x{}".format(i)] = self.get_input_name(input)
+                input = self.graph.get_input_node(input, idx=0, copy=True)
+            inputs_dict["x{}".format(i)] = input.name
         params = node.layer.detection_output_param
         nms_param = params.nms_param
         nms_param_dict = dict()
@@ -1018,16 +1027,16 @@
         layer_outputs = [normalize_name, output_name]
         assert len(
             node.inputs) == 1, "The count of Normalize node\'s input is not 1."
-        input = self.graph.get_bottom_node(node, idx=0, copy=True)
+        input = self.graph.get_input_node(node, idx=0, copy=True)
         params = node.layer.norm_param
         if node.data is None or len(node.data) != 1:
             print(
                 "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                 .format(node.layer_name, node.layer_type))
             self.parmas[node.layer_name + ".scale"] = \
-                np.zeros([1] if params.channel_shared else [1, 1, 1, node.input_shape[0][1]]).astype("float32")
+                np.zeros([1] if params.channel_shared else [1, 1, 1, node.in_shapes[0][1]]).astype("float32")
         else:
-            self.parmas[node.layer_name + ".scale"] = self.adjust_parameters(node)[0]
+            self.parmas[node.layer_name + ".scale"] = _adjust_parameters(node)[0]

         layer_attrs = {
             "axis": -1 if params.channel_shared else 1,
@@ -1035,19 +1044,19 @@
             "param_shape": self.parmas[node.layer_name + ".scale"].shape}
         self.pd_pdgraph.add_layer(
             "custom_layer:Normalize",
-            inputs={"x": self.get_input_name(input)},
+            inputs={"x": input.name},
             outputs=layer_outputs,
             **layer_attrs)

     def Permute(self, node):
         assert len(
             node.inputs) == 1, "The count of Permute node\'s input is not 1."
-        input = self.graph.get_bottom_node(node, idx=0, copy=True)
+        input = self.graph.get_input_node(node, idx=0, copy=True)
         params = node.layer.permute_param
         order = list(params.order)
         self.paddle_graph.add_layer(
             "paddle.transpose",
-            inputs={"x": self.get_input_name(input)},
+            inputs={"x": input.name},
             outputs=[node.layer_name],
             perm=order)
@@ -1057,11 +1066,11 @@
         layer_outputs = [priorbox_name, output_name]
         assert len(
             node.inputs) == 2, "The count of PriorBox node\'s input is not 2."
-        input0 = self.graph.get_bottom_node(node, idx=0, copy=True)
-        input1 = self.graph.get_bottom_node(node, idx=1, copy=True)
+        input0 = self.graph.get_input_node(node, idx=0, copy=True)
+        input1 = self.graph.get_input_node(node, idx=1, copy=True)
         inputs_dict = {}
-        inputs_dict["x0"] = self.get_input_name(input0)
-        inputs_dict["x1"] = self.get_input_name(input1)
+        inputs_dict["x0"] = input0.name
+        inputs_dict["x1"] = input1.name
         params = node.layer.prior_box_param
         steps = tuple(params.step) if type(params.step) \
                 is list or type(params.step) is tuple \
@@ -1092,10 +1101,10 @@
         layer_outputs = [relu6_name, output_name]
         assert len(
             node.inputs) == 1, "The count of RelU6 node\'s input is not 1."
-        input = self.graph.get_bottom_node(node, idx=0, copy=True)
+        input = self.graph.get_input_node(node, idx=0, copy=True)
         self.paddle_graph.add_layer(
             "paddle.nn.ReLU6",
-            inputs={"input": self.get_input_name(input)},
+            inputs={"input": input.name},
             outputs=layer_outputs)
def ROIPooling(self, node): def ROIPooling(self, node):
...@@ -1104,11 +1113,11 @@ class CaffeOpMapper(OpMapper): ...@@ -1104,11 +1113,11 @@ class CaffeOpMapper(OpMapper):
layer_outputs = [roipooling_name, output_name] layer_outputs = [roipooling_name, output_name]
assert len( assert len(
node.inputs) == 2, "The count of ROIPooling node\'s input is not 2." node.inputs) == 2, "The count of ROIPooling node\'s input is not 2."
input0 = self.graph.get_bottom_node(node, idx=0, copy=True) input0 = self.graph.get_input_node(node, idx=0, copy=True)
input1 = self.graph.get_bottom_node(node, idx=1, copy=True) input1 = self.graph.get_input_node(node, idx=1, copy=True)
inputs_dict = {} inputs_dict = {}
inputs_dict["x0"] = self.get_input_name(input0) inputs_dict["x0"] = input0.name
inputs_dict["x1"] = self.get_input_name(input1) inputs_dict["x1"] = input1.name
params = node.layer.roi_pooling_param params = node.layer.roi_pooling_param
layer_attrs = { layer_attrs = {
"pooled_height": params.pooled_h, "pooled_height": params.pooled_h,
...@@ -1123,18 +1132,18 @@ class CaffeOpMapper(OpMapper): ...@@ -1123,18 +1132,18 @@ class CaffeOpMapper(OpMapper):
def ShuffleChannel(self, node): def ShuffleChannel(self, node):
assert len( assert len(
node.inputs) == 1, "The count of ShuffleChannel node\'s input is not 1." node.inputs) == 1, "The count of ShuffleChannel node\'s input is not 1."
input = self.graph.get_bottom_node(node, idx=0, copy=True) input = self.graph.get_input_node(node, idx=0, copy=True)
params = node.layer.shuffle_channel_param params = node.layer.shuffle_channel_param
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"fluid.layers.shuffle_channel", "fluid.layers.shuffle_channel",
inputs={"x": self.get_input_name(input)}, inputs={"x": input.name},
outputs=[node.layer_name], outputs=[node.layer_name],
group=params.group) group=params.group)
def Upsample(self, node): def Upsample(self, node):
assert len( assert len(
node.inputs) == 1, "The count of Upsample node\'s input is not 1." node.inputs) == 1, "The count of Upsample node\'s input is not 1."
input = self.graph.get_bottom_node(node, idx=0, copy=True) input = self.graph.get_input_node(node, idx=0, copy=True)
params = node.layer.upsample_param params = node.layer.upsample_param
layer_attrs = { layer_attrs = {
"align_corners": False, "align_corners": False,
...@@ -1142,7 +1151,7 @@ class CaffeOpMapper(OpMapper): ...@@ -1142,7 +1151,7 @@ class CaffeOpMapper(OpMapper):
"mode": "nearest"} "mode": "nearest"}
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.nn.functioanl.interpolate", "paddle.nn.functioanl.interpolate",
inputs={"input": self.get_input_name(input)}, inputs={"input": input.name},
outputs=[node.layer_name], outputs=[node.layer_name],
**layer_attrs) **layer_attrs)
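# A minimal runnable sketch of the call the Upsample mapping above emits
# (the input shape and scale_factor here are assumed example values):
import paddle
x = paddle.rand([1, 3, 8, 8])
y = paddle.nn.functional.interpolate(
    x, scale_factor=2, mode="nearest", align_corners=False)
print(y.shape)  # [1, 3, 16, 16]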
...@@ -1152,8 +1161,8 @@ class CaffeOpMapper(OpMapper): ...@@ -1152,8 +1161,8 @@ class CaffeOpMapper(OpMapper):
layer_outputs = [select_name, output_name] layer_outputs = [select_name, output_name]
assert len( assert len(
node.inputs) == 1, "The count of Select node\'s input is not 1." node.inputs) == 1, "The count of Select node\'s input is not 1."
input = self.graph.get_bottom_node(node, idx=0, copy=True) input = self.graph.get_input_node(node, idx=0, copy=True)
input_shape = node.input_shape[0] input_shape = node.in_shapes[0]
params = node.layer.select_param params = node.layer.select_param
layer_attrs = { layer_attrs = {
"input_shape": input_shape, "input_shape": input_shape,
...@@ -1161,28 +1170,10 @@ class CaffeOpMapper(OpMapper): ...@@ -1161,28 +1170,10 @@ class CaffeOpMapper(OpMapper):
"axis": params.axis} "axis": params.axis}
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"custom_layer:Select", "custom_layer:Select",
inputs={"x": self.get_input_name(input)}, inputs={"x": input.name},
outputs=layer_outputs, outputs=layer_outputs,
**layer_attrs) **layer_attrs)
def directly_map(self, node):
assert node.layer_type in self.directly_map_ops
op_info = self.directly_map_ops[node.layer_type]
input = self.graph.get_bottom_node(node, idx=0, copy=True)
prefix_name = node.layer_type.lower()
if prefix_name in self.nn_name2id:
self.nn_name2id[prefix_name] += 1
else:
self.nn_name2id[prefix_name] = 0
first_output_name = prefix_name + str(self.nn_name2id[prefix_name])
output_name = node.layer_name
layer_outputs = [first_output_name, output_name]
assert len(
node.inputs) == 1, "The count of Activate node\'s input is not 1."
input = self.graph.get_bottom_node(node, idx=0, copy=True)
self.paddle_graph.add_layer(
op_info,
inputs={"input": self.get_input_name(input)},
outputs=layer_outputs)
...@@ -12,6 +12,7 @@ ...@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import sys
from x2paddle.op_mapper.dygraph.onnx2paddle.opset9 import OpSet9 from x2paddle.op_mapper.dygraph.onnx2paddle.opset9 import OpSet9
from x2paddle.core.op_mapper import OpMapper from x2paddle.core.op_mapper import OpMapper
from x2paddle.decoder.onnx_decoder import ONNXGraphNode from x2paddle.decoder.onnx_decoder import ONNXGraphNode
...@@ -25,34 +26,33 @@ class ONNXOpMapper(OpMapper): ...@@ -25,34 +26,33 @@ class ONNXOpMapper(OpMapper):
self.default_op_set = 9 self.default_op_set = 9
self.graph = decoder.graph self.graph = decoder.graph
self.paddle_graph = PaddleGraph(parent_layer=None, graph_type="dygraph", source_type="onnx") self.paddle_graph = PaddleGraph(parent_layer=None, graph_type="dygraph", source_type="onnx")
self.paddle_graph.outputs = self.graph.output_nodes
self.opset = self.create_opset(decoder) self.opset = self.create_opset(decoder)
if not self.op_checker(): if not self.op_checker():
raise Exception("Model are not supported yet.") raise Exception("Model is not supported yet.")
#mapping op
print("Total nodes: {}".format( print("Total nodes: {}".format(
sum([ sum([
isinstance(node, ONNXGraphNode) isinstance(node, ONNXGraphNode)
for name, node in self.graph.node_map.items() for name, node in self.graph.node_map.items()
]))) ])))
print("Nodes converting ...") print("Nodes converting ...")
for node_name in self.graph.topo_sort: for i, node_name in enumerate(self.graph.topo_sort):
sys.stderr.write("\rConverting node {} ... ".format(i + 1))
node = self.graph.get_node(node_name) node = self.graph.get_node(node_name)
op = node.layer_type op = node.layer_type
if hasattr(self.opset, op): if hasattr(self.opset, op):
func = getattr(self.opset, op) func = getattr(self.opset, op)
func(node) func(node)
elif op in self.opset.default_op_mapping: elif op in self.opset.directly_map_ops:
self.opset.directly_map(node) self.opset.directly_map(node)
elif op in self.opset.elementwise_ops: elif op in self.opset.elementwise_ops:
self.opset.elementwise_map(node) self.opset.elementwise_map(node)
print("Nodes converted.") print("\nNodes converted.")
self.weights = self.opset.weights
self.inputs_info = self.opset.inputs_info
self.paddle_graph.set_name(self.graph.graph_name) self.paddle_graph.set_name(self.graph.graph_name)
self.paddle_graph.set_parameters(self.weights) self.paddle_graph.set_parameters(self.opset.weights)
self.paddle_graph.set_inputs_info(self.inputs_info) self.paddle_graph.set_inputs_info(self.opset.inputs_info)
self.paddle_graph.outputs = self.graph.output_nodes
def op_checker(self): def op_checker(self):
unsupported_ops = set() unsupported_ops = set()
...@@ -60,16 +60,17 @@ class ONNXOpMapper(OpMapper): ...@@ -60,16 +60,17 @@ class ONNXOpMapper(OpMapper):
node = self.graph.get_node(node_name) node = self.graph.get_node(node_name)
op = node.layer_type op = node.layer_type
if not hasattr(self.opset, op) and \ if not hasattr(self.opset, op) and \
op not in self.opset.default_op_mapping and \ op not in self.opset.directly_map_ops and \
op not in self.opset.elementwise_ops: op not in self.opset.elementwise_ops:
unsupported_ops.add(op) unsupported_ops.add(op)
if len(unsupported_ops) == 0: if len(unsupported_ops) == 0:
return True return True
else: else:
print("There are {} ops not supported yet, list as below".format( if len(unsupported_ops) > 0:
print("\n========= {} OPs are not supported yet ===========".format(
len(unsupported_ops))) len(unsupported_ops)))
for op in unsupported_ops: for op in unsupported_ops:
print(op) print("========== {} ============".format(op))
return False return False
def create_opset(self, decoder): def create_opset(self, decoder):
......
...@@ -40,7 +40,7 @@ def _const_weight_or_none(node, necessary=False): ...@@ -40,7 +40,7 @@ def _const_weight_or_none(node, necessary=False):
return node.weight return node.weight
if necessary: if necessary:
assert False, '{} should be an initializer or Constant operator.'.format( assert False, '{} should be an initializer or Constant operator.'.format(
node.layer_name) node.name)
return None return None
...@@ -74,7 +74,7 @@ def print_mapping_info(func): ...@@ -74,7 +74,7 @@ def print_mapping_info(func):
res = func(*args, **kwargs) res = func(*args, **kwargs)
except: except:
print("convert failed node:{}, op_type is {}".format( print("convert failed node:{}, op_type is {}".format(
node.layer_name[9:], node.layer_type)) node.name[9:], node.layer_type))
raise raise
else: else:
return res return res
...@@ -91,50 +91,46 @@ class OpSet9(): ...@@ -91,50 +91,46 @@ class OpSet9():
'Pow': 'paddle.pow', 'Pow': 'paddle.pow',
} }
default_op_mapping_field_values = OrderedDict() directly_map_ops = {
default_op_mapping_field_values['PADDLE_OP'] = '' 'Ceil': ['paddle.ceil'],
default_op_mapping_field_values['PADDLE_INPUT_ARGS'] = None # reduce ops
default_op_mapping_field_values['ATTR_MAPPING'] = dict() 'ReduceMean': ['paddle.mean',
default_op_mapping_field_values['DEFAULTS'] = dict() dict(axes='axis', keepdims='keepdim'),
dict(keepdims=1)],
default_op_mapping = { 'ReduceSum': ['paddle.sum',
'Shape': ['paddle.shape', ['input']], dict(axes='axis', keepdims='keepdim'),
'Ceil': ['paddle.ceil', ['x']], dict(keepdims=1)],
'ReduceMean': [ 'ReduceMin': ['paddle.min',
'paddle.mean', ['x'], dict( dict(axes='axis', keepdims='keepdim'),
axes='axis', keepdims='keepdim'), dict(keepdim=1) dict(keepdim=1)],
], 'ReduceMax': ['paddle.max',
'ReduceSum': [ dict(axes='axis', keepdims='keepdim'),
'paddle.sum', ['x'], dict( dict(keepdim=1)],
axes='axis', keepdims='keepdim'), dict(keepdim=1) # active function
], 'Relu': ['paddle.nn.ReLU'],
'ReduceMin': [ 'LeakyRelu': ['paddle.nn.LeakyReLU',
'paddle.min', ['x'], dict( dict(alpha='negative_slope'),
axes='axis', keepdims='keepdim'), dict(keepdim=1) # activation ops
],
'ReduceMax': [
'paddle.max', ['x'], dict(
axes='axis', keepdims='keepdim'), dict(keepdim=1)
],
# activation ops
'Relu': ['paddle.nn.ReLU', ['x']],
'LeakyRelu': ['paddle.nn.LeakyReLU', ['x'], dict(alpha='negative_slope'),
dict(negative_slope=.01)], dict(negative_slope=.01)],
'Elu': ['paddle.nn.functional.elu', ['x'], dict(), dict(alpha=1.)], 'Elu': ['paddle.nn.functional.elu',
'ThresholdedRelu': [ dict(),
'paddle.nn.functional.thresholded_relu', ['x'], dict(alpha='threshold'), dict(alpha=1.)],
dict(alpha=1.) 'ThresholdedRelu': ['paddle.nn.functional.thresholded_relu',
], dict(alpha='threshold'),
'Tanh': ['paddle.nn.Tanh', ['x']], dict(alpha=1.)],
'Sigmoid': ['paddle.nn.Sigmoid', ['x']], 'Tanh': ['paddle.nn.Tanh'],
'Softsign': ['paddle.nn.Softsign', ['x']], 'Sigmoid': ['paddle.nn.Sigmoid'],
'Softplus': ['paddle.nn.Softplus', ['x'], dict(), dict(threshold=float(sys.maxsize))], 'Softsign': ['paddle.nn.Softsign'],
'Exp': ['paddle.exp', ['x']], 'Softplus': ['paddle.nn.Softplus',
'Softmax': ['paddle.nn.Softmax', ['x'], dict(), dict(axis=1)], dict(),
'Sqrt': ['paddle.sqrt', ['x']], dict(threshold=float(sys.maxsize))],
'Floor': ['paddle.floor', ['x']], 'Exp': ['paddle.exp'],
'Abs': ['paddle.abs', ['x']], 'Softmax': ['paddle.nn.Softmax',
'Erf': ['paddle.erf', ['x']], dict(),
dict(axis=1)],
'Sqrt': ['paddle.sqrt'],
'Floor': ['paddle.floor'],
'Abs': ['paddle.abs'],
'Erf': ['paddle.erf'],
} }
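# A minimal sketch of how a directly_map_ops entry is consumed by
# directly_map below (the ONNX attribute values here are assumed examples):
op_info = ['paddle.mean', dict(axes='axis', keepdims='keepdim'),
           dict(keepdims=1)]
onnx_attrs = {'axes': [1]}  # 'keepdims' is absent, so its default applies
paddle_op, name_map, defaults = op_info
layer_attrs = {}
for onnx_name, paddle_name in name_map.items():
    if onnx_name in onnx_attrs:
        layer_attrs[paddle_name] = onnx_attrs[onnx_name]
    else:
        layer_attrs[paddle_name] = defaults[onnx_name]
assert layer_attrs == {'axis': [1], 'keepdim': 1}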
def __init__(self, decoder, paddle_graph): def __init__(self, decoder, paddle_graph):
...@@ -146,71 +142,55 @@ class OpSet9(): ...@@ -146,71 +142,55 @@ class OpSet9():
self.weights = dict() self.weights = dict()
self.nn_name2id = dict() self.nn_name2id = dict()
def get_node_name(self, node):
if hasattr(node, "index"):
return "{}_{}".format(node.layer_name, node.index)
else:
return node.layer_name
@print_mapping_info @print_mapping_info
def directly_map(self, node, *args, **kwargs): def directly_map(self, node, *args, **kwargs):
inputs = node.layer.input inputs = node.layer.input
op_type = node.layer_type
attrs = node.attr_map
info = self.default_op_mapping[op_type]
info.extend(
list(self.default_op_mapping_field_values.values())[len(info):])
(paddle_op,
paddle_input_args,
attr_mapping,
default_attrs) = info
mapped_attrs = {
attr_mapping.get(key, key): value
for key, value in attrs.items()
}
if '' in mapped_attrs:
mapped_attrs.pop('')
if '_' in mapped_attrs:
mapped_attrs.pop('_')
layer_attrs = default_attrs.copy()
layer_attrs.update(mapped_attrs)
assert len(inputs) == 1, 'directly_map error with multiple inputs' assert len(inputs) == 1, 'directly_map error with multiple inputs'
input = self.graph.get_input_node(node, idx=0, copy=True) input = self.graph.get_input_node(node, idx=0, copy=True)
onnx_attrs = node.attr_map
if '' in onnx_attrs:
onnx_attrs.pop('')
if '_' in onnx_attrs:
onnx_attrs.pop('_')
op_info = self.directly_map_ops[node.layer_type]
paddle_op = op_info[0]
layer_attrs = dict()
if len(op_info) > 1:
attrs_name_map_dict = op_info[1]
for onnx_attr_name, pd_attr_name in attrs_name_map_dict.items():
if onnx_attr_name in onnx_attrs:
layer_attrs[pd_attr_name] = onnx_attrs[onnx_attr_name]
else:
layer_attrs[pd_attr_name] = op_info[2][onnx_attr_name]
if paddle_op.startswith("paddle.nn"): if paddle_op.startswith("paddle.nn"):
op_name = paddle_op[10:].lower() op_name = paddle_op[10:].lower()
op_name = name_generator(op_name, self.nn_name2id) op_name = name_generator(op_name, self.nn_name2id)
output_name = node.layer_name output_name = node.name
layer_outputs = [op_name, output_name] layer_outputs = [op_name, output_name]
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
kernel=paddle_op, kernel=paddle_op,
inputs={paddle_input_args[0]: self.get_node_name(input)}, inputs={"x": input.name},
outputs=layer_outputs, outputs=layer_outputs,
**layer_attrs) **layer_attrs)
else: else:
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
kernel=paddle_op, kernel=paddle_op,
inputs={paddle_input_args[0]: self.get_node_name(input)}, inputs={"x": input.name},
outputs=[node.layer_name], outputs=[node.name],
**layer_attrs) **layer_attrs)
if paddle_op == 'paddle.shape':
self.paddle_graph.add_layer(
'paddle.cast',
inputs={"x": node.layer_name},
outputs=[node.layer_name],
dtype=string('int64'))
@print_mapping_info @print_mapping_info
def elementwise_map(self, node): def elementwise_map(self, node):
assert node.layer_type in self.elementwise_ops
op_type = self.elementwise_ops[node.layer_type] op_type = self.elementwise_ops[node.layer_type]
val_x = self.graph.get_input_node(node, idx=0, copy=True) val_x = self.graph.get_input_node(node, idx=0, copy=True)
val_y = self.graph.get_input_node(node, idx=1, copy=True) val_y = self.graph.get_input_node(node, idx=1, copy=True)
inputs_dict = {'x': self.get_node_name(val_x), inputs_dict = {'x': val_x.name,
'y': self.get_node_name(val_y)} 'y': val_y.name}
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
op_type, op_type,
inputs=inputs_dict, inputs=inputs_dict,
outputs=[node.layer_name]) outputs=[node.name])
@print_mapping_info @print_mapping_info
def place_holder(self, node): def place_holder(self, node):
...@@ -223,7 +203,7 @@ class OpSet9(): ...@@ -223,7 +203,7 @@ class OpSet9():
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
kernel="paddle.to_tensor", kernel="paddle.to_tensor",
inputs={}, inputs={},
outputs=[node.layer_name], outputs=[node.name],
data="x{}".format(self.input_index)) data="x{}".format(self.input_index))
self.inputs_info["x{}".format(self.input_index)] = [shape, node.dtype] self.inputs_info["x{}".format(self.input_index)] = [shape, node.dtype]
self.input_index += 1 self.input_index += 1
...@@ -238,18 +218,18 @@ class OpSet9(): ...@@ -238,18 +218,18 @@ class OpSet9():
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.full", "paddle.full",
inputs={}, inputs={},
outputs=[node.layer_name], outputs=[node.name],
dtype=string(dtype), dtype=string(dtype),
shape=[1], shape=[1],
fill_value=node.weight) fill_value=node.weight)
else: else:
self.weights[node.layer_name] = node.weight self.weights[node.name] = node.weight
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"self.create_parameter", "self.create_parameter",
inputs={}, inputs={},
outputs=[node.layer_name], outputs=[node.name],
shape=shape, shape=shape,
attr=string(node.layer_name), attr=string(node.name),
dtype=string(dtype), dtype=string(dtype),
default_initializer="paddle.nn.initializer.Constant(value=0.0)") default_initializer="paddle.nn.initializer.Constant(value=0.0)")
...@@ -269,23 +249,23 @@ class OpSet9(): ...@@ -269,23 +249,23 @@ class OpSet9():
def _interpolate(self, node): def _interpolate(self, node):
val_x = self.graph.get_input_node(node, idx=0, copy=True) val_x = self.graph.get_input_node(node, idx=0, copy=True)
inputs = {'x': self.get_node_name(val_x)} inputs = {'x': val_x.name}
if node.layer_type == 'Resize': if node.layer_type == 'Resize':
if len(node.layer.input) == 2: if len(node.layer.input) == 2:
# opset 10 # opset 10
val_scales = self.graph.get_input_node(node, idx=1, copy=True) val_scales = self.graph.get_input_node(node, idx=1, copy=True)
inputs['scale_factor'] = self.get_node_name(val_scales) inputs['scale_factor'] = val_scales.name
elif len(node.layer.input) == 3: elif len(node.layer.input) == 3:
# opset 11 # opset 11
val_scales = self.graph.get_input_node(node, idx=2, copy=True) val_scales = self.graph.get_input_node(node, idx=2, copy=True)
inputs['scale_factor'] = self.get_node_name(val_scales) inputs['scale_factor'] = val_scales.name
elif len(node.layer.input) == 4: elif len(node.layer.input) == 4:
# opset 11 # opset 11
val_sizes = self.graph.get_input_node(node, idx=3, copy=True) val_sizes = self.graph.get_input_node(node, idx=3, copy=True)
var_nc, var_hw = val_sizes.layer_name + '_nc', val_sizes.layer_name + '_hw' var_nc, var_hw = val_sizes.name + '_nc', val_sizes.name + '_hw'
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.split', 'paddle.split',
inputs={"x": self.get_node_name(val_sizes)}, inputs={"x": val_sizes.name},
outputs=[var_nc, var_hw], outputs=[var_nc, var_hw],
num_or_sections=[2, 2], num_or_sections=[2, 2],
axis=0) axis=0)
...@@ -305,7 +285,7 @@ class OpSet9(): ...@@ -305,7 +285,7 @@ class OpSet9():
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
kernel="fluid.layers.resize_nearest", kernel="fluid.layers.resize_nearest",
inputs=inputs, inputs=inputs,
outputs=[node.layer_name], outputs=[node.name],
**attrs) **attrs)
return return
elif node.layer_type == 'Upsample': elif node.layer_type == 'Upsample':
...@@ -319,7 +299,7 @@ class OpSet9(): ...@@ -319,7 +299,7 @@ class OpSet9():
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
kernel="paddle.nn.functional.interpolate", kernel="paddle.nn.functional.interpolate",
inputs=inputs, inputs=inputs,
outputs=[node.layer_name], outputs=[node.name],
**attrs) **attrs)
@print_mapping_info @print_mapping_info
...@@ -329,17 +309,30 @@ class OpSet9(): ...@@ -329,17 +309,30 @@ class OpSet9():
beta = node.get_attr('beta', 0.5) beta = node.get_attr('beta', 0.5)
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
kernel="paddle.scale", kernel="paddle.scale",
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=[node.layer_name + "_val"], outputs=[node.name + "_val"],
scale=alpha, scale=alpha,
bias=beta) bias=beta)
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
kernel="paddle.clip", kernel="paddle.clip",
inputs={"x": node.layer_name + "_val"}, inputs={"x": node.name + "_val"},
outputs=[node.layer_name], outputs=[node.name],
min=0.0, min=0.0,
max=1.0) max=1.0)
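# HardSigmoid is y = clip(alpha * x + beta, 0, 1), which the two layers
# above (paddle.scale, then paddle.clip) compute directly; a small check:
import paddle
x = paddle.to_tensor([-5.0, 0.0, 5.0])
y = paddle.clip(paddle.scale(x, scale=0.2, bias=0.5), min=0.0, max=1.0)
# y -> [0.0, 0.5, 1.0]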
@print_mapping_info
def Shape(self, node):
val_x = self.graph.get_input_node(node, idx=0, copy=True)
self.paddle_graph.add_layer(
kernel="paddle.shape",
inputs={"input": val_x.name},
outputs=[node.name])
self.paddle_graph.add_layer(
'paddle.cast',
inputs={"x": node.name},
outputs=[node.name],
dtype=string('int64'))
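# The extra cast exists because ONNX Shape outputs an int64 tensor while
# paddle.shape returns int32; the generated code is roughly equivalent to:
import paddle
x = paddle.rand([2, 3])
shape = paddle.cast(paddle.shape(x), 'int64')  # values [2, 3], dtype int64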
@print_mapping_info @print_mapping_info
def RoiAlign(self, node): def RoiAlign(self, node):
val_x = self.graph.get_input_node(node, idx=0, copy=True) val_x = self.graph.get_input_node(node, idx=0, copy=True)
...@@ -357,9 +350,9 @@ class OpSet9(): ...@@ -357,9 +350,9 @@ class OpSet9():
} }
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'fluid.layers.roi_align', 'fluid.layers.roi_align',
inputs={'input': self.get_node_name(val_x), inputs={'input': val_x.name,
'rois': self.get_node_name(val_rois)}, 'rois': val_rois.name},
outputs=[node.layer_name], outputs=[node.name],
**layer_attrs) **layer_attrs)
...@@ -377,9 +370,9 @@ class OpSet9(): ...@@ -377,9 +370,9 @@ class OpSet9():
} }
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'fluid.layers.roi_pool', 'fluid.layers.roi_pool',
inputs={'input': self.get_node_name(val_x), inputs={'input': val_x.name,
'rois': self.get_node_name(val_rois)}, 'rois': val_rois.name},
outputs=[node.layer_name], outputs=[node.name],
**layer_attrs) **layer_attrs)
@print_mapping_info @print_mapping_info
...@@ -432,17 +425,17 @@ class OpSet9(): ...@@ -432,17 +425,17 @@ class OpSet9():
if op_independent: if op_independent:
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
paddle_op, paddle_op,
inputs={'x': self.get_node_name(val_x)}, inputs={'x': val_x.name},
outputs=[nn_op_name, node.layer_name] if paddle_op == 'paddle.nn.Pad2D' else [node.layer_name], outputs=[nn_op_name, node.name] if paddle_op == 'paddle.nn.Pad2D' else [node.name],
**layer_attrs) **layer_attrs)
else: else:
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
paddle_op, paddle_op,
inputs={'x': self.get_node_name(val_x)}, inputs={'x': val_x.name},
outputs=[nn_op_name, node.layer_name + '_paded'] if paddle_op == 'paddle.nn.Pad2D' \ outputs=[nn_op_name, node.name + '_paded'] if paddle_op == 'paddle.nn.Pad2D' \
else [node.layer_name + '_paded'], else [node.name + '_paded'],
**layer_attrs) **layer_attrs)
return node.layer_name + '_paded' return node.name + '_paded'
@print_mapping_info @print_mapping_info
def Unsqueeze(self, node): def Unsqueeze(self, node):
...@@ -450,17 +443,17 @@ class OpSet9(): ...@@ -450,17 +443,17 @@ class OpSet9():
axes = node.get_attr('axes') axes = node.get_attr('axes')
layer_attrs = {'axis': axes} layer_attrs = {'axis': axes}
if len(val_x.out_shapes[0]) == 0: if len(val_x.out_shapes[0]) == 0:
if node.layer_name: if node.name:
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.reshape', 'paddle.reshape',
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=[node.layer_name], outputs=[node.name],
shape=[1]) shape=[1])
else: else:
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.unsqueeze', 'paddle.unsqueeze',
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=[node.layer_name], outputs=[node.name],
**layer_attrs) **layer_attrs)
@print_mapping_info @print_mapping_info
...@@ -471,8 +464,8 @@ class OpSet9(): ...@@ -471,8 +464,8 @@ class OpSet9():
assert bias == 0.0, 'not support bias!=0' assert bias == 0.0, 'not support bias!=0'
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.nn.functional.hardshrink', 'paddle.nn.functional.hardshrink',
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=[node.layer_name], outputs=[node.name],
threshold=lambd) threshold=lambd)
@print_mapping_info @print_mapping_info
...@@ -494,26 +487,26 @@ class OpSet9(): ...@@ -494,26 +487,26 @@ class OpSet9():
_logger.warning('in (Constant -> %s): ' _logger.warning('in (Constant -> %s): '
'attribute "shape" of %s not inferred, ' 'attribute "shape" of %s not inferred, '
'using value as 1-D tensor may lead to fails', 'using value as 1-D tensor may lead to fails',
val_output.layer_name, val_output.layer_name) val_output.name, val_output.name)
if len(value) == 1: if len(value) == 1:
value = value.tolist() value = value.tolist()
value = value[0] value = value[0]
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.full", "paddle.full",
inputs={}, inputs={},
outputs=[node.layer_name], outputs=[node.name],
dtype=string(dtype), dtype=string(dtype),
shape=[1], shape=[1],
fill_value=value) fill_value=value)
else: else:
value = np.reshape(value, shape) value = np.reshape(value, shape)
self.weights[node.layer_name] = value self.weights[node.name] = value
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"self.create_parameter", "self.create_parameter",
inputs={}, inputs={},
outputs=[node.layer_name], outputs=[node.name],
shape=shape, shape=shape,
attr=string(node.layer_name), attr=string(node.name),
dtype=string(dtype), dtype=string(dtype),
default_initializer="paddle.nn.initializer.Constant(value=0.0)") default_initializer="paddle.nn.initializer.Constant(value=0.0)")
...@@ -528,7 +521,7 @@ class OpSet9(): ...@@ -528,7 +521,7 @@ class OpSet9():
@print_mapping_info @print_mapping_info
def InstanceNormalization(self, node): def InstanceNormalization(self, node):
op_name = name_generator("instanse_norm", self.nn_name2id) op_name = name_generator("instanse_norm", self.nn_name2id)
output_name = node.layer_name output_name = node.name
layer_outputs = [op_name, output_name] layer_outputs = [op_name, output_name]
val_x = self.graph.get_input_node(node, idx=0, copy=True) val_x = self.graph.get_input_node(node, idx=0, copy=True)
val_scale = self.graph.get_input_node(node, idx=1, copy=True) val_scale = self.graph.get_input_node(node, idx=1, copy=True)
...@@ -537,8 +530,8 @@ class OpSet9(): ...@@ -537,8 +530,8 @@ class OpSet9():
layer_attrs = { layer_attrs = {
'num_features': node.out_shapes[0][1], 'num_features': node.out_shapes[0][1],
'epsilon': epsilon, 'epsilon': epsilon,
'weight_attr': string(self.get_node_name(val_scale)), 'weight_attr': string(val_scale.name),
'bias_attr': string(self.get_node_name(val_b)) 'bias_attr': string(val_b.name)
} }
dim = len(val_x.out_shapes[0]) dim = len(val_x.out_shapes[0])
if dim == 2 or dim == 3: if dim == 2 or dim == 3:
...@@ -551,7 +544,7 @@ class OpSet9(): ...@@ -551,7 +544,7 @@ class OpSet9():
raise Exception("The paddle only support 2D, 3D, 4D or 5D input in InstanceNormalization.") raise Exception("The paddle only support 2D, 3D, 4D or 5D input in InstanceNormalization.")
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
paddle_op, paddle_op,
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=layer_outputs, outputs=layer_outputs,
**layer_attrs) **layer_attrs)
...@@ -560,9 +553,9 @@ class OpSet9(): ...@@ -560,9 +553,9 @@ class OpSet9():
val_x = self.graph.get_input_node(node, idx=0, copy=True) val_x = self.graph.get_input_node(node, idx=0, copy=True)
val_shape = self.graph.get_input_node(node, idx=1, copy=True) val_shape = self.graph.get_input_node(node, idx=1, copy=True)
val_x_dtype = val_x.dtype val_x_dtype = val_x.dtype
name_ones = node.layer_name + '_ones' name_ones = node.name + '_ones'
attr_ones = { attr_ones = {
'shape': val_shape.layer_name, 'shape': val_shape.name,
'dtype': string(val_x_dtype), 'dtype': string(val_x_dtype),
'fill_value': 1 'fill_value': 1
} }
...@@ -572,11 +565,11 @@ class OpSet9(): ...@@ -572,11 +565,11 @@ class OpSet9():
outputs=[name_ones], outputs=[name_ones],
**attr_ones) **attr_ones)
inputs_dict = {'x': name_ones, inputs_dict = {'x': name_ones,
'y': self.get_node_name(val_x)} 'y': val_x.name}
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.multiply', 'paddle.multiply',
inputs=inputs_dict, inputs=inputs_dict,
outputs=[node.layer_name]) outputs=[node.name])
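# Expand is emulated above via broadcasting: a ones tensor of the target
# shape multiplied by x yields x broadcast to that shape; for example:
import paddle
x = paddle.to_tensor([[1.0], [2.0]])             # shape [2, 1]
ones = paddle.full([2, 3], 1.0, dtype='float32')
y = paddle.multiply(ones, x)                     # shape [2, 3]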
@print_mapping_info @print_mapping_info
def Gather(self, node): def Gather(self, node):
...@@ -590,87 +583,87 @@ class OpSet9(): ...@@ -590,87 +583,87 @@ class OpSet9():
if len(val_x.out_shapes[0]) <= 1: if len(val_x.out_shapes[0]) <= 1:
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.gather', 'paddle.gather',
inputs={'x': self.get_node_name(val_x), inputs={'x': val_x.name,
'index': self.get_node_name(indices)}, 'index': indices.name},
outputs=[node.layer_name]) outputs=[node.name])
elif len(val_x.out_shapes[0]) > 1: elif len(val_x.out_shapes[0]) > 1:
if len(indices_shape) == 0: if len(indices_shape) == 0:
gather_ = node.layer_name + '_1' gather_ = node.name + '_1'
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.gather', 'paddle.gather',
inputs={'x': self.get_node_name(val_x), inputs={'x': val_x.name,
'index': self.get_node_name(indices)}, 'index': indices.name},
outputs=[gather_]) outputs=[gather_])
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.squeeze', 'paddle.squeeze',
inputs={'x': gather_}, inputs={'x': gather_},
outputs=[node.layer_name], outputs=[node.name],
axis=[0]) axis=[0])
else: else:
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.gather', 'paddle.gather',
inputs={'x': self.get_node_name(val_x), inputs={'x': val_x.name,
'index': self.get_node_name(indices)}, 'index': indices.name},
outputs=[node.layer_name]) outputs=[node.name])
elif axis > 0 and len(indices_shape) <= 1: elif axis > 0 and len(indices_shape) <= 1:
perm = list(range(len(val_x.out_shapes[0]))) perm = list(range(len(val_x.out_shapes[0])))
perm = [axis] + perm[:axis] + perm[axis + 1:] perm = [axis] + perm[:axis] + perm[axis + 1:]
name_trans = val_x.layer_name + '_trans' name_trans = val_x.name + '_trans'
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.transpose', 'paddle.transpose',
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=[name_trans], outputs=[name_trans],
perm=perm) perm=perm)
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.gather', 'paddle.gather',
inputs={'x': name_trans, inputs={'x': name_trans,
'index': self.get_node_name(indices)}, 'index': indices.name},
outputs=[node.layer_name]) outputs=[node.name])
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.transpose', 'paddle.transpose',
inputs={"x": node.layer_name}, inputs={"x": node.name},
outputs=[node.layer_name], outputs=[node.name],
perm=perm) perm=perm)
if len(indices_shape) < 1: if len(indices_shape) < 1:
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.squeeze', 'paddle.squeeze',
inputs={'x': node.layer_name}, inputs={'x': node.name},
outputs=[node.layer_name], outputs=[node.name],
axis=[axis]) axis=[axis])
elif axis == 0 and len(indices_shape) > 1: elif axis == 0 and len(indices_shape) > 1:
if val_x.out_shapes[0] is not None and isinstance( if val_x.out_shapes[0] is not None and isinstance(
val_x, ONNXGraphDataNode): val_x, ONNXGraphDataNode):
indices_cast = indices.layer_name + '_cast' indices_cast = indices.name + '_cast'
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.cast', 'paddle.cast',
inputs={"x": self.get_node_name(indices)}, inputs={"x": indices.name},
outputs=[indices_cast], outputs=[indices_cast],
dtype=string('int64')) dtype=string('int64'))
op_name = name_generator("embedding", self.nn_name2id) op_name = name_generator("embedding", self.nn_name2id)
output_name = node.layer_name output_name = node.name
layer_outputs = [op_name, output_name] layer_outputs = [op_name, output_name]
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.nn.Embedding', 'paddle.nn.Embedding',
inputs={"x": indices_cast}, inputs={"x": indices_cast},
outputs=layer_outputs, outputs=layer_outputs,
param_attr=string(val_x.layer_name), param_attr=string(val_x.name),
size=val_x.out_shapes[0]) size=val_x.out_shapes[0])
else: else:
from functools import reduce from functools import reduce
reshape_shape = reduce(lambda x, y: x * y, indices_shape) reshape_shape = reduce(lambda x, y: x * y, indices_shape)
indices_reshape = indices.layer_name + '_shape' indices_reshape = indices.name + '_shape'
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.reshape', 'paddle.reshape',
inputs={"x": self.get_node_name(indices)}, inputs={"x": indices.name},
outputs=[indices_reshape], outputs=[indices_reshape],
shape=[reshape_shape, ]) shape=[reshape_shape, ])
perm = list(range(len(val_x.out_shapes[0]))) perm = list(range(len(val_x.out_shapes[0])))
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.gather', 'paddle.gather',
inputs={'x': self.get_node_name(val_x), inputs={'x': val_x.name,
'index': indices_reshape}, 'index': indices_reshape},
outputs=[node.layer_name]) outputs=[node.name])
val_x_shape = val_x.out_shapes[0] val_x_shape = val_x.out_shapes[0]
reshaped_shape = [] reshaped_shape = []
for i in perm: for i in perm:
...@@ -679,36 +672,36 @@ class OpSet9(): ...@@ -679,36 +672,36 @@ class OpSet9():
reshaped_shape.append(i) reshaped_shape.append(i)
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.reshape', 'paddle.reshape',
inputs={"x": node.layer_name}, inputs={"x": node.name},
outputs=[node.layer_name], outputs=[node.name],
shape=reshaped_shape) shape=reshaped_shape)
elif axis > 0 and len(indices_shape) > 1: elif axis > 0 and len(indices_shape) > 1:
from functools import reduce from functools import reduce
reshape_shape = reduce(lambda x, y: x * y, indices_shape) reshape_shape = reduce(lambda x, y: x * y, indices_shape)
indices_reshape = indices.layer_name + '_shape' indices_reshape = indices.name + '_shape'
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.reshape', 'paddle.reshape',
inputs={"x": self.get_node_name(indices)}, inputs={"x": indices.name},
outputs=[indices_reshape], outputs=[indices_reshape],
shape=[reshape_shape, ]) shape=[reshape_shape, ])
perm = list(range(len(val_x.out_shapes[0]))) perm = list(range(len(val_x.out_shapes[0])))
perm = [axis] + perm[:axis] + perm[axis + 1:] perm = [axis] + perm[:axis] + perm[axis + 1:]
name_trans = val_x.layer_name + '_transpose' name_trans = val_x.name + '_transpose'
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.transpose', 'paddle.transpose',
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=[name_trans], outputs=[name_trans],
perm=perm) perm=perm)
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.gather', 'paddle.gather',
inputs={'x': name_trans, inputs={'x': name_trans,
'index': indices_reshape}, 'index': indices_reshape},
outputs=[node.layer_name]) outputs=[node.name])
input_transpose = node.layer_name + '_transpose' input_transpose = node.name + '_transpose'
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.transpose', 'paddle.transpose',
inputs={"x": node.layer_name}, inputs={"x": node.name},
outputs=[input_transpose], outputs=[input_transpose],
perm=perm) perm=perm)
val_x_shape = val_x.out_shapes[0] val_x_shape = val_x.out_shapes[0]
...@@ -720,7 +713,7 @@ class OpSet9(): ...@@ -720,7 +713,7 @@ class OpSet9():
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.reshape', 'paddle.reshape',
inputs={"x": input_transpose}, inputs={"x": input_transpose},
outputs=[node.layer_name], outputs=[node.name],
shape=reshaped_shape) shape=reshaped_shape)
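# A small illustration of the axis > 0 path above: move the gather axis to
# the front, gather along axis 0, then restore the layout (NumPy used only
# for illustration; for this 2-D case the permutation is its own inverse):
import numpy as np
x = np.arange(12).reshape(3, 4)
idx = np.array([2, 0])
perm = [1, 0]                                    # axis=1 brought to front
out = x.transpose(perm)[idx].transpose(perm)
assert (out == np.take(x, idx, axis=1)).all()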
@print_mapping_info @print_mapping_info
...@@ -731,38 +724,38 @@ class OpSet9(): ...@@ -731,38 +724,38 @@ class OpSet9():
if len(indices.out_shapes[0]) == 1: if len(indices.out_shapes[0]) == 1:
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.scatter', 'paddle.scatter',
inputs={'x': self.get_node_name(val_x), inputs={'x': val_x.name,
'index': self.get_node_name(indices), 'index': indices.name,
'updates': self.get_node_name(updates)}, 'updates': updates.name},
outputs=[node.layer_name]) outputs=[node.name])
else: else:
input_inner_indices = node.layer_name + '_input_inner_indices' input_inner_indices = node.name + '_input_inner_indices'
shape = val_x.out_shapes[0] shape = val_x.out_shapes[0]
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.reshape', 'paddle.reshape',
inputs={"x": self.get_node_name(indices)}, inputs={"x": indices.name},
outputs=[self.get_node_name(indices)], outputs=[indices.name],
shape=indices.out_shapes[0]) shape=indices.out_shapes[0])
zeros_like_val_x = val_x.layer_name + '_zeros' zeros_like_val_x = val_x.name + '_zeros'
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.zeros_like', 'paddle.zeros_like',
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=[zeros_like_val_x]) outputs=[zeros_like_val_x])
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.scatter_nd_add', 'paddle.scatter_nd_add',
inputs={ inputs={
'x': zeros_like_val_x, 'x': zeros_like_val_x,
'index': self.get_node_name(indices), 'index': indices.name,
'updates': self.get_node_name(updates) 'updates': updates.name
}, },
outputs=[input_inner_indices]) outputs=[input_inner_indices])
indices_mask = node.layer_name + '_indices_mask' indices_mask = node.name + '_indices_mask'
constant_minus_one = node.layer_name + '_constant_minus_one' constant_minus_one = node.name + '_constant_minus_one'
# paddle.full_like creates a tensor with the same shape as the input tensor # paddle.full_like creates a tensor with the same shape as the input tensor
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.full_like', 'paddle.full_like',
inputs={"x": self.get_node_name(updates)}, inputs={"x": updates.name},
outputs=[constant_minus_one], outputs=[constant_minus_one],
dtype=string(updates.dtype), dtype=string(updates.dtype),
fill_value=-1) fill_value=-1)
...@@ -770,29 +763,29 @@ class OpSet9(): ...@@ -770,29 +763,29 @@ class OpSet9():
'paddle.scatter_nd_add', 'paddle.scatter_nd_add',
inputs={ inputs={
'x': zeros_like_val_x, 'x': zeros_like_val_x,
'index': self.get_node_name(indices), 'index': indices.name,
'updates': constant_minus_one 'updates': constant_minus_one
}, },
outputs=[indices_mask]) outputs=[indices_mask])
constant_one = node.layer_name + '_constant_1' constant_one = node.name + '_constant_1'
# paddle.full_like creates a tensor with the same shape as the input tensor # paddle.full_like creates a tensor with the same shape as the input tensor
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.full_like', 'paddle.full_like',
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=[constant_one], outputs=[constant_one],
dtype=string(val_x.dtype), dtype=string(val_x.dtype),
fill_value=1) fill_value=1)
input_out_indices_mask = node.layer_name + '_input_out_indices_mask' input_out_indices_mask = node.name + '_input_out_indices_mask'
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.add", "paddle.add",
inputs={"x": indices_mask, inputs={"x": indices_mask,
"y": constant_one}, "y": constant_one},
outputs=[input_out_indices_mask]) outputs=[input_out_indices_mask])
input_out_indices = node.layer_name + '_input_out_indices' input_out_indices = node.name + '_input_out_indices'
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.multiply", "paddle.multiply",
inputs={"x": self.get_node_name(val_x), inputs={"x": val_x.name,
"y": input_out_indices_mask}, "y": input_out_indices_mask},
outputs=[input_out_indices]) outputs=[input_out_indices])
...@@ -800,7 +793,7 @@ class OpSet9(): ...@@ -800,7 +793,7 @@ class OpSet9():
"paddle.add", "paddle.add",
inputs={"x": input_inner_indices, inputs={"x": input_inner_indices,
"y": input_out_indices}, "y": input_out_indices},
outputs=[node.layer_name]) outputs=[node.name])
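# A compact NumPy sketch of the masking trick above (1-D case): positions
# touched by the scatter receive a -1 mask, so mask + 1 zeroes them out of
# the original tensor before the final add.
import numpy as np
x = np.array([10., 20., 30., 40.])
idx = np.array([1, 3])
updates = np.array([-2., -4.])
inner = np.zeros_like(x); inner[idx] = updates   # scatter_nd_add on zeros
mask = np.zeros_like(x);  mask[idx] = -1.0
out = inner + x * (mask + 1.0)                   # -> [10., -2., 30., -4.]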
@print_mapping_info @print_mapping_info
def Range(self, node): def Range(self, node):
...@@ -808,13 +801,13 @@ class OpSet9(): ...@@ -808,13 +801,13 @@ class OpSet9():
val_limit = self.graph.get_input_node(node, idx=1, copy=True) val_limit = self.graph.get_input_node(node, idx=1, copy=True)
val_delta = self.graph.get_input_node(node, idx=2, copy=True) val_delta = self.graph.get_input_node(node, idx=2, copy=True)
dtype = val_start.dtype dtype = val_start.dtype
inputs = {'start': self.get_node_name(val_start), inputs = {'start': val_start.name,
'end': self.get_node_name(val_limit), 'end': val_limit.name,
'step': self.get_node_name(val_delta)} 'step': val_delta.name}
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.arange', 'paddle.arange',
inputs=inputs, inputs=inputs,
outputs=[node.layer_name], outputs=[node.name],
dtype=string(dtype)) dtype=string(dtype))
@print_mapping_info @print_mapping_info
...@@ -836,8 +829,8 @@ class OpSet9(): ...@@ -836,8 +829,8 @@ class OpSet9():
steps = _const_weight_or_none(steps) steps = _const_weight_or_none(steps)
layer_attrs = { layer_attrs = {
"axes": axes, "axes": axes,
"starts": starts.layer_name, "starts": starts.name,
"ends": ends.layer_name "ends": ends.name
} }
if starts_value is not None and ends_value is not None: if starts_value is not None and ends_value is not None:
starts_value = starts_value.copy() starts_value = starts_value.copy()
...@@ -860,18 +853,18 @@ class OpSet9(): ...@@ -860,18 +853,18 @@ class OpSet9():
} }
else: else:
if starts.dtype != 'int32': if starts.dtype != 'int32':
starts_cast = starts.layer_name + '_cast' starts_cast = starts.name + '_cast'
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.cast', 'paddle.cast',
inputs={"x": self.get_node_name(starts)}, inputs={"x": starts.name},
outputs=[starts_cast], outputs=[starts_cast],
dtype=string('int32')) dtype=string('int32'))
layer_attrs['starts'] = starts_cast layer_attrs['starts'] = starts_cast
if ends.dtype != 'int32': if ends.dtype != 'int32':
ends_cast = ends.layer_name + '_cast' ends_cast = ends.name + '_cast'
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.cast', 'paddle.cast',
inputs={"x": self.get_node_name(ends)}, inputs={"x": ends.name},
outputs=[ends_cast], outputs=[ends_cast],
dtype=string('int32')) dtype=string('int32'))
layer_attrs['ends'] = ends_cast layer_attrs['ends'] = ends_cast
...@@ -888,14 +881,14 @@ class OpSet9(): ...@@ -888,14 +881,14 @@ class OpSet9():
layer_attrs['strides'] = steps layer_attrs['strides'] = steps
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.strided_slice', 'paddle.strided_slice',
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=[node.layer_name], outputs=[node.name],
**layer_attrs) **layer_attrs)
else: else:
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.slice', 'paddle.slice',
inputs={"input": self.get_node_name(val_x)}, inputs={"input": val_x.name},
outputs=[node.layer_name], outputs=[node.name],
**layer_attrs) **layer_attrs)
@print_mapping_info @print_mapping_info
...@@ -911,14 +904,14 @@ class OpSet9(): ...@@ -911,14 +904,14 @@ class OpSet9():
if len(value) == 1: if len(value) == 1:
value = value[0] value = value[0]
layer_attrs = { layer_attrs = {
'shape': val_shape.layer_name, 'shape': val_shape.name,
'dtype': string(dtype), 'dtype': string(dtype),
'fill_value': value 'fill_value': value
} }
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.full", "paddle.full",
inputs={}, inputs={},
outputs=[node.layer_name], outputs=[node.name],
**layer_attrs) **layer_attrs)
@print_mapping_info @print_mapping_info
...@@ -935,8 +928,8 @@ class OpSet9(): ...@@ -935,8 +928,8 @@ class OpSet9():
} }
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.clip', 'paddle.clip',
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=[node.layer_name], outputs=[node.name],
**layer_attrs) **layer_attrs)
else: else:
max_ipt = self.graph.get_input_node(node, idx=1, copy=True) max_ipt = self.graph.get_input_node(node, idx=1, copy=True)
...@@ -951,8 +944,8 @@ class OpSet9(): ...@@ -951,8 +944,8 @@ class OpSet9():
layer_attrs = {'max': max_value, 'min': min_value} layer_attrs = {'max': max_value, 'min': min_value}
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.clip', 'paddle.clip',
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=[node.layer_name], outputs=[node.name],
**layer_attrs) **layer_attrs)
else: else:
raise raise
...@@ -971,13 +964,13 @@ class OpSet9(): ...@@ -971,13 +964,13 @@ class OpSet9():
outputs_list = list() outputs_list = list()
if isinstance(split, list) or isinstance(split, tuple): if isinstance(split, list) or isinstance(split, tuple):
for i, s in enumerate(split): for i, s in enumerate(split):
outputs_list.append("{}_{}".format(node.layer_name, i)) outputs_list.append("{}_p{}".format(node.name, i))
else: else:
outputs_list.append(node.layer_name) outputs_list.append(node.name)
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.split', 'paddle.split',
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=outputs_list, outputs=outputs_list,
**layer_attrs) **layer_attrs)
...@@ -992,28 +985,28 @@ class OpSet9(): ...@@ -992,28 +985,28 @@ class OpSet9():
if shape_value is not None: if shape_value is not None:
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.reshape', 'paddle.reshape',
inputs={'x': self.get_node_name(val_x)}, inputs={'x': val_x.name},
outputs=[node.layer_name], outputs=[node.name],
shape=shape_value.tolist()) shape=shape_value.tolist())
elif len(node.out_shapes[0]) > 0 and _is_static_shape(node.out_shapes[ elif len(node.out_shapes[0]) > 0 and _is_static_shape(node.out_shapes[
0]): 0]):
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.reshape', 'paddle.reshape',
inputs={'x': self.get_node_name(val_x)}, inputs={'x': val_x.name},
outputs=[node.layer_name], outputs=[node.name],
shape=node.out_shapes[0]) shape=node.out_shapes[0])
else: else:
# shape may be [], coming from Gather with scalar indices # shape may be [], coming from Gather with scalar indices
if len(val_shape.out_shapes[0]) > 0: if len(val_shape.out_shapes[0]) > 0:
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.reshape', 'paddle.reshape',
inputs={'x': self.get_node_name(val_shape)}, inputs={'x': val_shape.name},
outputs=[self.get_node_name(val_shape)], outputs=[val_shape.name],
shape=val_shape.out_shapes[0]) shape=val_shape.out_shapes[0])
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.reshape', 'paddle.reshape',
inputs={'x': self.get_node_name(val_x), inputs={'x': val_x.name,
'shape': self.get_node_name(val_shape)}, 'shape': val_shape.name},
outputs=[node.layer_name]) outputs=[node.name])
@print_mapping_info @print_mapping_info
...@@ -1030,16 +1023,16 @@ class OpSet9(): ...@@ -1030,16 +1023,16 @@ class OpSet9():
assert dtype == output_dtype, "dtype of 'to' does not match the output dtype" assert dtype == output_dtype, "dtype of 'to' does not match the output dtype"
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.cast', 'paddle.cast',
inputs={'x': self.get_node_name(val_input)}, inputs={'x': val_input.name},
outputs=[node.layer_name], outputs=[node.name],
dtype=string(dtype)) dtype=string(dtype))
@print_mapping_info @print_mapping_info
def Not(self, node): def Not(self, node):
val_input = self.graph.get_input_node(node, idx=0, copy=True) val_input = self.graph.get_input_node(node, idx=0, copy=True)
self.paddle_graph.add_layer('paddle.logical_not', self.paddle_graph.add_layer('paddle.logical_not',
inputs={'x': self.get_node_name(val_input)}, inputs={'x': val_input.name},
outputs=[node.layer_name]) outputs=[node.name])
@print_mapping_info @print_mapping_info
def AveragePool(self, node): def AveragePool(self, node):
...@@ -1072,16 +1065,16 @@ class OpSet9(): ...@@ -1072,16 +1065,16 @@ class OpSet9():
"pool_padding": paddings, "pool_padding": paddings,
"ceil_mode": ceil_mode, "ceil_mode": ceil_mode,
"exclusive": 'True', "exclusive": 'True',
"name": string(node.layer_name) "name": string(node.name)
} }
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
paddle_op, paddle_op,
inputs={'input': val_x if isinstance(val_x, str) else self.get_node_name(val_x)}, inputs={'input': val_x if isinstance(val_x, str) else val_x.name},
outputs=[node.layer_name], outputs=[node.name],
**layer_attrs) **layer_attrs)
# TODO(syf): op has diff # TODO(syf): op has diff
# op_name = name_generator("pool", self.nn_name2id) # op_name = name_generator("pool", self.nn_name2id)
# output_name = node.layer_name # output_name = node.name
# layer_outputs = [op_name, output_name] # layer_outputs = [op_name, output_name]
# paddle_op = 'paddle.nn.Pool{}D'.format(poolnd) # paddle_op = 'paddle.nn.Pool{}D'.format(poolnd)
# assert 1 <= poolnd <= 3, 'only Pool1D, Pool2D and Pool3D are supported' # assert 1 <= poolnd <= 3, 'only Pool1D, Pool2D and Pool3D are supported'
...@@ -1094,7 +1087,7 @@ class OpSet9(): ...@@ -1094,7 +1087,7 @@ class OpSet9():
# } # }
# self.paddle_graph.add_layer( # self.paddle_graph.add_layer(
# paddle_op, # paddle_op,
# inputs={'x': self.get_node_name(val_x)}, # inputs={'x': val_x.name},
# outputs=layer_outputs, # outputs=layer_outputs,
# **layer_attrs) # **layer_attrs)
...@@ -1104,7 +1097,7 @@ class OpSet9(): ...@@ -1104,7 +1097,7 @@ class OpSet9():
dtypes = set() dtypes = set()
for i in range(len(node.layer.input)): for i in range(len(node.layer.input)):
ipt = self.graph.get_input_node(node, idx=i, copy=True) ipt = self.graph.get_input_node(node, idx=i, copy=True)
inputs_list.append(self.get_node_name(ipt)) inputs_list.append(ipt.name)
dtypes.add(ipt.dtype) dtypes.add(ipt.dtype)
if len(dtypes) > 1: if len(dtypes) > 1:
assert False, 'Unsupported situation happened, please create an issue on https://github.com/PaddlePaddle/X2Paddle/issues.' assert False, 'Unsupported situation happened, please create an issue on https://github.com/PaddlePaddle/X2Paddle/issues.'
...@@ -1112,7 +1105,7 @@ class OpSet9(): ...@@ -1112,7 +1105,7 @@ class OpSet9():
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.concat', 'paddle.concat',
inputs={"x": inputs_list}, inputs={"x": inputs_list},
outputs=[node.layer_name], outputs=[node.name],
axis=axis) axis=axis)
@print_mapping_info @print_mapping_info
...@@ -1131,8 +1124,8 @@ class OpSet9(): ...@@ -1131,8 +1124,8 @@ class OpSet9():
shape_list[1] *= s shape_list[1] *= s
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
'paddle.reshape', 'paddle.reshape',
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=[node.layer_name], outputs=[node.name],
shape=shape_list) shape=shape_list)
@print_mapping_info @print_mapping_info
...@@ -1145,9 +1138,9 @@ class OpSet9(): ...@@ -1145,9 +1138,9 @@ class OpSet9():
beta = node.get_attr('beta', 1.) # optional beta = node.get_attr('beta', 1.) # optional
trans_a = bool(node.get_attr('transA', 0)) # optional trans_a = bool(node.get_attr('transA', 0)) # optional
trans_b = bool(node.get_attr('transB', 0)) # optional trans_b = bool(node.get_attr('transB', 0)) # optional
val_mm = node.layer_name + '_mm' val_mm = node.name + '_mm'
matmul_inputs = {"x": self.get_node_name(val_a), matmul_inputs = {"x": val_a.name,
"y": self.get_node_name(val_b)} "y": val_b.name}
attr_matmul = { attr_matmul = {
"transpose_x": trans_a, "transpose_x": trans_a,
"transpose_y": trans_b, "transpose_y": trans_b,
...@@ -1166,49 +1159,47 @@ class OpSet9(): ...@@ -1166,49 +1159,47 @@ class OpSet9():
if beta != 0: if beta != 0:
if beta == 1.: if beta == 1.:
add_inputs = {"x": val_mm, add_inputs = {"x": val_mm,
"y": self.get_node_name(val_c)} "y": val_c.name}
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.add", "paddle.add",
inputs=add_inputs, inputs=add_inputs,
outputs=[node.layer_name]) outputs=[node.name])
else: else:
var_beta = node.layer_name + '_beta' var_beta = node.name + '_beta'
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.scale", "paddle.scale",
inputs={"x": self.get_node_name(val_c)}, inputs={"x": val_c.name},
outputs=[var_beta], outputs=[var_beta],
scale=beta) scale=beta)
add_inputs = {"x": val_mm, "y": var_beta} add_inputs = {"x": val_mm, "y": var_beta}
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.add", "paddle.add",
inputs=add_inputs, inputs=add_inputs,
outputs=[node.layer_name]) outputs=[node.name])
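# The Gemm mapping above implements ONNX Gemm, Y = alpha * op(A) @ op(B)
# + beta * C, with special handling for beta in {0, 1}; in plain Paddle the
# general case is roughly (alpha/beta here are assumed example values):
import paddle
A = paddle.rand([2, 3]); B = paddle.rand([3, 4]); C = paddle.rand([2, 4])
alpha, beta = 0.5, 2.0
Y = paddle.add(paddle.scale(paddle.matmul(A, B), scale=alpha),
               paddle.scale(C, scale=beta))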
@print_mapping_info @print_mapping_info
def Sum(self, node): def Sum(self, node):
val_inps = node.layer.input val_inps = node.layer.input
inputs_dict = { inputs_dict = {
"x": self.get_node_name( "x": self.graph.get_input_node(
self.graph.get_input_node( node, idx=0, copy=True).name,
node, idx=0, copy=True)), "y": self.graph.get_input_node(
"y": self.get_node_name( node, idx=1, copy=True).name,
self.graph.get_input_node(
node, idx=1, copy=True)),
} }
self.paddle_graph.add_layer("paddle.add", self.paddle_graph.add_layer("paddle.add",
inputs=inputs_dict, inputs=inputs_dict,
outputs=[node.layer_name]) outputs=[node.name])
for idx, ipt in enumerate(val_inps[2:]): for idx, ipt in enumerate(val_inps[2:]):
y = self.graph.get_input_node(node, idx=idx + 2, copy=True) y = self.graph.get_input_node(node, idx=idx + 2, copy=True)
inputs_dict = { inputs_dict = {
"x": node.layer_name, "x": node.name,
"y": self.get_node_name(y), "y": y.name,
} }
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.add", "paddle.add",
inputs=inputs_dict, inputs=inputs_dict,
outputs=[node.layer_name]) outputs=[node.name])
@print_mapping_info @print_mapping_info
def MatMul(self, node): def MatMul(self, node):
...@@ -1216,30 +1207,30 @@ class OpSet9(): ...@@ -1216,30 +1207,30 @@ class OpSet9():
val_y = self.graph.get_input_node(node, idx=1, copy=True) val_y = self.graph.get_input_node(node, idx=1, copy=True)
x_shape = val_x.out_shapes[0] x_shape = val_x.out_shapes[0]
y_shape = val_y.out_shapes[0] y_shape = val_y.out_shapes[0]
inputs_dict = {"x": self.get_node_name(val_x), inputs_dict = {"x": val_x.name,
"y": self.get_node_name(val_y)} "y": val_y.name}
if y_shape[0] == 1 and x_shape[-1] != 1 and x_shape[0] != 1: if y_shape[0] == 1 and x_shape[-1] != 1 and x_shape[0] != 1:
y_squeeze = val_y.layer_name + '_squeeze' y_squeeze = val_y.name + '_squeeze'
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.squeeze", "paddle.squeeze",
inputs={"x": self.get_node_name(val_y)}, inputs={"x": val_y.name},
outputs=[y_squeeze], outputs=[y_squeeze],
axis=[0]) axis=[0])
inputs_dict['y'] = y_squeeze inputs_dict['y'] = y_squeeze
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.matmul", "paddle.matmul",
inputs=inputs_dict, inputs=inputs_dict,
outputs=[node.layer_name]) outputs=[node.name])
else: else:
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.matmul", "paddle.matmul",
inputs=inputs_dict, inputs=inputs_dict,
outputs=[node.layer_name]) outputs=[node.name])
@print_mapping_info @print_mapping_info
def BatchNormalization(self, node): def BatchNormalization(self, node):
op_name = name_generator("batchnorm", self.nn_name2id) op_name = name_generator("batchnorm", self.nn_name2id)
output_name = node.layer_name output_name = node.name
layer_outputs = [op_name, output_name] layer_outputs = [op_name, output_name]
val_x = self.graph.get_input_node(node, idx=0, copy=True) val_x = self.graph.get_input_node(node, idx=0, copy=True)
val_scale = self.graph.get_input_node(node, idx=1, copy=True) val_scale = self.graph.get_input_node(node, idx=1, copy=True)
...@@ -1258,15 +1249,15 @@ class OpSet9(): ...@@ -1258,15 +1249,15 @@ class OpSet9():
"momentum": momentum, "momentum": momentum,
"epsilon": epsilon, "epsilon": epsilon,
"is_test": True, "is_test": True,
"param_attr": string(self.get_node_name(val_scale)), "param_attr": string(val_scale.name),
"bias_attr": string(self.get_node_name(val_b)), "bias_attr": string(val_b.name),
"moving_mean_name": string(self.get_node_name(val_mean)), "moving_mean_name": string(val_mean.name),
"moving_variance_name": string(self.get_node_name(val_var)), "moving_variance_name": string(val_var.name),
"use_global_stats": False, "use_global_stats": False,
} }
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.nn.BatchNorm", "paddle.nn.BatchNorm",
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=layer_outputs, outputs=layer_outputs,
**layer_attrs) **layer_attrs)
...@@ -1276,14 +1267,14 @@ class OpSet9(): ...@@ -1276,14 +1267,14 @@ class OpSet9():
perm = node.get_attr('perm') perm = node.get_attr('perm')
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.transpose", "paddle.transpose",
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=[node.layer_name], outputs=[node.name],
perm=perm) perm=perm)
@print_mapping_info @print_mapping_info
def PRelu(self, node): def PRelu(self, node):
op_name = name_generator("prelu", self.nn_name2id) op_name = name_generator("prelu", self.nn_name2id)
output_name = node.layer_name output_name = node.name
layer_outputs = [op_name, output_name] layer_outputs = [op_name, output_name]
val_x = self.graph.get_input_node(node, idx=0, copy=True) val_x = self.graph.get_input_node(node, idx=0, copy=True)
val_slope = self.graph.get_input_node(node, idx=1, copy=True) val_slope = self.graph.get_input_node(node, idx=1, copy=True)
...@@ -1299,17 +1290,17 @@ class OpSet9(): ...@@ -1299,17 +1290,17 @@ class OpSet9():
# paddle params shape need be [1, channel] # paddle params shape need be [1, channel]
slope_data = _const_weight_or_none(val_slope) slope_data = _const_weight_or_none(val_slope)
slope_data = np.reshape(slope_data, [1] + shape_slope) slope_data = np.reshape(slope_data, [1] + shape_slope)
self.weights[val_slope.layer_name] = slope_data self.weights[val_slope.name] = slope_data
num_parameters = val_x.out_shapes[0][1] num_parameters = val_x.out_shapes[0][1]
else: else:
num_parameters = 1 num_parameters = 1
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.nn.PReLU", "paddle.nn.PReLU",
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=layer_outputs, outputs=layer_outputs,
num_parameters=num_parameters, num_parameters=num_parameters,
weight_attr=string(val_slope.layer_name)) weight_attr=string(val_slope.name))
@print_mapping_info @print_mapping_info
def Squeeze(self, node): def Squeeze(self, node):
...@@ -1318,14 +1309,14 @@ class OpSet9(): ...@@ -1318,14 +1309,14 @@ class OpSet9():
if len(val_x.out_shapes[0]) == 1: if len(val_x.out_shapes[0]) == 1:
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.cast", "paddle.cast",
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=[node.layer_name], outputs=[node.name],
dtype=string(val_x.dtype)) dtype=string(val_x.dtype))
else: else:
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.squeeze", "paddle.squeeze",
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=[node.layer_name], outputs=[node.name],
axis=axes) axis=axes)
@print_mapping_info @print_mapping_info
...@@ -1334,9 +1325,9 @@ class OpSet9(): ...@@ -1334,9 +1325,9 @@ class OpSet9():
val_y = self.graph.get_input_node(node, idx=1, copy=True) val_y = self.graph.get_input_node(node, idx=1, copy=True)
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.equal", "paddle.equal",
inputs={'x': self.get_node_name(val_x), inputs={'x': val_x.name,
'y': self.get_node_name(val_y)}, 'y': val_y.name},
outputs=[node.layer_name]) outputs=[node.name])
@print_mapping_info @print_mapping_info
def Greater(self, node): def Greater(self, node):
...@@ -1344,8 +1335,8 @@ class OpSet9(): ...@@ -1344,8 +1335,8 @@ class OpSet9():
val_y = self.graph.get_input_node(node, idx=1, copy=True) val_y = self.graph.get_input_node(node, idx=1, copy=True)
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.greater_than", "paddle.greater_than",
inputs={'x': self.get_node_name(val_x), inputs={'x': val_x.name,
'y': self.get_node_name(val_y)}, 'y': val_y.name},
                outputs=node,                outputs=[node.name],
param_attr=None) param_attr=None)
...@@ -1355,10 +1346,10 @@ class OpSet9(): ...@@ -1355,10 +1346,10 @@ class OpSet9():
val_x = self.graph.get_input_node(node, idx=1, copy=True) val_x = self.graph.get_input_node(node, idx=1, copy=True)
val_y = self.graph.get_input_node(node, idx=2, copy=True) val_y = self.graph.get_input_node(node, idx=2, copy=True)
not_condition = condition.layer_name + '_not' not_condition = condition.name + '_not'
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.logical_not", "paddle.logical_not",
inputs={"x": self.get_node_name(condition)}, inputs={"x": condition.name},
outputs=[not_condition]) outputs=[not_condition])
cast_not_condition = not_condition + '_cast' cast_not_condition = not_condition + '_cast'
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
...@@ -1366,22 +1357,22 @@ class OpSet9(): ...@@ -1366,22 +1357,22 @@ class OpSet9():
inputs={"x": not_condition}, inputs={"x": not_condition},
outputs=[cast_not_condition], outputs=[cast_not_condition],
dtype=string(val_x.dtype)) dtype=string(val_x.dtype))
cast_condition = condition.layer_name + '_cast' cast_condition = condition.name + '_cast'
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.cast", "paddle.cast",
inputs={"x": self.get_node_name(condition)}, inputs={"x": condition.name},
outputs=[cast_condition], outputs=[cast_condition],
dtype=string(val_x.dtype)) dtype=string(val_x.dtype))
mul_val_x = val_x.layer_name + '_mul' mul_val_x = val_x.name + '_mul'
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.multiply", "paddle.multiply",
inputs={'x': self.get_node_name(val_x), inputs={'x': val_x.name,
'y': cast_condition}, 'y': cast_condition},
outputs=[mul_val_x]) outputs=[mul_val_x])
mul_val_y = val_y.layer_name + '_mul' mul_val_y = val_y.name + '_mul'
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.multiply", "paddle.multiply",
inputs={'x': self.get_node_name(val_y), inputs={'x': val_y.name,
'y': cast_not_condition}, 'y': cast_not_condition},
outputs=[mul_val_y]) outputs=[mul_val_y])
...@@ -1389,7 +1380,7 @@ class OpSet9(): ...@@ -1389,7 +1380,7 @@ class OpSet9():
"paddle.add", "paddle.add",
inputs={'x': mul_val_x, inputs={'x': mul_val_x,
'y': mul_val_y}, 'y': mul_val_y},
outputs=[node.layer_name]) outputs=[node.name])
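The Where mapping above builds element-wise select out of arithmetic rather than a dedicated kernel: the condition is cast to the data dtype, each branch is masked by multiplication, and the two products are summed. A minimal NumPy sketch of the identity it relies on:

    import numpy as np

    cond = np.array([True, False, True])
    x = np.array([1.0, 2.0, 3.0])
    y = np.array([10.0, 20.0, 30.0])
    # out = x * cast(cond) + y * cast(not cond)
    out = x * cond.astype(x.dtype) + y * np.logical_not(cond).astype(x.dtype)
    print(out)  # [ 1. 20.  3.]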
@print_mapping_info @print_mapping_info
def NonZero(self, node): def NonZero(self, node):
...@@ -1398,36 +1389,36 @@ class OpSet9(): ...@@ -1398,36 +1389,36 @@ class OpSet9():
if val_x_dim == 1: if val_x_dim == 1:
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.nonzero", "paddle.nonzero",
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=[self.get_node_name(val_x)]) outputs=[val_x.name])
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.transpose", "paddle.transpose",
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
                outputs=[node.layer_naem],                outputs=[node.name],
perm=[1, 0]) perm=[1, 0])
if val_x_dim > 1: if val_x_dim > 1:
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.nonzero", "paddle.nonzero",
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=[self.get_node_name(val_x)]) outputs=[val_x.name])
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.split", "paddle.split",
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=[self.get_node_name(val_x)], outputs=[val_x.name],
num_or_sections=1, num_or_sections=1,
axis=val_x_dim) axis=val_x_dim)
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.concat", "paddle.concat",
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=[node.layer_name]) outputs=[node.name])
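The transpose with perm=[1, 0] above reconciles index layouts: ONNX NonZero returns indices shaped (rank, N), while a Paddle/NumPy-style nonzero yields (N, rank). A small sketch, with NumPy standing in for the runtime:

    import numpy as np

    x = np.array([0, 3, 0, 5])
    per_hit = np.argwhere(x)  # shape (2, 1): one row per nonzero element
    per_axis = per_hit.T      # shape (1, 2): one row per axis, ONNX layout
    print(per_axis)           # [[1 3]]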
@print_mapping_info @print_mapping_info
def Identity(self, node): def Identity(self, node):
val_x = self.graph.get_input_node(node, idx=0, copy=True) val_x = self.graph.get_input_node(node, idx=0, copy=True)
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.assign", "paddle.assign",
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=[node.layer_name]) outputs=[node.name])
@print_mapping_info @print_mapping_info
def Tile(self, node): def Tile(self, node):
...@@ -1436,7 +1427,7 @@ class OpSet9(): ...@@ -1436,7 +1427,7 @@ class OpSet9():
repeats = _const_weight_or_none(val_repeats) repeats = _const_weight_or_none(val_repeats)
if repeats is None: if repeats is None:
repeats = val_repeats.layer_name repeats = val_repeats.name
if val_repeats.dtype != 'int32': if val_repeats.dtype != 'int32':
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.cast", "paddle.cast",
...@@ -1450,18 +1441,18 @@ class OpSet9(): ...@@ -1450,18 +1441,18 @@ class OpSet9():
attr = { attr = {
'expand_times': repeats, 'expand_times': repeats,
"name": string(node.layer_name), "name": string(node.name),
} }
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
"paddle.tile", "paddle.tile",
inputs={"x": self.get_node_name(val_x)}, inputs={"x": val_x.name},
outputs=[node.layer_name], outputs=[node.name],
repeat_times=repeats) repeat_times=repeats)
@print_mapping_info @print_mapping_info
def MaxPool(self, node): def MaxPool(self, node):
op_name = name_generator("pool", self.nn_name2id) op_name = name_generator("pool", self.nn_name2id)
output_name = node.layer_name output_name = node.name
layer_outputs = [op_name, output_name] layer_outputs = [op_name, output_name]
val_x = self.graph.get_input_node(node, idx=0, copy=True) val_x = self.graph.get_input_node(node, idx=0, copy=True)
auto_pad = node.get_attr('auto_pad', 'NOTSET') auto_pad = node.get_attr('auto_pad', 'NOTSET')
...@@ -1495,14 +1486,14 @@ class OpSet9(): ...@@ -1495,14 +1486,14 @@ class OpSet9():
} }
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
paddle_op, paddle_op,
inputs={'x': val_x if isinstance(val_x, str) else self.get_node_name(val_x)}, inputs={'x': val_x if isinstance(val_x, str) else val_x.name},
outputs=layer_outputs, outputs=layer_outputs,
**layer_attrs) **layer_attrs)
@print_mapping_info @print_mapping_info
def GlobalMaxPool(self, node): def GlobalMaxPool(self, node):
op_name = name_generator("pool", self.nn_name2id) op_name = name_generator("pool", self.nn_name2id)
output_name = node.layer_name output_name = node.name
layer_outputs = [op_name, output_name] layer_outputs = [op_name, output_name]
val_x = self.graph.get_input_node(node, idx=0, copy=True) val_x = self.graph.get_input_node(node, idx=0, copy=True)
input_shape = val_x.out_shapes[0] input_shape = val_x.out_shapes[0]
...@@ -1517,14 +1508,14 @@ class OpSet9(): ...@@ -1517,14 +1508,14 @@ class OpSet9():
output_shape = node.out_shapes[0] output_shape = node.out_shapes[0]
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
paddle_op, paddle_op,
inputs={'x': self.get_node_name(val_x)}, inputs={'x': val_x.name},
outputs=layer_outputs, outputs=layer_outputs,
output_size=output_shape[2:]) output_size=output_shape[2:])
@print_mapping_info @print_mapping_info
def GlobalAveragePool(self, node): def GlobalAveragePool(self, node):
op_name = name_generator("pool", self.nn_name2id) op_name = name_generator("pool", self.nn_name2id)
output_name = node.layer_name output_name = node.name
layer_outputs = [op_name, output_name] layer_outputs = [op_name, output_name]
val_x = self.graph.get_input_node(node, idx=0, copy=True) val_x = self.graph.get_input_node(node, idx=0, copy=True)
input_shape = val_x.out_shapes[0] input_shape = val_x.out_shapes[0]
...@@ -1539,14 +1530,14 @@ class OpSet9(): ...@@ -1539,14 +1530,14 @@ class OpSet9():
output_shape = node.out_shapes[0] output_shape = node.out_shapes[0]
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
paddle_op, paddle_op,
inputs={'x': self.get_node_name(val_x)}, inputs={'x': val_x.name},
outputs=layer_outputs, outputs=layer_outputs,
output_size=output_shape[2:]) output_size=output_shape[2:])
@print_mapping_info @print_mapping_info
def Conv(self, node): def Conv(self, node):
op_name = name_generator("conv", self.nn_name2id) op_name = name_generator("conv", self.nn_name2id)
output_name = node.layer_name output_name = node.name
layer_outputs = [op_name, output_name] layer_outputs = [op_name, output_name]
val_x = self.graph.get_input_node(node, idx=0, copy=True) val_x = self.graph.get_input_node(node, idx=0, copy=True)
val_w = self.graph.get_input_node(node, idx=1, copy=True) val_w = self.graph.get_input_node(node, idx=1, copy=True)
...@@ -1586,15 +1577,15 @@ class OpSet9(): ...@@ -1586,15 +1577,15 @@ class OpSet9():
"padding": paddings, "padding": paddings,
"dilation": dilations, "dilation": dilations,
"groups": num_groups, "groups": num_groups,
'weight_attr': string(val_w.layer_name), 'weight_attr': string(val_w.name),
} }
if has_bias: if has_bias:
layer_attrs["bias_attr"] = string(val_b.layer_name) layer_attrs["bias_attr"] = string(val_b.name)
else: else:
layer_attrs["bias_attr"] = False layer_attrs["bias_attr"] = False
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
paddle_op, paddle_op,
inputs={'x': val_x if isinstance(val_x, str) else self.get_node_name(val_x)}, inputs={'x': val_x if isinstance(val_x, str) else val_x.name},
outputs=layer_outputs, outputs=layer_outputs,
**layer_attrs) **layer_attrs)
...@@ -1640,16 +1631,16 @@ class OpSet9(): ...@@ -1640,16 +1631,16 @@ class OpSet9():
# 'stride': strides, # 'stride': strides,
# 'dilation': dilations, # 'dilation': dilations,
# 'groups': num_groups, # 'groups': num_groups,
# 'weight_attr': string(val_w.layer_name), # 'weight_attr': string(val_w.name),
# 'bias_attr': None if val_b is None else string(val_b.layer_name), # 'bias_attr': None if val_b is None else string(val_b.name),
# } # }
# self.paddle_graph.add_layer( # self.paddle_graph.add_layer(
# paddle_op, # paddle_op,
# inputs={"x": self.get_node_name(val_x)}, # inputs={"x": val_x.name},
# outputs=layer_outputs, # outputs=layer_outputs,
# **layer_attrs) # **layer_attrs)
inputs_dict = {'x': val_x if isinstance(val_x, str) else self.get_node_name(val_x), inputs_dict = {'x': val_x if isinstance(val_x, str) else val_x.name,
"weight": val_w.layer_name} "weight": val_w.name}
layer_attrs = { layer_attrs = {
"stride": strides, "stride": strides,
"dilation": dilations, "dilation": dilations,
...@@ -1657,11 +1648,11 @@ class OpSet9(): ...@@ -1657,11 +1648,11 @@ class OpSet9():
"groups": num_groups, "groups": num_groups,
"output_size": node.out_shapes[0][2:]} "output_size": node.out_shapes[0][2:]}
if val_b is not None: if val_b is not None:
inputs_dict["bias"] = val_b.layer_name inputs_dict["bias"] = val_b.name
else: else:
layer_attrs["bias"] = None layer_attrs["bias"] = None
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
kernel="paddle.nn.functional.conv2d_transpose", kernel="paddle.nn.functional.conv2d_transpose",
inputs=inputs_dict, inputs=inputs_dict,
outputs=[node.layer_name], outputs=[node.name],
**layer_attrs) **layer_attrs)
...@@ -12,7 +12,7 @@ ...@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from x2paddle.decoder.tf_decoder import TFGraph from x2paddle.decoder.tf_decoder import TFGraph, TFGraphNode
from x2paddle.core.program import PaddleGraph from x2paddle.core.program import PaddleGraph
from x2paddle.core.op_mapper import OpMapper from x2paddle.core.op_mapper import OpMapper
from x2paddle.core.util import * from x2paddle.core.util import *
...@@ -58,10 +58,9 @@ class TFOpMapper(OpMapper): ...@@ -58,10 +58,9 @@ class TFOpMapper(OpMapper):
'swish_f32': ['paddle.nn.Swish'], 'swish_f32': ['paddle.nn.Swish'],
'Tanh': ['paddle.nn.Tanh'], 'Tanh': ['paddle.nn.Tanh'],
'Softplus': ['paddle.nn.Softplus'], 'Softplus': ['paddle.nn.Softplus'],
'LeakyRelu': ['paddle.nn.LeakyReLU', { 'LeakyRelu': ['paddle.nn.LeakyReLU',
'alpha': 'negative_slope' dict(alpha='negative_slope')],
}], 'Softmax': ['paddle.nn.Softmax'],
'Softmax': ['paddle.nn.Softmax', {'axis': 'axis'}],
'Floor': ['paddle.floor'], 'Floor': ['paddle.floor'],
'Erf': ['paddle.erf'], 'Erf': ['paddle.erf'],
'Square': ['paddle.square'] 'Square': ['paddle.square']
...@@ -83,12 +82,14 @@ class TFOpMapper(OpMapper): ...@@ -83,12 +82,14 @@ class TFOpMapper(OpMapper):
super(TFOpMapper, self).__init__() super(TFOpMapper, self).__init__()
self.decoder = decoder self.decoder = decoder
self.graph = decoder.tf_graph self.graph = decoder.tf_graph
if not self.op_checker():
raise Exception("Model is not supported yet.")
self.params = dict() self.params = dict()
self.nn_name2id = dict() self.nn_name2id = dict()
self.input_index = 0 self.input_index = 0
self.paddle_graph = PaddleGraph(parent_layer=None, graph_type="dygraph", source_type="tf")
self.used_custom_layers = dict()
self.inputs_info = dict() self.inputs_info = dict()
self.paddle_graph = PaddleGraph(parent_layer=None, graph_type="dygraph", source_type="tf")
self.paddle_graph.outputs = self.graph.output_nodes
not_placeholder = list() not_placeholder = list()
for name in self.graph.input_nodes: for name in self.graph.input_nodes:
...@@ -102,80 +103,81 @@ class TFOpMapper(OpMapper): ...@@ -102,80 +103,81 @@ class TFOpMapper(OpMapper):
idx = self.graph.input_nodes.index(name) idx = self.graph.input_nodes.index(name)
del self.graph.input_nodes[idx] del self.graph.input_nodes[idx]
self.paddle_graph.outputs = self.graph.output_nodes print("Total nodes: {}".format(
sum([
unsupported_ops = set() isinstance(node, TFGraphNode)
sys.stderr.write("Total nodes: {}\n".format(len(self.graph.topo_sort))) for name, node in self.graph.node_map.items()
])))
print("Nodes converting ...")
for i, node_name in enumerate(self.graph.topo_sort): for i, node_name in enumerate(self.graph.topo_sort):
sys.stderr.write("\rConverting node {} ... ".format(i + 1)) sys.stderr.write("\rConverting node {} ... ".format(i + 1))
node = self.graph.get_node(node_name) node = self.graph.get_node(node_name)
op = node.layer_type op = node.layer_type
if op in self.directly_map_ops: if op in self.directly_map_ops:
if len(unsupported_ops) > 0:
continue
self.directly_map(node) self.directly_map(node)
elif op in self.elementwise_ops: elif op in self.elementwise_ops:
if len(unsupported_ops) > 0:
continue
self.elementwise_map(node) self.elementwise_map(node)
elif hasattr(self, op): elif hasattr(self, op):
if len(unsupported_ops) > 0:
continue
func = getattr(self, op) func = getattr(self, op)
try:
func(node) func(node)
except Exception as e: print("\nNodes converted.")
self.paddle_graph.set_name(self.graph.graph_name)
self.paddle_graph.set_parameters(self.params)
self.paddle_graph.set_inputs_info(self.inputs_info)
def op_checker(self):
unsupported_ops = set()
for node_name in self.graph.topo_sort:
node = self.graph.get_node(node_name)
op = node.layer_type
if not hasattr(self, op) and \
op not in self.directly_map_ops and \
op not in self.elementwise_ops:
unsupported_ops.add(op) unsupported_ops.add(op)
print("\n{}\n".format(traceback.format_exc())) if len(unsupported_ops) == 0:
return True
else: else:
unsupported_ops.add(op)
if len(unsupported_ops) > 0: if len(unsupported_ops) > 0:
print("\n========= {} OPs are not supported yet ===========".format( print("\n========= {} OPs are not supported yet ===========".format(
len(unsupported_ops))) len(unsupported_ops)))
for op in unsupported_ops: for op in unsupported_ops:
print("========== {} ============".format(op)) print("========== {} ============".format(op))
sys.exit(-1) return False
sys.stderr.write("\nDone!\n")
self.paddle_graph.set_name(self.graph.graph_name)
self.paddle_graph.set_parameters(self.params)
self.paddle_graph.set_inputs_info(self.inputs_info)
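The new op_checker() runs before any conversion starts, so a model containing several unsupported layers reports all of them at once instead of aborting on the first one mid-conversion. A standalone sketch of the pattern (check_ops and its arguments are illustrative names, not part of the commit):

    def check_ops(op_types, supported):
        # Collect every unsupported op in one pass, then report together.
        unsupported = {op for op in op_types if op not in supported}
        if unsupported:
            print("========= {} OPs are not supported yet ===========".format(
                len(unsupported)))
            for op in unsupported:
                print("========== {} ============".format(op))
        return not unsupported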
def directly_map(self, node): def directly_map(self, node):
assert node.layer_type in self.directly_map_ops inputs = node.layer.input
                                                                                                        assert len(inputs) == 1, 'directly_map error with multiple inputs'
op_info = self.directly_map_ops[node.layer_type] op_info = self.directly_map_ops[node.layer_type]
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
paddle_op = op_info[0]
layer_attrs = dict() layer_attrs = dict()
for param in op_info[1:]: if len(op_info) > 1:
tf_param_name = list(param.keys())[0] attrs_name_map_dict = op_info[1]
            pd_param_name = list(param.values())[0]            for tf_attr_name, pd_attr_name in attrs_name_map_dict.items():
tf_param = node.get_attr(tf_param_name) layer_attrs[pd_attr_name] = node.get_attr(tf_attr_name)
layer_attrs[pd_param_name] = tf_param if paddle_op.startswith("paddle.nn"):
op_name = paddle_op[10:].lower()
if op_info[0].startswith("paddle.nn"):
op_name = op_info[0][10:].lower()
op_name = name_generator(op_name, self.nn_name2id) op_name = name_generator(op_name, self.nn_name2id)
output_name = node.name output_name = node.name
layer_outputs = [op_name, output_name] layer_outputs = [op_name, output_name]
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
kernel=op_info[0], kernel=paddle_op,
inputs={"x": input.name}, inputs={"x": input.name},
outputs=layer_outputs, outputs=layer_outputs,
**layer_attrs) **layer_attrs)
else: else:
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
kernel=op_info[0], kernel=paddle_op,
inputs={"x": input.name}, inputs={"x": input.name},
outputs=[node.name], outputs=[node.name],
**layer_attrs) **layer_attrs)
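To illustrate how the reworked directly_map consumes a directly_map_ops entry: the optional second element maps each TF attribute name to the Paddle keyword it becomes. A minimal sketch using the LeakyRelu entry above (the attribute value 0.1 is made up):

    op_info = ['paddle.nn.LeakyReLU', dict(alpha='negative_slope')]
    paddle_op = op_info[0]
    node_attrs = {'alpha': 0.1}  # stand-in for node.get_attr(tf_attr_name)
    layer_attrs = {pd_attr: node_attrs[tf_attr]
                   for tf_attr, pd_attr in op_info[1].items()}
    assert layer_attrs == {'negative_slope': 0.1}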
def elementwise_map(self, node): def elementwise_map(self, node):
assert node.layer_type in self.elementwise_ops
op_type = self.elementwise_ops[node.layer_type] op_type = self.elementwise_ops[node.layer_type]
x = self.graph.get_node(node.layer.input[0]) x = self.graph.get_input_node(node, 0)
y = self.graph.get_node(node.layer.input[1]) y = self.graph.get_input_node(node, 1)
x_shape = x.out_shapes[0] x_shape = x.out_shapes[0]
y_shape = y.out_shapes[0] y_shape = y.out_shapes[0]
layer_id = self.paddle_graph.add_layer( layer_id = self.paddle_graph.add_layer(
kernel=op_type, kernel=op_type,
inputs={"x": x.name, inputs={"x": x.name,
...@@ -184,8 +186,8 @@ class TFOpMapper(OpMapper): ...@@ -184,8 +186,8 @@ class TFOpMapper(OpMapper):
self.paddle_graph.layers[layer_id].input_shapes = {"x": x_shape, "y": y_shape} self.paddle_graph.layers[layer_id].input_shapes = {"x": x_shape, "y": y_shape}
def NotEqual(self, node): def NotEqual(self, node):
x = self.graph.get_node(node.layer.input[0]) x = self.graph.get_input_node(node, 0)
y = self.graph.get_node(node.layer.input[1]) y = self.graph.get_input_node(node, 1)
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
kernel="paddle.not_equal", kernel="paddle.not_equal",
...@@ -236,8 +238,8 @@ class TFOpMapper(OpMapper): ...@@ -236,8 +238,8 @@ class TFOpMapper(OpMapper):
default_initializer="paddle.nn.initializer.Constant(value=0.0)") default_initializer="paddle.nn.initializer.Constant(value=0.0)")
def Transpose(self, node): def Transpose(self, node):
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
perm = self.graph.get_node(node.layer.input[1]) perm = self.graph.get_input_node(node, 1)
assert perm.layer_type == "Const", "Perm of transpose OP should be Const" assert perm.layer_type == "Const", "Perm of transpose OP should be Const"
perm = perm.value.tolist() perm = perm.value.tolist()
...@@ -248,8 +250,8 @@ class TFOpMapper(OpMapper): ...@@ -248,8 +250,8 @@ class TFOpMapper(OpMapper):
perm=perm) perm=perm)
def Fill(self, node): def Fill(self, node):
dims = self.graph.get_node(node.layer.input[0]) dims = self.graph.get_input_node(node, 0)
input_value = self.graph.get_node(node.layer.input[1]) input_value = self.graph.get_input_node(node, 1)
inputs = dict() inputs = dict()
layer_attrs = dict() layer_attrs = dict()
assert input_value.layer_type == "Const", "Value of fill OP should be Const" assert input_value.layer_type == "Const", "Value of fill OP should be Const"
...@@ -268,7 +270,7 @@ class TFOpMapper(OpMapper): ...@@ -268,7 +270,7 @@ class TFOpMapper(OpMapper):
**layer_attrs) **layer_attrs)
def DepthToSpace(self, node): def DepthToSpace(self, node):
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
block_size = node.get_attr("block_size") block_size = node.get_attr("block_size")
data_format = node.get_attr("data_format").decode() data_format = node.get_attr("data_format").decode()
...@@ -323,7 +325,7 @@ class TFOpMapper(OpMapper): ...@@ -323,7 +325,7 @@ class TFOpMapper(OpMapper):
perm=[0, 2, 3, 1]) perm=[0, 2, 3, 1])
def MaxPool(self, node): def MaxPool(self, node):
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
k_size = node.get_attr("ksize") k_size = node.get_attr("ksize")
strides = node.get_attr("strides") strides = node.get_attr("strides")
...@@ -365,8 +367,8 @@ class TFOpMapper(OpMapper): ...@@ -365,8 +367,8 @@ class TFOpMapper(OpMapper):
op_name = name_generator("conv", self.nn_name2id) op_name = name_generator("conv", self.nn_name2id)
output_name = node.name output_name = node.name
layer_outputs = [op_name, output_name] layer_outputs = [op_name, output_name]
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
kernel = self.graph.get_node(node.layer.input[1]) kernel = self.graph.get_input_node(node, 1)
k_size = kernel.out_shapes[0] k_size = kernel.out_shapes[0]
strides = node.get_attr("strides") strides = node.get_attr("strides")
...@@ -381,7 +383,7 @@ class TFOpMapper(OpMapper): ...@@ -381,7 +383,7 @@ class TFOpMapper(OpMapper):
if kernel.layer_type == 'Const': if kernel.layer_type == 'Const':
kernel_value = kernel.value kernel_value = kernel.value
else: else:
kernel_value = self.decoder.infer_tensor(kernel) kernel_value = self.decoder.infer_tensor(kernel, use_diff_inputs=False)
kernel_weight_name = op_name + ".weight" kernel_weight_name = op_name + ".weight"
self.params[kernel_weight_name] = numpy.transpose(kernel_value, self.params[kernel_weight_name] = numpy.transpose(kernel_value,
(3, 2, 0, 1)) (3, 2, 0, 1))
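The (3, 2, 0, 1) transpose above converts TF's HWIO kernel layout (height, width, in-channels, out-channels) into the OIHW layout Paddle's convolution expects. A quick NumPy check with illustrative sizes:

    import numpy as np

    hwio = np.zeros((3, 3, 16, 32))          # 3x3 kernel, 16 in, 32 out
    oihw = np.transpose(hwio, (3, 2, 0, 1))
    print(oihw.shape)                        # (32, 16, 3, 3)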
...@@ -428,8 +430,8 @@ class TFOpMapper(OpMapper): ...@@ -428,8 +430,8 @@ class TFOpMapper(OpMapper):
perm=[0, 2, 3, 1]) perm=[0, 2, 3, 1])
def BiasAdd(self, node): def BiasAdd(self, node):
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
bias = self.graph.get_node(node.layer.input[1]) bias = self.graph.get_input_node(node, 1)
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
kernel="paddle.add", kernel="paddle.add",
inputs={"x": input.name, inputs={"x": input.name,
...@@ -440,12 +442,12 @@ class TFOpMapper(OpMapper): ...@@ -440,12 +442,12 @@ class TFOpMapper(OpMapper):
op_name = name_generator("bn", self.nn_name2id) op_name = name_generator("bn", self.nn_name2id)
output_name = node.name output_name = node.name
layer_outputs = [op_name, output_name] layer_outputs = [op_name, output_name]
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
gamma = self.graph.get_node(node.layer.input[1]) gamma = self.graph.get_input_node(node, 1)
beta = self.graph.get_node(node.layer.input[2]) beta = self.graph.get_input_node(node, 2)
moving_mean = self.graph.get_node(node.layer.input[3]) moving_mean = self.graph.get_input_node(node, 3)
moving_var = self.graph.get_node(node.layer.input[4]) moving_var = self.graph.get_input_node(node, 4)
data_format = node.get_attr("data_format").decode() data_format = node.get_attr("data_format").decode()
assert gamma.layer_type == "Const" assert gamma.layer_type == "Const"
...@@ -490,8 +492,8 @@ class TFOpMapper(OpMapper): ...@@ -490,8 +492,8 @@ class TFOpMapper(OpMapper):
perm=[0, 2, 3, 1]) perm=[0, 2, 3, 1])
def Mean(self, node): def Mean(self, node):
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
reduce_idx = self.graph.get_node(node.layer.input[1]) reduce_idx = self.graph.get_input_node(node, 1)
assert reduce_idx.layer_type == "Const", "Only support Const parameter[reduce_idx]" assert reduce_idx.layer_type == "Const", "Only support Const parameter[reduce_idx]"
dims = reduce_idx.value.tolist() dims = reduce_idx.value.tolist()
keep_dims = node.get_attr("keep_dims") keep_dims = node.get_attr("keep_dims")
...@@ -504,8 +506,8 @@ class TFOpMapper(OpMapper): ...@@ -504,8 +506,8 @@ class TFOpMapper(OpMapper):
keepdim=keep_dims) keepdim=keep_dims)
def Reshape(self, node): def Reshape(self, node):
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
param = self.graph.get_node(node.layer.input[1]) param = self.graph.get_input_node(node, 1)
input_name = input.name input_name = input.name
...@@ -533,8 +535,8 @@ class TFOpMapper(OpMapper): ...@@ -533,8 +535,8 @@ class TFOpMapper(OpMapper):
shape=out_shape.tolist()) shape=out_shape.tolist())
def Pad(self, node): def Pad(self, node):
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
paddings = self.graph.get_node(node.layer.input[1]) paddings = self.graph.get_input_node(node, 1)
assert paddings.layer_type == "Const", "Padding should be Const" assert paddings.layer_type == "Const", "Padding should be Const"
paddings = paddings.value.flatten().tolist() paddings = paddings.value.flatten().tolist()
...@@ -566,7 +568,7 @@ class TFOpMapper(OpMapper): ...@@ -566,7 +568,7 @@ class TFOpMapper(OpMapper):
pad=paddings) pad=paddings)
def Squeeze(self, node): def Squeeze(self, node):
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
squeeze_dims = node.get_attr('squeeze_dims') squeeze_dims = node.get_attr('squeeze_dims')
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
kernel="paddle.squeeze", kernel="paddle.squeeze",
...@@ -575,7 +577,7 @@ class TFOpMapper(OpMapper): ...@@ -575,7 +577,7 @@ class TFOpMapper(OpMapper):
axis=squeeze_dims) axis=squeeze_dims)
def Shape(self, node): def Shape(self, node):
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
input_name = input.name input_name = input.name
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
kernel="paddle.shape", kernel="paddle.shape",
...@@ -583,8 +585,8 @@ class TFOpMapper(OpMapper): ...@@ -583,8 +585,8 @@ class TFOpMapper(OpMapper):
outputs=[node.name]) outputs=[node.name])
def ArgMax(self, node): def ArgMax(self, node):
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
axis = self.graph.get_node(node.layer.input[1]) axis = self.graph.get_input_node(node, 1)
        assert axis.layer_type == "Const", "ArgMax only supports Const parameter"        assert axis.layer_type == "Const", "ArgMax only supports Const parameter"
axis = axis.value axis = axis.value
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
...@@ -594,8 +596,8 @@ class TFOpMapper(OpMapper): ...@@ -594,8 +596,8 @@ class TFOpMapper(OpMapper):
axis=axis) axis=axis)
def MatMul(self, node): def MatMul(self, node):
x = self.graph.get_node(node.layer.input[0]) x = self.graph.get_input_node(node, 0)
y = self.graph.get_node(node.layer.input[1]) y = self.graph.get_input_node(node, 1)
transpose_a = node.get_attr('transpose_a') transpose_a = node.get_attr('transpose_a')
transpose_b = node.get_attr('transpose_b') transpose_b = node.get_attr('transpose_b')
if transpose_a is None: if transpose_a is None:
...@@ -620,8 +622,8 @@ class TFOpMapper(OpMapper): ...@@ -620,8 +622,8 @@ class TFOpMapper(OpMapper):
op_name = name_generator("conv", self.nn_name2id) op_name = name_generator("conv", self.nn_name2id)
output_name = node.name output_name = node.name
layer_outputs = [op_name, output_name] layer_outputs = [op_name, output_name]
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
kernel = self.graph.get_node(node.layer.input[1]) kernel = self.graph.get_input_node(node, 1)
assert kernel.layer_type == "Const", "Kernel of DepthwiseConv2DNative should be Const" assert kernel.layer_type == "Const", "Kernel of DepthwiseConv2DNative should be Const"
in_shape = input.out_shapes[0] in_shape = input.out_shapes[0]
...@@ -671,7 +673,7 @@ class TFOpMapper(OpMapper): ...@@ -671,7 +673,7 @@ class TFOpMapper(OpMapper):
perm=[0, 2, 3, 1]) perm=[0, 2, 3, 1])
def AvgPool(self, node): def AvgPool(self, node):
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
k_size = node.get_attr("ksize") k_size = node.get_attr("ksize")
strides = node.get_attr("strides") strides = node.get_attr("strides")
...@@ -720,8 +722,10 @@ class TFOpMapper(OpMapper): ...@@ -720,8 +722,10 @@ class TFOpMapper(OpMapper):
perm=[0, 2, 3, 1]) perm=[0, 2, 3, 1])
def Pack(self, node): def Pack(self, node):
inputs = [self.graph.get_node(name) for name in node.layer.input] inputs_list = list()
input_names = [i.name for i in inputs] for i in range(len(node.inputs)):
inputs_list.append(self.graph.get_input_node(node, i))
input_names = [i.name for i in inputs_list]
axis = node.get_attr("axis") axis = node.get_attr("axis")
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
kernel="paddle.stack", kernel="paddle.stack",
...@@ -736,7 +740,7 @@ class TFOpMapper(OpMapper): ...@@ -736,7 +740,7 @@ class TFOpMapper(OpMapper):
shape=[-1]) shape=[-1])
def Unpack(self, node): def Unpack(self, node):
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
axis = node.get_attr("axis") axis = node.get_attr("axis")
num = node.get_attr("num") num = node.get_attr("num")
shape = input.out_shapes[0] shape = input.out_shapes[0]
...@@ -760,14 +764,16 @@ class TFOpMapper(OpMapper): ...@@ -760,14 +764,16 @@ class TFOpMapper(OpMapper):
num=num) num=num)
def ConcatV2(self, node): def ConcatV2(self, node):
inputs = [self.graph.get_node(name) for name in node.layer.input[:-1]] inputs_list = list()
axis = self.graph.get_node(node.layer.input[-1]) for i in range(len(node.inputs) - 1):
inputs_list.append(self.graph.get_input_node(node, i))
axis = self.graph.get_input_node(node, -1)
assert axis.layer_type == "Const", "axis for ConcatV2 must be type Const" assert axis.layer_type == "Const", "axis for ConcatV2 must be type Const"
axis = axis.value axis = axis.value
if axis < 0: if axis < 0:
            axis += len(inputs[0].out_shapes[0])            axis += len(inputs_list[0].out_shapes[0])
input_names = [i.name for i in inputs] input_names = [i.name for i in inputs_list]
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
kernel="paddle.concat", kernel="paddle.concat",
inputs={"x": input_names}, inputs={"x": input_names},
...@@ -775,23 +781,23 @@ class TFOpMapper(OpMapper): ...@@ -775,23 +781,23 @@ class TFOpMapper(OpMapper):
axis=axis) axis=axis)
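ConcatV2 normalizes a negative axis against the rank of the first input before emitting paddle.concat; the same idiom recurs in several mappers. A tiny sketch:

    rank = 4          # rank of the first input's shape
    axis = -1
    if axis < 0:
        axis += rank  # -1 on a rank-4 tensor becomes axis 3
    assert axis == 3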
def StridedSlice(self, node): def StridedSlice(self, node):
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
begin = self.graph.get_node(node.layer.input[1]) begin = self.graph.get_input_node(node, 1)
end = self.graph.get_node(node.layer.input[2]) end = self.graph.get_input_node(node, 2)
strides = self.graph.get_node(node.layer.input[3]) strides = self.graph.get_input_node(node, 3)
if strides.layer_type == "Const": if strides.layer_type == "Const":
strides = strides.value.tolist() strides = strides.value.tolist()
else: else:
strides = self.decoder.infer_shape_tensor(strides) strides = self.decoder.infer_tensor(strides)
if begin.layer_type == "Const": if begin.layer_type == "Const":
begin = begin.value.tolist() begin = begin.value.tolist()
else: else:
begin = self.decoder.infer_shape_tensor(begin) begin = self.decoder.infer_tensor(begin)
if end.layer_type == "Const": if end.layer_type == "Const":
end = end.value.tolist() end = end.value.tolist()
else: else:
end = self.decoder.infer_shape_tensor(end) end = self.decoder.infer_tensor(end)
assert len(set(strides)) == 1 and strides[ assert len(set(strides)) == 1 and strides[
            0] == 1, "Only strides of 1 are supported in StridedSlice OP"            0] == 1, "Only strides of 1 are supported in StridedSlice OP"
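StridedSlice (and the Conv2DBackpropInput mapper further down) repeatedly applies a const-or-infer fallback: take the literal value when the node is a Const, otherwise evaluate the tensor through the decoder. A hedged sketch of the pattern, with const_or_infer as a hypothetical helper:

    def const_or_infer(node, infer_tensor):
        # Prefer the compile-time constant; otherwise run the graph for it.
        if node.layer_type == "Const":
            return node.value.tolist()
        return infer_tensor(node)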
...@@ -865,8 +871,8 @@ class TFOpMapper(OpMapper): ...@@ -865,8 +871,8 @@ class TFOpMapper(OpMapper):
axis=shrink_axes) axis=shrink_axes)
def Split(self, node): def Split(self, node):
dim = self.graph.get_node(node.layer.input[0]) dim = self.graph.get_input_node(node, 0)
input = self.graph.get_node(node.layer.input[1]) input = self.graph.get_input_node(node, 1)
assert dim.layer_type == "Const" assert dim.layer_type == "Const"
num_split = node.get_attr('num_split') num_split = node.get_attr('num_split')
dim = dim.value dim = dim.value
...@@ -881,9 +887,9 @@ class TFOpMapper(OpMapper): ...@@ -881,9 +887,9 @@ class TFOpMapper(OpMapper):
axis=dim) axis=dim)
def Slice(self, node): def Slice(self, node):
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
begin = self.graph.get_node(node.layer.input[1]) begin = self.graph.get_input_node(node, 1)
size = self.graph.get_node(node.layer.input[2]) size = self.graph.get_input_node(node, 2)
inputs = {"x": input.name} inputs = {"x": input.name}
attrs = {} attrs = {}
...@@ -920,8 +926,8 @@ class TFOpMapper(OpMapper): ...@@ -920,8 +926,8 @@ class TFOpMapper(OpMapper):
**attrs) **attrs)
def ResizeNearestNeighbor(self, node): def ResizeNearestNeighbor(self, node):
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
resize_shape = self.graph.get_node(node.layer.input[1]) resize_shape = self.graph.get_input_node(node, 1)
data_format = "NHWC" data_format = "NHWC"
inputs = {"x": input.name} inputs = {"x": input.name}
attrs = {"align_corners": node.get_attr("align_corners"), attrs = {"align_corners": node.get_attr("align_corners"),
...@@ -964,8 +970,8 @@ class TFOpMapper(OpMapper): ...@@ -964,8 +970,8 @@ class TFOpMapper(OpMapper):
perm=[0, 2, 3, 1]) perm=[0, 2, 3, 1])
def ResizeBilinear(self, node): def ResizeBilinear(self, node):
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
resize_shape = self.graph.get_node(node.layer.input[1]) resize_shape = self.graph.get_input_node(node, 1)
data_format = "NHWC" data_format = "NHWC"
inputs = {"x": input.name} inputs = {"x": input.name}
attrs = {"align_corners": node.get_attr("align_corners"), attrs = {"align_corners": node.get_attr("align_corners"),
...@@ -1008,7 +1014,7 @@ class TFOpMapper(OpMapper): ...@@ -1008,7 +1014,7 @@ class TFOpMapper(OpMapper):
perm=[0, 2, 3, 1]) perm=[0, 2, 3, 1])
def Cast(self, node): def Cast(self, node):
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
dtype = node.dtype dtype = node.dtype
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
kernel="paddle.cast", kernel="paddle.cast",
...@@ -1017,8 +1023,8 @@ class TFOpMapper(OpMapper): ...@@ -1017,8 +1023,8 @@ class TFOpMapper(OpMapper):
dtype=string(dtype)) dtype=string(dtype))
def Sum(self, node): def Sum(self, node):
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
reduce_idx = self.graph.get_node(node.layer.input[1]) reduce_idx = self.graph.get_input_node(node, 1)
assert reduce_idx.layer_type == "Const", "Only support Const parameter[reduce_idx]" assert reduce_idx.layer_type == "Const", "Only support Const parameter[reduce_idx]"
keep_dims = node.get_attr("keep_dims") keep_dims = node.get_attr("keep_dims")
dim = reduce_idx.value.tolist() dim = reduce_idx.value.tolist()
...@@ -1031,8 +1037,8 @@ class TFOpMapper(OpMapper): ...@@ -1031,8 +1037,8 @@ class TFOpMapper(OpMapper):
keepdim=keep_dims) keepdim=keep_dims)
def Max(self, node): def Max(self, node):
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
reduce_idx = self.graph.get_node(node.layer.input[1]) reduce_idx = self.graph.get_input_node(node, 1)
assert reduce_idx.layer_type == "Const", "Only support Const parameter[reduce_idx]" assert reduce_idx.layer_type == "Const", "Only support Const parameter[reduce_idx]"
keep_dims = node.get_attr("keep_dims") keep_dims = node.get_attr("keep_dims")
dim = reduce_idx.value.tolist() dim = reduce_idx.value.tolist()
...@@ -1044,7 +1050,7 @@ class TFOpMapper(OpMapper): ...@@ -1044,7 +1050,7 @@ class TFOpMapper(OpMapper):
keepdim=keep_dims) keepdim=keep_dims)
def RandomUniform(self, node): def RandomUniform(self, node):
shape = self.graph.get_node(node.layer.input[0]) shape = self.graph.get_input_node(node, 0)
if shape.layer_type == "Const": if shape.layer_type == "Const":
shape = shape.value.tolist() shape = shape.value.tolist()
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
...@@ -1066,24 +1072,24 @@ class TFOpMapper(OpMapper): ...@@ -1066,24 +1072,24 @@ class TFOpMapper(OpMapper):
op_name = name_generator("conv", self.nn_name2id) op_name = name_generator("conv", self.nn_name2id)
output_name = node.name output_name = node.name
layer_outputs = [op_name, output_name] layer_outputs = [op_name, output_name]
out_shape = self.graph.get_node(node.layer.input[0]) out_shape = self.graph.get_input_node(node, 0)
kernel = self.graph.get_node(node.layer.input[1]) kernel = self.graph.get_input_node(node, 1)
input = self.graph.get_node(node.layer.input[2]) input = self.graph.get_input_node(node, 2)
assert kernel.layer_type == "Const", "Kernel of Conv2DBackpropInput should be Const" assert kernel.layer_type == "Const", "Kernel of Conv2DBackpropInput should be Const"
if out_shape.layer_type == "Const": if out_shape.layer_type == "Const":
out_shape = out_shape.value.tolist() out_shape = out_shape.value.tolist()
else: else:
out_shape = self.decoder.infer_shape_tensor(out_shape, out_shape = self.decoder.infer_tensor(out_shape,
node.out_shapes[0]) out_shape=node.out_shapes[0])
in_shape = input.out_shapes[0] in_shape = input.out_shapes[0]
if in_shape.count(-1) > 2: if in_shape.count(-1) > 2:
in_shape = self.decoder.infer_tensor(input).shape in_shape = self.decoder.infer_tensor(input, use_diff_inputs=False).shape
k_size = kernel.out_shapes[0] k_size = kernel.out_shapes[0]
if k_size.count(-1) > 2: if k_size.count(-1) > 2:
k_size = self.decoder.infer_tensor(kernel).shape k_size = self.decoder.infer_tensor(kernel, use_diff_inputs=False).shape
pad_mode = node.get_attr("padding").decode() pad_mode = node.get_attr("padding").decode()
strides = node.get_attr("strides") strides = node.get_attr("strides")
...@@ -1145,8 +1151,8 @@ class TFOpMapper(OpMapper): ...@@ -1145,8 +1151,8 @@ class TFOpMapper(OpMapper):
perm=[0, 2, 3, 1]) perm=[0, 2, 3, 1])
def Tile(self, node): def Tile(self, node):
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
expand_times = self.graph.get_node(node.layer.input[1]) expand_times = self.graph.get_input_node(node, 1)
inputs = {"x": input.name} inputs = {"x": input.name}
attr = dict() attr = dict()
in_shape = input.out_shapes[0] in_shape = input.out_shapes[0]
...@@ -1163,9 +1169,9 @@ class TFOpMapper(OpMapper): ...@@ -1163,9 +1169,9 @@ class TFOpMapper(OpMapper):
**attr) **attr)
def Range(self, node): def Range(self, node):
start = self.graph.get_node(node.layer.input[0]) start = self.graph.get_input_node(node, 0)
limit = self.graph.get_node(node.layer.input[1]) limit = self.graph.get_input_node(node, 1)
delta = self.graph.get_node(node.layer.input[2]) delta = self.graph.get_input_node(node, 2)
inputs = dict() inputs = dict()
attr = dict() attr = dict()
...@@ -1198,8 +1204,8 @@ class TFOpMapper(OpMapper): ...@@ -1198,8 +1204,8 @@ class TFOpMapper(OpMapper):
**attr) **attr)
def SquaredDifference(self, node): def SquaredDifference(self, node):
x = self.graph.get_node(node.layer.input[0]) x = self.graph.get_input_node(node, 0)
y = self.graph.get_node(node.layer.input[1]) y = self.graph.get_input_node(node, 1)
inputs = {"x": x.name, "y": y.name} inputs = {"x": x.name, "y": y.name}
x_shape = x.out_shapes[0] x_shape = x.out_shapes[0]
y_shape = y.out_shapes[0] y_shape = y.out_shapes[0]
...@@ -1215,10 +1221,10 @@ class TFOpMapper(OpMapper): ...@@ -1215,10 +1221,10 @@ class TFOpMapper(OpMapper):
self.paddle_graph.layers[layer_id].input_shapes = {"x": x_shape, "y": y_shape} self.paddle_graph.layers[layer_id].input_shapes = {"x": x_shape, "y": y_shape}
def OneHot(self, node): def OneHot(self, node):
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
depth = self.graph.get_node(node.layer.input[1]) depth = self.graph.get_input_node(node, 1)
on_value = self.graph.get_node(node.layer.input[2]) on_value = self.graph.get_input_node(node, 2)
off_value = self.graph.get_node(node.layer.input[3]) off_value = self.graph.get_input_node(node, 3)
assert depth.layer_type == 'Const', 'Parameter depth should be Const in OneHot' assert depth.layer_type == 'Const', 'Parameter depth should be Const in OneHot'
assert on_value.layer_type == 'Const', 'Parameter on_value should be Const in OneHot' assert on_value.layer_type == 'Const', 'Parameter on_value should be Const in OneHot'
assert off_value.layer_type == 'Const', 'Parameter off_value should be Const in OneHot' assert off_value.layer_type == 'Const', 'Parameter off_value should be Const in OneHot'
...@@ -1238,8 +1244,8 @@ class TFOpMapper(OpMapper): ...@@ -1238,8 +1244,8 @@ class TFOpMapper(OpMapper):
num_classes=depth.value) num_classes=depth.value)
def Pow(self, node): def Pow(self, node):
x = self.graph.get_node(node.layer.input[0]) x = self.graph.get_input_node(node, 0)
factor = self.graph.get_node(node.layer.input[1]) factor = self.graph.get_input_node(node, 1)
inputs = {"x": x.name} inputs = {"x": x.name}
attr = dict() attr = dict()
if factor.layer_type == 'Const': if factor.layer_type == 'Const':
...@@ -1250,8 +1256,8 @@ class TFOpMapper(OpMapper): ...@@ -1250,8 +1256,8 @@ class TFOpMapper(OpMapper):
"paddle.pow", inputs=inputs, outputs=[node.name], **attr) "paddle.pow", inputs=inputs, outputs=[node.name], **attr)
def All(self, node): def All(self, node):
input = self.graph.get_node(node.layer.input[0]) input = self.graph.get_input_node(node, 0)
reduce_idx = self.graph.get_node(node.layer.input[1]) reduce_idx = self.graph.get_input_node(node, 1)
assert reduce_idx.layer_type == "Const", "Only support Const parameter[reduce_idx]" assert reduce_idx.layer_type == "Const", "Only support Const parameter[reduce_idx]"
attr = dict() attr = dict()
attr["axis"] = reduce_idx.value.tolist() attr["axis"] = reduce_idx.value.tolist()
...@@ -1274,9 +1280,9 @@ class TFOpMapper(OpMapper): ...@@ -1274,9 +1280,9 @@ class TFOpMapper(OpMapper):
node.layer.attr['dtype'].type = 10 node.layer.attr['dtype'].type = 10
def GatherV2(self, node): def GatherV2(self, node):
embeddings = self.graph.get_node(node.layer.input[0]) embeddings = self.graph.get_input_node(node, 0)
index = self.graph.get_node(node.layer.input[1]) index = self.graph.get_input_node(node, 1)
axis = self.graph.get_node(node.layer.input[2]) axis = self.graph.get_input_node(node, 2)
assert axis.layer_type == 'Const', "Only support Const parameter[axis]" assert axis.layer_type == 'Const', "Only support Const parameter[axis]"
axis = axis.value.tolist() axis = axis.value.tolist()
assert axis == 0, "Only support axis=0 in GatherV2 OP" assert axis == 0, "Only support axis=0 in GatherV2 OP"
...@@ -1303,8 +1309,8 @@ class TFOpMapper(OpMapper): ...@@ -1303,8 +1309,8 @@ class TFOpMapper(OpMapper):
shape=out_shape) shape=out_shape)
def ExpandDims(self, node): def ExpandDims(self, node):
x = self.graph.get_node(node.layer.input[0], copy=True) x = self.graph.get_input_node(node, 0, copy=True)
y = self.graph.get_node(node.layer.input[1], copy=True) y = self.graph.get_input_node(node, 1, copy=True)
inputs = {"x": x.name} inputs = {"x": x.name}
attr = dict() attr = dict()
if y.layer_type == 'Const': if y.layer_type == 'Const':
......
...@@ -231,7 +231,7 @@ class CaffeOpMapper(OpMapper): ...@@ -231,7 +231,7 @@ class CaffeOpMapper(OpMapper):
self.weights[node.layer_name + '_bias'] = data[1] self.weights[node.layer_name + '_bias'] = data[1]
assert len(node.inputs assert len(node.inputs
) == 1, 'The count of Convolution node\'s input is not 1.' ) == 1, 'The count of Convolution node\'s input is not 1.'
input = self.graph.get_bottom_node(node, idx=0, copy=True) input = self.graph.get_input_node(node, idx=0, copy=True)
layer_attrs = { layer_attrs = {
'filter_size': kernel, 'filter_size': kernel,
'num_filters': channel, 'num_filters': channel,
...@@ -273,7 +273,7 @@ class CaffeOpMapper(OpMapper): ...@@ -273,7 +273,7 @@ class CaffeOpMapper(OpMapper):
self.weights[node.layer_name + '_bias'] = data[1] self.weights[node.layer_name + '_bias'] = data[1]
assert len(node.inputs assert len(node.inputs
) == 1, 'The count of Deconvolution node\'s input is not 1.' ) == 1, 'The count of Deconvolution node\'s input is not 1.'
input = self.graph.get_bottom_node(node, idx=0, copy=True) input = self.graph.get_input_node(node, idx=0, copy=True)
layer_attrs = { layer_attrs = {
'output_size': None, 'output_size': None,
'filter_size': kernel, 'filter_size': kernel,
...@@ -306,7 +306,7 @@ class CaffeOpMapper(OpMapper): ...@@ -306,7 +306,7 @@ class CaffeOpMapper(OpMapper):
pool_type = 'avg' pool_type = 'avg'
assert len( assert len(
node.inputs) == 1, 'The count of Pooling node\'s input is not 1.' node.inputs) == 1, 'The count of Pooling node\'s input is not 1.'
input = self.graph.get_bottom_node(node, idx=0, copy=True) input = self.graph.get_input_node(node, idx=0, copy=True)
layer_attrs = { layer_attrs = {
'pool_size': kernel, 'pool_size': kernel,
'pool_stride': stride, 'pool_stride': stride,
...@@ -333,7 +333,7 @@ class CaffeOpMapper(OpMapper): ...@@ -333,7 +333,7 @@ class CaffeOpMapper(OpMapper):
# just scales by alpha (as does Krizhevsky's paper). # just scales by alpha (as does Krizhevsky's paper).
# We'll account for that here. # We'll account for that here.
alpha = params.alpha / float(params.local_size) alpha = params.alpha / float(params.local_size)
input = self.graph.get_bottom_node(node, idx=0, copy=True) input = self.graph.get_input_node(node, idx=0, copy=True)
layer_attrs = { layer_attrs = {
'n': params.local_size, 'n': params.local_size,
'k': params.k, 'k': params.k,
...@@ -381,7 +381,7 @@ class CaffeOpMapper(OpMapper): ...@@ -381,7 +381,7 @@ class CaffeOpMapper(OpMapper):
#params = node.layer.inner_product_param #params = node.layer.inner_product_param
assert params.axis == 1 assert params.axis == 1
assert params.bias_term == True assert params.bias_term == True
input = self.graph.get_bottom_node(node, idx=0, copy=True) input = self.graph.get_input_node(node, idx=0, copy=True)
layer_attrs = { layer_attrs = {
'size': params.num_output, 'size': params.num_output,
'name': string(node.layer_name), 'name': string(node.layer_name),
...@@ -399,7 +399,7 @@ class CaffeOpMapper(OpMapper): ...@@ -399,7 +399,7 @@ class CaffeOpMapper(OpMapper):
def Softmax(self, node): def Softmax(self, node):
assert len( assert len(
node.inputs) == 1, 'The count of Softmax node\'s input is not 1.' node.inputs) == 1, 'The count of Softmax node\'s input is not 1.'
input = self.graph.get_bottom_node(node, idx=0, copy=True) input = self.graph.get_input_node(node, idx=0, copy=True)
params = node.layer.softmax_param params = node.layer.softmax_param
axis = params.axis axis = params.axis
shape = node.input_shape[0] shape = node.input_shape[0]
...@@ -415,7 +415,7 @@ class CaffeOpMapper(OpMapper): ...@@ -415,7 +415,7 @@ class CaffeOpMapper(OpMapper):
def Slice(self, node): def Slice(self, node):
assert len( assert len(
node.inputs) == 1, 'The count of Slice node\'s input is not 1.' node.inputs) == 1, 'The count of Slice node\'s input is not 1.'
input = self.graph.get_bottom_node(node, idx=0, copy=True) input = self.graph.get_input_node(node, idx=0, copy=True)
top_len = len(node.layer.top) top_len = len(node.layer.top)
params = node.layer.slice_param params = node.layer.slice_param
axis = params.axis axis = params.axis
...@@ -445,7 +445,7 @@ class CaffeOpMapper(OpMapper): ...@@ -445,7 +445,7 @@ class CaffeOpMapper(OpMapper):
        ) >= 1, 'The count of Concat node\'s input is less than 1.'        ) >= 1, 'The count of Concat node\'s input is less than 1.'
inputs_list = [] inputs_list = []
for i in range(len(node.inputs)): for i in range(len(node.inputs)):
input = self.graph.get_bottom_node(node, idx=i, copy=True) input = self.graph.get_input_node(node, idx=i, copy=True)
inputs_list.append(self.get_input_name(input)) inputs_list.append(self.get_input_name(input))
params = node.layer.concat_param params = node.layer.concat_param
axis = params.axis axis = params.axis
...@@ -464,7 +464,7 @@ class CaffeOpMapper(OpMapper): ...@@ -464,7 +464,7 @@ class CaffeOpMapper(OpMapper):
""" """
assert len( assert len(
node.inputs) == 1, 'The count of ReLU node\'s input is not 1.' node.inputs) == 1, 'The count of ReLU node\'s input is not 1.'
input = self.graph.get_bottom_node(node, idx=0, copy=True) input = self.graph.get_input_node(node, idx=0, copy=True)
params = node.layer.relu_param params = node.layer.relu_param
if params.HasField('negative_slope') and params.negative_slope != 0: if params.HasField('negative_slope') and params.negative_slope != 0:
...@@ -483,7 +483,7 @@ class CaffeOpMapper(OpMapper): ...@@ -483,7 +483,7 @@ class CaffeOpMapper(OpMapper):
def PReLU(self, node): def PReLU(self, node):
assert len( assert len(
node.inputs) == 1, 'The count of PReLU node\'s input is not 1.' node.inputs) == 1, 'The count of PReLU node\'s input is not 1.'
input = self.graph.get_bottom_node(node, idx=0, copy=True) input = self.graph.get_input_node(node, idx=0, copy=True)
params = node.layer.prelu_param params = node.layer.prelu_param
mode_bool = params.channel_shared mode_bool = params.channel_shared
if mode_bool: if mode_bool:
...@@ -511,10 +511,10 @@ class CaffeOpMapper(OpMapper): ...@@ -511,10 +511,10 @@ class CaffeOpMapper(OpMapper):
inputs_dict = dict() inputs_dict = dict()
for i, shape in enumerate(node.input_shape): for i, shape in enumerate(node.input_shape):
if shape[1] == 1: if shape[1] == 1:
input = self.graph.get_bottom_node(node, idx=i, copy=True) input = self.graph.get_input_node(node, idx=i, copy=True)
inputs_dict["label"] = self.get_input_name(input) inputs_dict["label"] = self.get_input_name(input)
else: else:
input = self.graph.get_bottom_node(node, idx=i, copy=True) input = self.graph.get_input_node(node, idx=i, copy=True)
inputs_dict["input"] = self.get_input_name(input) inputs_dict["input"] = self.get_input_name(input)
params = node.layer.accuracy_param params = node.layer.accuracy_param
top_k = params.top_k top_k = params.top_k
...@@ -534,9 +534,9 @@ class CaffeOpMapper(OpMapper): ...@@ -534,9 +534,9 @@ class CaffeOpMapper(OpMapper):
params = node.layer.eltwise_param params = node.layer.eltwise_param
mode = params.operation mode = params.operation
inputs = [] inputs = []
input0 = self.graph.get_bottom_node(node, idx=0, copy=True) input0 = self.graph.get_input_node(node, idx=0, copy=True)
inputs.append(input0) inputs.append(input0)
input1 = self.graph.get_bottom_node(node, idx=1, copy=True) input1 = self.graph.get_input_node(node, idx=1, copy=True)
inputs.append(input1) inputs.append(input1)
if mode == 0: if mode == 0:
inputs_dict = {} inputs_dict = {}
...@@ -606,7 +606,7 @@ class CaffeOpMapper(OpMapper): ...@@ -606,7 +606,7 @@ class CaffeOpMapper(OpMapper):
def BatchNorm(self, node): def BatchNorm(self, node):
assert len( assert len(
node.inputs) == 1, 'The count of BatchNorm node\'s input is not 1.' node.inputs) == 1, 'The count of BatchNorm node\'s input is not 1.'
input = self.graph.get_bottom_node(node, idx=0, copy=True) input = self.graph.get_input_node(node, idx=0, copy=True)
params = node.layer.batch_norm_param params = node.layer.batch_norm_param
if hasattr(params, 'eps'): if hasattr(params, 'eps'):
eps = params.eps eps = params.eps
...@@ -670,8 +670,8 @@ class CaffeOpMapper(OpMapper): ...@@ -670,8 +670,8 @@ class CaffeOpMapper(OpMapper):
            # for two tensors, axis is reset to 1 here. There may be a bug for the unknown case.            # for two tensors, axis is reset to 1 here. There may be a bug for the unknown case.
axis = 1 axis = 1
bias_shape = node.input_shape[0][axis:axis + num_axes] bias_shape = node.input_shape[0][axis:axis + num_axes]
input0 = self.graph.get_bottom_node(node, idx=0, copy=True) input0 = self.graph.get_input_node(node, idx=0, copy=True)
input1 = self.graph.get_bottom_node(node, idx=1, copy=True) input1 = self.graph.get_input_node(node, idx=1, copy=True)
inputs_dict = {} inputs_dict = {}
inputs_dict['x'] = self.get_input_name(input0) inputs_dict['x'] = self.get_input_name(input0)
inputs_dict['y'] = self.get_input_name(input1) inputs_dict['y'] = self.get_input_name(input1)
...@@ -682,7 +682,7 @@ class CaffeOpMapper(OpMapper): ...@@ -682,7 +682,7 @@ class CaffeOpMapper(OpMapper):
axis=axis) axis=axis)
else: else:
bias_shape = node.input_shape[0][axis:axis + num_axes] bias_shape = node.input_shape[0][axis:axis + num_axes]
input0 = self.graph.get_bottom_node(node, idx=0, copy=True) input0 = self.graph.get_input_node(node, idx=0, copy=True)
input0_name = self.get_input_name(input0) input0_name = self.get_input_name(input0)
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
kernel="fluid.ParamAttr", kernel="fluid.ParamAttr",
...@@ -739,7 +739,7 @@ class CaffeOpMapper(OpMapper): ...@@ -739,7 +739,7 @@ class CaffeOpMapper(OpMapper):
def Reshape(self, node): def Reshape(self, node):
input = self.graph.get_bottom_node(node, idx=0, copy=True) input = self.graph.get_input_node(node, idx=0, copy=True)
top_count = len(input.layer.top) top_count = len(input.layer.top)
is_inplace = False if top_count == 1 else True is_inplace = False if top_count == 1 else True
output_shape = node.output_shape[0] output_shape = node.output_shape[0]
...@@ -759,7 +759,7 @@ class CaffeOpMapper(OpMapper): ...@@ -759,7 +759,7 @@ class CaffeOpMapper(OpMapper):
assert len(node.inputs) == 1 and len( assert len(node.inputs) == 1 and len(
node.outputs node.outputs
) == 1, 'The count of ArgMax node\'s input and output is not 1.' ) == 1, 'The count of ArgMax node\'s input and output is not 1.'
input = self.graph.get_bottom_node(node, idx=0, copy=True) input = self.graph.get_input_node(node, idx=0, copy=True)
input_shape = node.input_shape[0] input_shape = node.input_shape[0]
params = node.layer.argmax_param params = node.layer.argmax_param
out_max_val = params.out_max_val if hasattr(params, out_max_val = params.out_max_val if hasattr(params,
...@@ -796,8 +796,8 @@ class CaffeOpMapper(OpMapper): ...@@ -796,8 +796,8 @@ class CaffeOpMapper(OpMapper):
def Crop(self, node): def Crop(self, node):
assert len( assert len(
node.inputs) == 2, 'The count of Crop node\'s input is not 2.' node.inputs) == 2, 'The count of Crop node\'s input is not 2.'
input = self.graph.get_bottom_node(node, idx=0, copy=True) input = self.graph.get_input_node(node, idx=0, copy=True)
example = self.graph.get_bottom_node(node, idx=1, copy=True) example = self.graph.get_input_node(node, idx=1, copy=True)
params = node.layer.crop_param params = node.layer.crop_param
axis = params.axis axis = params.axis
input_shape = node.input_shape[0] input_shape = node.input_shape[0]
...@@ -822,7 +822,7 @@ class CaffeOpMapper(OpMapper): ...@@ -822,7 +822,7 @@ class CaffeOpMapper(OpMapper):
assert len( assert len(
node. node.
inputs) == 1, 'The count of DetectionOutput node\'s input is not 1.' inputs) == 1, 'The count of DetectionOutput node\'s input is not 1.'
input = self.graph.get_bottom_node(node, idx=0, copy=True) input = self.graph.get_input_node(node, idx=0, copy=True)
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
kernel="fluid.layers.reshape", kernel="fluid.layers.reshape",
inputs={"x": self.get_input_name(input)}, inputs={"x": self.get_input_name(input)},
...@@ -832,7 +832,7 @@ class CaffeOpMapper(OpMapper): ...@@ -832,7 +832,7 @@ class CaffeOpMapper(OpMapper):
def Power(self, node): def Power(self, node):
assert len( assert len(
node.inputs) == 1, 'The count of Permute node\'s input is not 1.' node.inputs) == 1, 'The count of Permute node\'s input is not 1.'
input = self.graph.get_bottom_node(node, idx=0, copy=True) input = self.graph.get_input_node(node, idx=0, copy=True)
params = node.layer.power_param params = node.layer.power_param
power = params.power power = params.power
scale = params.scale scale = params.scale
...@@ -857,7 +857,7 @@ class CaffeOpMapper(OpMapper): ...@@ -857,7 +857,7 @@ class CaffeOpMapper(OpMapper):
def Reduction(self, node): def Reduction(self, node):
assert len( assert len(
node.inputs) == 1, 'The count of Reduction node\'s input is not 1.' node.inputs) == 1, 'The count of Reduction node\'s input is not 1.'
input = self.graph.get_bottom_node(node, idx=0, copy=True) input = self.graph.get_input_node(node, idx=0, copy=True)
params = node.layer.reduction_param params = node.layer.reduction_param
operation = params.operation operation = params.operation
axis = params.axis axis = params.axis
...@@ -942,15 +942,15 @@ class CaffeOpMapper(OpMapper): ...@@ -942,15 +942,15 @@ class CaffeOpMapper(OpMapper):
self.weights[weights_name[i]] = data[i] self.weights[weights_name[i]] = data[i]
inputs_list = [] inputs_list = []
for i in range(len(node.inputs)): for i in range(len(node.inputs)):
input = self.graph.get_bottom_node(node, idx=i, copy=True) input = self.graph.get_input_node(node, idx=i, copy=True)
if i == 1 and op == 'DetectionOutput': if i == 1 and op == 'DetectionOutput':
input = self.graph.get_bottom_node(node, idx=i, copy=True) input = self.graph.get_input_node(node, idx=i, copy=True)
while input is not None \ while input is not None \
and input.layer_type != 'Softmax' \ and input.layer_type != 'Softmax' \
and input.layer_type != 'Sigmoid': and input.layer_type != 'Sigmoid':
input = self.graph.get_bottom_node(input, idx=0, copy=True) input = self.graph.get_input_node(input, idx=0, copy=True)
assert input is not None, 'This kind of DetectionOutput is not supported!' assert input is not None, 'This kind of DetectionOutput is not supported!'
input = self.graph.get_bottom_node(input, idx=0, copy=True) input = self.graph.get_input_node(input, idx=0, copy=True)
inputs_list.append(self.get_input_name(input)) inputs_list.append(self.get_input_name(input))
kwargs_tmp = copy.deepcopy(kwargs) kwargs_tmp = copy.deepcopy(kwargs)
for k, v in kwargs_tmp.items(): for k, v in kwargs_tmp.items():
...@@ -970,7 +970,7 @@ class CaffeOpMapper(OpMapper): ...@@ -970,7 +970,7 @@ class CaffeOpMapper(OpMapper):
def directly_map(self, node): def directly_map(self, node):
assert node.layer_type in self.directly_map_ops assert node.layer_type in self.directly_map_ops
op_info = self.directly_map_ops[node.layer_type] op_info = self.directly_map_ops[node.layer_type]
input = self.graph.get_bottom_node(node, idx=0, copy=True) input = self.graph.get_input_node(node, idx=0, copy=True)
self.paddle_graph.add_layer( self.paddle_graph.add_layer(
kernel=op_info, kernel=op_info,
inputs={"x": self.get_input_name(input)}, inputs={"x": self.get_input_name(input)},
......
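The Caffe hunks above are a mechanical rename of the graph accessor from get_bottom_node to get_input_node; every call site keeps the same (node, idx=..., copy=True) argument shape. After a sweep like this, a scan such as the following can flag any stale call sites left behind. This is a hypothetical helper, not part of the commit, and the root path is an assumption:

import pathlib
import re

def find_stale_calls(root="x2paddle"):
    # Look for surviving calls to the pre-rename accessor name.
    pattern = re.compile(r"\bget_bottom_node\s*\(")
    for path in pathlib.Path(root).rglob("*.py"):
        text = path.read_text(encoding="utf-8")
        for lineno, line in enumerate(text.splitlines(), 1):
            if pattern.search(line):
                print("{}:{}: {}".format(path, lineno, line.strip()))

find_stale_calls()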
@@ -359,7 +359,7 @@ class TFOpMapper(OpMapper):
             kernel_value = kernel.value
             kernel_weight_name = kernel.name.replace('/', '_')
         else:
-            kernel_value = self.decoder.infer_tensor(kernel)
+            kernel_value = self.decoder.infer_tensor(kernel, use_diff_inputs=False)
             if kernel.layer_type == 'Split':
                 kernel_weight_name = "{}_{}_kernel".format(node.name,
                                                            kernel.name)
@@ -781,15 +781,15 @@ class TFOpMapper(OpMapper):
         if strides.layer_type == "Const":
             strides = strides.value.tolist()
         else:
-            strides = self.decoder.infer_shape_tensor(strides)
+            strides = self.decoder.infer_tensor(strides)
         if begin.layer_type == "Const":
             begin = begin.value.tolist()
         else:
-            begin = self.decoder.infer_shape_tensor(begin)
+            begin = self.decoder.infer_tensor(begin)
         if end.layer_type == "Const":
             end = end.value.tolist()
         else:
-            end = self.decoder.infer_shape_tensor(end)
+            end = self.decoder.infer_tensor(end)
         assert len(set(strides)) == 1 and strides[
             0] == 1, "Only support strides be 1 in StridedSlice OP"
@@ -1066,15 +1066,15 @@ class TFOpMapper(OpMapper):
         if out_shape.layer_type == "Const":
             out_shape = out_shape.value.tolist()
         else:
-            out_shape = self.decoder.infer_shape_tensor(out_shape,
-                                                        node.out_shapes[0])
+            out_shape = self.decoder.infer_tensor(out_shape,
+                                                  out_shape=node.out_shapes[0])
         in_shape = input.out_shapes[0]
         if in_shape.count(-1) > 2:
-            in_shape = self.decoder.infer_tensor(input).shape
+            in_shape = self.decoder.infer_tensor(input, use_diff_inputs=False).shape
         k_size = kernel.out_shapes[0]
         if k_size.count(-1) > 2:
-            k_size = self.decoder.infer_tensor(kernel).shape
+            k_size = self.decoder.infer_tensor(input, use_diff_inputs=False).shape
         pad_mode = node.get_attr("padding").decode()
         strides = node.get_attr("strides")
......
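The TF hunks fold the old infer_shape_tensor helper into a single infer_tensor entry point, steered by two keyword arguments visible at the call sites: use_diff_inputs=False for one-shot value inference (the convolution-kernel case) and out_shape=... when the caller already knows the target shape. Below is a minimal sketch of such a dispatcher, assuming a caller-supplied evaluate callable that runs the TF graph for a node; the body is an assumption, and only the keyword names come from the hunks above:

import numpy as np

def infer_tensor(evaluate, node, out_shape=None, use_diff_inputs=True):
    # Sketch only: `evaluate` stands in for whatever executes the TF
    # graph and returns a numpy array for `node`.
    if use_diff_inputs:
        # Evaluate twice with differently filled feeds so the caller can
        # detect dimensions that vary from run to run (dynamic dims).
        first = np.asarray(evaluate(node))
        second = np.asarray(evaluate(node))
        if first.shape != second.shape:
            raise ValueError("tensor shape varies across feeds")
        value = first
    else:
        # A single evaluation suffices when only one concrete value is
        # needed, e.g. a constant-foldable kernel tensor.
        value = np.asarray(evaluate(node))
    if out_shape is not None:
        # Callers that already know the target shape coerce the
        # inferred value to it.
        value = value.reshape(out_shape)
    return value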
+# -*- coding:UTF-8 -*-
 # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"
......
+# -*- coding:UTF-8 -*-
 # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"
......
+# -*- coding:UTF-8 -*-
 # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"
......
+# -*- coding:UTF-8 -*-
 # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"
......
+# -*- coding:UTF-8 -*-
 # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"
......
+# -*- coding:UTF-8 -*-
 # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"
......
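The remaining hunks each prepend a # -*- coding:UTF-8 -*- declaration to a source file. Under PEP 263, Python only honors this comment on the first or second line of a file, which is why each hunk adds it above the copyright header. A small check along those lines follows; it is a hypothetical helper, not from the commit, and the example path is only illustrative:

import re

def has_valid_coding_line(path):
    # PEP 263: an encoding declaration is only honored on line 1 or 2.
    with open(path, "rb") as f:
        first_two = [f.readline() for _ in range(2)]
    coding = re.compile(rb"coding[:=]\s*([-\w.]+)")
    return any(coding.search(line) for line in first_two)

print(has_valid_coding_line("x2paddle/decoder/caffe_decoder.py"))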