Commit 16df89ef authored by SunAhong1993

fix

Parent a02ad500
@@ -210,8 +210,8 @@ class PaddleGraph(object):
             if self.edges_in.get(layer_id, 0) == 0 and self.edges_out.get(
                     layer_id, 0) == 0 and layer.kernel != "prim.assert" \
                     and layer.kernel != "prim.exception" \
-                    and layer.kernel != "prim.warnings":
-                if layer.kernel == "paddle.to_tensor":
+                    and layer.kernel != "prim.warnings" and layer.outputs[0] not in self.outputs:
+                if layer.kernel == "paddle.to_tensor" and layer.outputs[0] in self.inputs_info:
                     self.inputs_info.pop(layer.outputs[0])
                 invalid_list.append(layer_id)
         for layer_id in invalid_list:
...
@@ -234,11 +234,16 @@ class ONNXGraph(Graph):
         """
        generate output_nodes node of ONNX model
        """
-        inner_nodes = self.get_inner_nodes()
+        # inner_nodes = self.get_inner_nodes()
         output_nodes = [value.name for value in self.graph.output]
+        # for opt_data in output_nodes:
+        #     if opt_data not in inner_nodes:
+        #         self.output_nodes.append(opt_data)
         for opt_data in output_nodes:
-            if opt_data not in inner_nodes:
-                self.output_nodes.append(opt_data)
+            n = super(ONNXGraph, self).get_node(opt_data)
+            if n is None:
+                self.topo_sort.append(self.node_map[opt_data])
+            self.output_nodes.append(opt_data)

     def is_place_holder_nodes(self, layer):
         """
@@ -403,7 +408,7 @@ class ONNXDecoder(object):
         check_model(onnx_model)

         onnx_model = self.optimize_model_skip_op(onnx_model)
-        onnx_model = self.optimize_model_strip_initializer(onnx_model)
+        # onnx_model = self.optimize_model_strip_initializer(onnx_model)
         onnx_model = self.optimize_node_name(onnx_model)
         self.graph = ONNXGraph(onnx_model)
         #self.onnx_model = onnx_model
...
@@ -14,4 +14,6 @@
 from .one_hot import OneHot
-from .pad import CustomPad
\ No newline at end of file
+from .pad_two_input import PadWithTwoInput
+from .pad_all_dim2 import PadAllDim2
+from .pad_all_dim4 import PadAllDim4
\ No newline at end of file
@@ -19,30 +19,17 @@ class OneHot(object):
         self.axis = axis

     def __call__(self, indices, depth, values):
-        indices_shape = paddle.shape(indices)
-        tmp = paddle.ones_like(indices_shape, dtype="int32")
-        rank = paddle.sum(tmp)
+        indices_shape = indices.shape
+        rank = len(indices.shape)
+        real_axis = self.axis
+        if self.axis < 0:
+            real_axis = self.axis + rank + 1
         depth_range = paddle.arange(end=depth)
-        zero = paddle.zeros([1], dtype="int32")
-        one = paddle.ones([1], dtype="int32")
-        axis = self.axis * one
-        new_axis = axis + rank + 1
-        cond = paddle.less_than(axis, zero)
-        real_axis = paddle.where(cond, new_axis, axis)
-        ls = paddle.slice(indices_shape, axes=[0], starts=[0], ends=real_axis)
-        rs = paddle.slice(indices_shape, axes=[0], starts=real_axis, ends=rank)
-        tmp = paddle.ones_like(ls, dtype="int32")
-        ls_len = paddle.sum(tmp)
-        ls_list = paddle.ones(ls_len, dtype="int32")
-        tmp = paddle.ones_like(rs, dtype="int32")
-        rs_len = paddle.sum(tmp)
-        rs_list = paddle.ones(rs_len, dtype="int32")
-        depth_range_shape = paddle.shape(depth_range)
-        targets_shape = paddle.concat([ls_list, depth_range_shape, rs_list], axis=0)
-        targets = paddle.reshape(depth_range, targets_shape)
+        ls = tuple(indices_shape[0: real_axis])
+        rs = tuple(indices_shape[real_axis: rank])
+        targets = paddle.reshape(depth_range, (1,) * (real_axis-0) + tuple(depth_range.shape) + (1,) * (rank-real_axis))
         mod = paddle.mod(indices, depth)
-        v_shape = paddle.concat([ls, paddle.shape(one), rs], axis=0)
-        v = paddle.reshape(mod, v_shape)
+        v = paddle.reshape(mod, ls + (1,) + rs)
         out = targets == v
         out = paddle.cast(out, "float32")
         on_value = paddle.slice(values, axes=[0], starts=[1], ends=[2])
...
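Note (editorial, not part of the commit): a minimal usage sketch of the rewritten OneHot layer, assuming the class above is in scope and the constructor takes the axis shown; it follows ONNX OneHot semantics, where values holds [off_value, on_value]:

    import paddle

    one_hot = OneHot(axis=-1)
    indices = paddle.to_tensor([0, 2, 3], dtype="int64")
    depth = paddle.to_tensor([4], dtype="int64")
    values = paddle.to_tensor([0.0, 1.0])      # [off_value, on_value]
    out = one_hot(indices, depth, values)      # expected shape: [3, 4]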
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import paddle
from x2paddle.core.util import *


class PadAllDim2(object):
    def __init__(self, value, mode):
        self.layer_attrs = {}
        self.layer_attrs['mode'] = mode
        self.layer_attrs['data_format'] = 'NCHW'
        self.layer_attrs['value'] = value

    def __call__(self, x, pad):
        pad = paddle.reshape(pad, shape=[2, -1])
        pad = paddle.transpose(pad, perm=[1, 0])
        pad = paddle.reverse(pad, axis=[0])
        pad = paddle.flatten(pad)
        pad = paddle.cast(pad, dtype="int32")
        x = paddle.unsqueeze(x, axis=[0, 1])
        out = paddle.nn.functional.pad(x=x, pad=pad, **self.layer_attrs)
        out = paddle.squeeze(out, axis=[0, 1])
        return out
\ No newline at end of file
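Note (editorial, not part of the commit): a minimal usage sketch of PadAllDim2. The layer receives an ONNX-style pads tensor [dim0_begin, dim1_begin, dim0_end, dim1_end] and pads both dimensions of a 2-D tensor by temporarily promoting it to NCHW:

    import paddle

    x = paddle.rand([3, 5])
    pad = paddle.to_tensor([1, 2, 1, 2], dtype="int64")  # begins [1, 2], ends [1, 2]
    layer = PadAllDim2(value=0.0, mode="constant")
    out = layer(x, pad)                                  # expected shape: [5, 9]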
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import paddle
from x2paddle.core.util import *


class PadAllDim4(object):
    def __init__(self, value, mode):
        self.layer_attrs = {}
        self.layer_attrs['mode'] = mode
        self.layer_attrs['data_format'] = 'NCHW'
        self.layer_attrs['value'] = value

    def __call__(self, x, pad):
        pad = paddle.reshape(pad, shape=[2, -1])
        pad = paddle.transpose(pad, perm=[1, 0])
        pad = paddle.reverse(pad, axis=[0])
        pad = paddle.flatten(pad)
        pad = paddle.cast(pad, dtype="int32")
        pad1, pad2 = paddle.split(pad, num_or_sections=2, axis=0)
        x = paddle.nn.functional.pad(x=x, pad=pad1, **self.layer_attrs)
        x = paddle.transpose(x, perm=[2, 3, 0, 1])
        x = paddle.nn.functional.pad(x=x, pad=pad2, **self.layer_attrs)
        out = paddle.transpose(x, perm=[2, 3, 0, 1])
        return out
\ No newline at end of file
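Note (editorial, not part of the commit): a similar sketch for PadAllDim4. An 8-element ONNX pads tensor pads all four dimensions of an NCHW tensor, using two F.pad calls with a transpose in between so the batch and channel dimensions can be padded as well:

    import paddle

    x = paddle.rand([1, 1, 3, 3])
    pad = paddle.to_tensor([0, 1, 2, 2, 0, 1, 2, 2], dtype="int64")  # begins, then ends
    layer = PadAllDim4(value=0.0, mode="constant")
    out = layer(x, pad)                                              # expected shape: [1, 3, 7, 7]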
@@ -13,12 +13,13 @@
 # limitations under the License.

 import paddle
+from x2paddle.core.util import *


-class CustomPad(object):
-    def __init__(self, value, mode):
+class PadWithTwoInput(object):
+    def __init__(self, value, mode, data_format):
         self.layer_attrs = {}
-        self.layer_attrs['mode'] = string(mode)
-        self.layer_attrs['data_format'] = string('NCHW')
+        self.layer_attrs['mode'] = mode
+        self.layer_attrs['data_format'] = data_format
         self.layer_attrs['value'] = value
@@ -27,5 +28,6 @@ class CustomPad(object):
         pad = paddle.transpose(pad, perm=[1, 0])
         pad = paddle.reverse(pad, axis=[0])
         pad = paddle.flatten(pad)
+        pad = paddle.cast(pad, dtype="int32")
         out = paddle.nn.functional.pad(x=x, pad=pad, **self.layer_attrs)
         return out
\ No newline at end of file
@@ -142,6 +142,7 @@ class OpSet9():
         self.inputs_info = dict()
         self.weights = dict()
         self.nn_name2id = dict()
+        self.done_weight_list = list()

     @print_mapping_info
     def directly_map(self, node, *args, **kwargs):
...
@@ -232,8 +233,7 @@ class OpSet9():
             shape=shape,
             attr=string(node.name),
             dtype=string(dtype),
             default_initializer="paddle.nn.initializer.Constant(value=0.0)")
-
     def _pad_if_asymmetric(self, node, pads, val_name):  # pads: SSEE
         assert len(pads) & 1 == 0
@@ -394,78 +394,111 @@ class OpSet9():
         value = node.get_attr('value', 0.)
         data_shape = val_x.out_shapes[0]
         output_shape = node.out_shapes[0]
-        assume_pad2d = False
-        layer_attrs = {}
-        layer_attrs['mode'] = string(mode)
-        if is_pads_attr:
-            paddings = []
-            if len(pads) == 4:
-                assume_pad2d |= mode != 'constant'
-                if data_shape:
-                    assume_pad2d |= data_shape and len(data_shape) == 4  # NCHW
-                if output_shape:
-                    assume_pad2d |= output_shape and len(output_shape) == 4  # NCHW
-            if assume_pad2d:
-                paddle_op = 'paddle.nn.Pad2D'
-                layer_attrs['data_format'] = string('NCHW')
-                layer_attrs['value'] = value
-            else:
-                paddle_op = 'paddle.fluid.layers.pad'
-                layer_attrs["pad_value"] = value
-            if len(pads) == 4:
-                paddings = np.array(pads).reshape(
-                    (-1, 2)).transpose().flatten().tolist()  # SSEE -> SESE
-            elif len(pads) == 8:
-                paddings = np.array(pads).reshape(
-                    (-1, 4)).transpose().flatten().tolist()  # SSEE -> SESE
-                if sum(paddings[:4]) == 0:
-                    paddle_op = 'paddle.nn.Pad2D'
-                    paddings = paddings[4:]
-                    layer_attrs['value'] = value
-                    if 'pad_value' in layer_attrs:
-                        layer_attrs.pop('pad_value')
-            tmp_paddings = copy.deepcopy(paddings)
-            paddings[0] = tmp_paddings[2]
-            paddings[1] = tmp_paddings[3]
-            paddings[2] = tmp_paddings[0]
-            paddings[3] = tmp_paddings[1]
-            if paddle_op == 'paddle.nn.Pad2D':
-                layer_attrs['padding'] = paddings
-                nn_op_name = name_generator("pad2d", self.nn_name2id)
-            else:
-                layer_attrs['paddings'] = paddings
-            if op_independent:
-                self.paddle_graph.add_layer(
-                    paddle_op,
-                    inputs={'x': val_x.name},
-                    outputs=[nn_op_name, node.name] if paddle_op == 'paddle.nn.Pad2D' else [node.name],
-                    **layer_attrs)
-            else:
-                self.paddle_graph.add_layer(
-                    paddle_op,
-                    inputs={'x': val_x.name},
-                    outputs=[nn_op_name, node.name + '_paded'] if paddle_op == 'paddle.nn.Pad2D' \
-                        else [node.name + '_paded'],
-                    **layer_attrs)
-                return node.name + '_paded'
-        else:
-            if pad_shape[0] == 4:
-                assume_pad2d |= mode != 'constant'
-                if data_shape:
-                    assume_pad2d |= data_shape and len(data_shape) == 4  # NCHW
-                if output_shape:
-                    assume_pad2d |= output_shape and len(output_shape) == 4  # NCHW
-            if pad_shape[0] == 8 or not assume_pad2d:
-                raise Exception("When the pad shape is 8 and pad is tensor, the op is not supported yet!")
-            nn_op_name = name_generator("custom_pad", self.nn_name2id)
-            output_name = node.name + '_paded'
-            layer_outputs = [nn_op_name, output_name]
-            layer_attrs['value'] = value
-            self.paddle_graph.add_layer(
-                "custom_layer:CustomPad",
-                inputs={'x': val_x.name, 'pad': val_pad.name},
-                outputs=layer_outputs,
-                **layer_attrs)
+        assume_pad = False
+        layer_attrs = {}
+        layer_attrs['mode'] = string(mode)
+        layer_attrs['value'] = value
+        if not op_independent:
+            output_name = node.name + '_paded'
+        else:
+            output_name = node.name
+        nn_op_name = name_generator("pad", self.nn_name2id)
+        layer_outputs = [nn_op_name, output_name]
+        if is_pads_attr:
+            paddings = []
+            if len(pads) in [2, 4, 6]:
+                if data_shape:
+                    assume_pad |= data_shape and 2 * (len(data_shape) - 2) == len(pads)  # NCHW
+                if output_shape:
+                    assume_pad |= output_shape and 2 * (len(output_shape) - 2) == len(pads)  # NCHW
+                if assume_pad:
+                    paddle_op = 'paddle.nn.Pad{}D'.format(len(output_shape) - 2)
+                    paddings = np.array(pads).reshape(
+                        (2, -1)).transpose().astype("int32")
+                    paddings = np.flip(paddings).flatten().tolist()
+                    layer_attrs['padding'] = paddings
+                else:
+                    if data_shape:
+                        assume_pad |= data_shape and 2 * len(data_shape) == len(pads)  # NCHW
+                    if output_shape:
+                        assume_pad |= output_shape and 2 * len(output_shape) == len(pads)  # NCHW
+                    if assume_pad:
+                        paddle_op = 'paddle.nn.functional.pad'
+                        paddings = np.array(pads).reshape(
+                            (2, -1)).transpose().astype("int32").flatten().tolist()
+                        layer_attrs['pad'] = paddings
+                    else:
+                        raise Exception("The padding value {} is wrong!".format(pads))
+            elif len(pads) == 8:
+                if data_shape:
+                    assume_pad |= data_shape and 2 * len(data_shape) == len(pads)  # NCHW
+                if output_shape:
+                    assume_pad |= output_shape and 2 * len(output_shape) == len(pads)  # NCHW
+                if assume_pad:
+                    paddle_op = 'paddle.nn.functional.pad'
+                    paddings = np.array(pads).reshape(
+                        (2, -1)).transpose().astype("int32").flatten().tolist()
+                    layer_attrs['pad'] = paddings
+            else:
+                raise Exception("The padding value {} is wrong!".format(pads))
+            self.paddle_graph.add_layer(
+                paddle_op,
+                inputs={'x': val_x.name},
+                outputs=layer_outputs[1:] if paddle_op == 'paddle.nn.functional.pad' else layer_outputs,
+                **layer_attrs)
+            if not op_independent:
+                return node.name + '_paded'
+        else:
+            pads_len = val_pad.out_shapes[0][0]
+            if pads_len in [2, 4, 6]:
+                if data_shape:
+                    assume_pad |= data_shape and 2 * (len(data_shape) - 2) == pads_len  # NCHW
+                if output_shape:
+                    assume_pad |= output_shape and 2 * (len(output_shape) - 2) == pads_len  # NCHW
+                if assume_pad:
+                    if pads_len == 2:
+                        data_format = "NCL"
+                    elif pads_len == 4:
+                        data_format = "NCHW"
+                    else:
+                        data_format = "NCDHW"
+                    self.paddle_graph.add_layer(
+                        "custom_layer:PadWithTwoInput",
+                        inputs={'x': val_x.name, 'pad': val_pad.name},
+                        outputs=layer_outputs,
+                        value=value,
+                        mode=string(mode),
+                        data_format=string(data_format))
+                else:
+                    if data_shape:
+                        assume_pad |= data_shape and 2 * len(data_shape) == pads_len  # NCHW
+                    if output_shape:
+                        assume_pad |= output_shape and 2 * len(output_shape) == pads_len  # NCHW
+                    if assume_pad:
+                        if pads_len == 4:
+                            self.paddle_graph.add_layer(
+                                "custom_layer:PadAllDim2",
+                                inputs={'x': val_x.name, 'pad': val_pad.name},
+                                outputs=layer_outputs,
+                                value=value,
+                                mode=string(mode))
+                        else:
+                            raise Exception("The padding value is wrong!")
+            elif pads_len == 8:
+                if data_shape:
+                    assume_pad |= data_shape and 2 * len(data_shape) == pads_len  # NCHW
+                if output_shape:
+                    assume_pad |= output_shape and 2 * len(output_shape) == pads_len  # NCHW
+                if assume_pad:
+                    self.paddle_graph.add_layer(
+                        "custom_layer:PadAllDim4",
+                        inputs={'x': val_x.name, 'pad': val_pad.name},
+                        outputs=layer_outputs,
+                        value=value,
+                        mode=string(mode))
+            else:
+                print(pads_len)
+                raise Exception("The padding value is wrong!")
         if not op_independent:
             return node.name + '_paded'
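Note (editorial, not part of the commit): the core of the new attribute path is reordering the ONNX pads list, which stores all begin values first and then all end values, into per-dimension (begin, end) pairs before handing it to paddle.nn.functional.pad. A small standalone check of that reshuffle with made-up values:

    import numpy as np

    pads = [0, 1, 2, 3, 0, 1, 2, 3]   # ONNX order: begins [0, 1, 2, 3], then ends [0, 1, 2, 3]
    paddings = np.array(pads).reshape(
        (2, -1)).transpose().astype("int32").flatten().tolist()
    print(paddings)                   # [0, 0, 1, 1, 2, 2, 3, 3]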
@@ -678,8 +711,9 @@ class OpSet9():
                 'paddle.nn.Embedding',
                 inputs={"x": indices_cast},
                 outputs=layer_outputs,
-                param_attr=string(val_x.name),
-                size=val_x.out_shapes[0])
+                weight_attr=string(val_x.name),
+                num_embeddings=val_x.out_shapes[0][0],
+                embedding_dim=val_x.out_shapes[0][1])
         else:
             from functools import reduce
             reshape_shape = reduce(lambda x, y: x * y, indices_shape)
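Note (editorial, not part of the commit): the Gather-to-Embedding path now uses the Paddle 2.x paddle.nn.Embedding signature, deriving num_embeddings and embedding_dim from the two axes of the gathered weight instead of the old 1.x-style size argument. A hedged sketch with an assumed weight shape of [1000, 64]:

    import paddle

    vocab_size, hidden = 1000, 64
    embedding = paddle.nn.Embedding(num_embeddings=vocab_size, embedding_dim=hidden)
    ids = paddle.to_tensor([3, 7, 42], dtype="int64")
    out = embedding(ids)              # shape: [3, 64]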
@@ -851,14 +885,21 @@ class OpSet9():
         starts = self.graph.get_input_node(node, idx=1, copy=True)
         ends = self.graph.get_input_node(node, idx=2, copy=True)
         starts_value = _const_weight_or_none(starts)
+        if starts_value is not None:
+            starts_value = starts_value.tolist()
         ends_value = _const_weight_or_none(ends)
+        if ends_value is not None:
+            ends_value = ends_value.tolist()
+        if len(node.inputs) > 2:
+            s_len = len(val_x.out_shapes[0])
+            axes = list(range(s_len))
         if len(node.inputs) > 3:
-            axes = self.graph.get_input_node(node, idx=3, copy=True)
-            axes = _const_weight_or_none(axes, necessary=True)
+            axes_node = self.graph.get_input_node(node, idx=3, copy=True)
+            axes = _const_weight_or_none(axes_node, necessary=True).tolist()
         if len(node.inputs) > 4:
             steps = self.graph.get_input_node(node, idx=4, copy=True)
-            steps = _const_weight_or_none(steps)
+            steps = _const_weight_or_none(steps).tolist()
         layer_attrs = {
             "axes": axes,
             "starts": starts.name,
@@ -911,6 +952,7 @@ class OpSet9():
                 ends[idx] = 2**31 - 1
             layer_attrs = {"axes": axes, "starts": starts, "ends": ends}
             if steps is not None:
                 layer_attrs['strides'] = steps
+
             self.paddle_graph.add_layer(
@@ -1036,6 +1078,12 @@ class OpSet9():
                 inputs={'x': val_shape.name},
                 outputs=[val_shape.name],
                 shape=val_shape.out_shapes[0])
+        if val_shape.dtype != "int32":
+            self.paddle_graph.add_layer(
+                'paddle.cast',
+                inputs={'x': val_shape.name},
+                outputs=[val_shape.name],
+                dtype=string("int32"))
         self.paddle_graph.add_layer(
             'paddle.reshape',
             inputs={'x': val_x.name,
@@ -1280,7 +1328,10 @@ class OpSet9():
     @print_mapping_info
     def Transpose(self, node):
         val_x = self.graph.get_input_node(node, idx=0, copy=True)
-        perm = node.get_attr('perm')
+        s_len = len(val_x.out_shapes[0])
+        perm_default = list(range(s_len))
+        perm_default.reverse()
+        perm = node.get_attr('perm', perm_default)
         self.paddle_graph.add_layer(
             "paddle.transpose",
             inputs={"x": val_x.name},
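Note (editorial, not part of the commit): the Transpose change supplies a default perm when the ONNX node omits the attribute; per the ONNX spec the default is to reverse all axes. For a rank-4 input the computed default is:

    s_len = 4                         # assumed rank of the input
    perm_default = list(range(s_len))
    perm_default.reverse()
    print(perm_default)               # [3, 2, 1, 0]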
@@ -1584,6 +1635,7 @@ class OpSet9():
             strides[1])
         paddings = pad_h + pad_w
+        layer_inputs = {'x': val_x if isinstance(val_x, str) else val_x.name}
         layer_attrs = {
             "in_channels": num_in_channels * num_groups,
             "out_channels": num_out_channels,
@@ -1592,15 +1644,25 @@ class OpSet9():
             "padding": paddings,
             "dilation": dilations,
             "groups": num_groups,
-            'weight_attr': string(val_w.name),
         }
+        val_w_name = val_w.name
+        while val_w_name in self.done_weight_list:
+            val_w_name += "__repeat"
+        self.done_weight_list.append(val_w_name)
+        layer_attrs["weight_attr"] = string(val_w_name)
+        self.weights[val_w_name] = self.weights[val_w.name]
         if has_bias:
-            layer_attrs["bias_attr"] = string(val_b.name)
+            val_b_name = val_b.name
+            while val_b_name in self.done_weight_list:
+                val_b_name += "__repeat"
+            self.done_weight_list.append(val_b_name)
+            layer_attrs["bias_attr"] = string(val_b_name)
+            self.weights[val_b_name] = self.weights[val_b.name]
         else:
             layer_attrs["bias_attr"] = False
         self.paddle_graph.add_layer(
             paddle_op,
-            inputs={'x': val_x if isinstance(val_x, str) else val_x.name},
+            inputs=layer_inputs,
             outputs=layer_outputs,
             **layer_attrs)
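Note (editorial, not part of the commit): the done_weight_list bookkeeping gives every convolution its own parameter name even when several ONNX nodes share one initializer, appending "__repeat" as needed. A self-contained sketch of the renaming behaviour (names are made up):

    done_weight_list = []
    weights = {"conv_w": "tensor-0"}

    def register(name):
        new_name = name
        while new_name in done_weight_list:
            new_name += "__repeat"
        done_weight_list.append(new_name)
        weights[new_name] = weights[name]
        return new_name

    print(register("conv_w"))         # conv_w
    print(register("conv_w"))         # conv_w__repeat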
@@ -1674,8 +1736,13 @@ class OpSet9():
         val_x = self.graph.get_input_node(node, idx=0, copy=True)
         self.paddle_graph.add_layer(
             "paddle.shape",
-            inputs={"x": val_x.name},
+            inputs={"input": val_x.name},
             outputs=[node.name])
+        self.paddle_graph.add_layer(
+            'paddle.cast',
+            inputs={"x": node.name},
+            outputs=[node.name],
+            dtype=string('int64'))
         self.paddle_graph.add_layer(
             "paddle.prod",
             inputs={"x": node.name},
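Note (editorial, not part of the commit): the Size mapping now casts the shape tensor to int64 before multiplying its entries. A rough equivalent of what the emitted graph computes (not the generated code itself):

    import paddle

    x = paddle.rand([2, 3, 4])
    shape = paddle.cast(paddle.shape(x), "int64")
    print(paddle.prod(shape).item())  # 24, the ONNX Size of x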
@@ -1684,10 +1751,22 @@ class OpSet9():
     @print_mapping_info
     def Sign(self, node):
         val_x = self.graph.get_input_node(node, idx=0, copy=True)
+        if node.dtype not in ["float16", "float32", "float64"]:
+            self.paddle_graph.add_layer(
+                "paddle.cast",
+                inputs={"x": val_x.name},
+                outputs=[val_x.name],
+                dtype=string("float32"))
         self.paddle_graph.add_layer(
             "paddle.sign",
             inputs={"x": val_x.name},
             outputs=[node.name])
+        if node.dtype not in ["float16", "float32", "float64"]:
+            self.paddle_graph.add_layer(
+                "paddle.cast",
+                inputs={"x": node.name},
+                outputs=[node.name],
+                dtype=string(node.dtype))

     @print_mapping_info
     def OneHot(self, node):
...
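Note (editorial, not part of the commit): the Sign mapping now wraps paddle.sign in a cast round-trip for integer inputs, presumably because paddle.sign only accepts floating-point tensors in the targeted Paddle version. A rough equivalent of what the generated code does:

    import paddle

    x = paddle.to_tensor([-3, 0, 5], dtype="int64")
    y = paddle.cast(paddle.sign(paddle.cast(x, "float32")), "int64")
    print(y.numpy())                  # [-1  0  1]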