Commit d43f75b2, authored by: C Channingss

add clip,tanh,log

Parent 9b8cd312
@@ -41,32 +41,18 @@ class OpSet11(OpSet10):
             outputs=op.output('Out'), )
         return [min_node, max_node, node]
 
-    def pad2d(self, op, block):
-        x_shape = block.var(op.input('X')[0]).shape
-        paddings = op.attr('paddings')
-        onnx_pads = []
-        #TODO support pads is Variable
-        if op.attr('data_format') == 'NCHW':
-            pads = [
-                0, 0, paddings[0], paddings[2], 0, 0, paddings[1], paddings[3]
-            ]
-        else:
-            pads = [
-                0, paddings[0], paddings[2], 0, 0, paddings[1], paddings[3], 0
-            ]
-        pads_name = self.get_name(op.type, 'pads')
-        pads_node = self.make_constant_node(pads_name,
-                                            onnx_pb.TensorProto.INT64, pads)
-        constant_value_name = self.get_name(op.type, 'constant_value')
-        constant_value_node = self.make_constant_node(constant_value_name,
-                                                      onnx_pb.TensorProto.FLOAT,
-                                                      op.attr('pad_value'))
-        node = helper.make_node(
-            'Pad',
-            inputs=op.input('X') + [pads_name, constant_value_name],
-            outputs=op.output('Out'),
-            mode=op.attr('mode'))
-        return [pads_node, constant_value_node, node]
+    def clip(self, op, block):
+        min_name = self.get_name(op.type, 'min')
+        max_name = self.get_name(op.type, 'max')
+        min_node = self.make_constant_node(min_name, onnx_pb.TensorProto.FLOAT,
+                                           op.attr('min'))
+        max_node = self.make_constant_node(max_name, onnx_pb.TensorProto.FLOAT,
+                                           op.attr('max'))
+        node = helper.make_node(
+            'Clip',
+            inputs=[op.input('X')[0], min_name, max_name],
+            outputs=op.output('Out'))
+        return [min_node, max_node, node]
 
     def bilinear_interp(self, op, block):
         input_names = op.input_names
......
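Context for the hunk above (not part of the commit): since ONNX opset 11, `Clip` takes its min/max bounds as inputs rather than attributes, which is why the converter emits two `Constant` nodes ahead of the `Clip` node. Below is a minimal standalone sketch of the resulting three-node subgraph, assuming only that the `onnx` package is installed; the tensor names `clip_min`/`clip_max` and the bound values 0.0/6.0 are illustrative, not taken from the source.

```python
# Illustrative sketch of the opset-11 Clip pattern: bounds come in as inputs,
# so two Constant nodes feed the Clip node (mirrors OpSet11.clip's output).
import onnx
from onnx import helper, TensorProto

min_node = helper.make_node(
    'Constant', inputs=[], outputs=['clip_min'],
    value=helper.make_tensor('clip_min', TensorProto.FLOAT, [], [0.0]))
max_node = helper.make_node(
    'Constant', inputs=[], outputs=['clip_max'],
    value=helper.make_tensor('clip_max', TensorProto.FLOAT, [], [6.0]))
clip_node = helper.make_node(
    'Clip', inputs=['x', 'clip_min', 'clip_max'], outputs=['y'])

graph = helper.make_graph(
    [min_node, max_node, clip_node], 'clip_demo',
    inputs=[helper.make_tensor_value_info('x', TensorProto.FLOAT, [3])],
    outputs=[helper.make_tensor_value_info('y', TensorProto.FLOAT, [3])])
model = helper.make_model(graph, opset_imports=[helper.make_opsetid('', 11)])
onnx.checker.check_model(model)  # valid only because min/max are inputs here
```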
@@ -59,7 +59,7 @@ class OpSet9(object):
             'Constant', inputs=[], outputs=[name], value=tensor)
         return node
 
-    def convert_weights(self, program, scope=None):
+    def convert_weights(self, program):
        var_names = program.global_block().vars
        nodes = list()
        for name in var_names:
@@ -68,7 +68,7 @@ class OpSet9(object):
                continue
            if not var.persistable:
                continue
-            weight = np.array(scope.find_var(name).get_tensor())
+            weight = np.array(fluid.global_scope().find_var(name).get_tensor())
            tensor = helper.make_tensor(
                name=name,
                dims=var.shape,
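For readers unfamiliar with `convert_weights`: each persistable variable read out of the Paddle scope ends up as an ONNX tensor built with `helper.make_tensor` and wrapped in a `Constant` node. The snippet below is a hedged sketch of that per-weight step only, using a random numpy array and a hypothetical variable name `fc_0.w_0` instead of a real Paddle scope.

```python
# Sketch of what the loop does for one weight (assumes numpy and onnx only).
import numpy as np
from onnx import helper, TensorProto

weight = np.random.rand(4, 3).astype('float32')  # stand-in for the Paddle tensor
tensor = helper.make_tensor(
    name='fc_0.w_0',                              # hypothetical variable name
    data_type=TensorProto.FLOAT,
    dims=weight.shape,
    vals=weight.flatten().tolist())
node = helper.make_node('Constant', inputs=[], outputs=['fc_0.w_0'], value=tensor)
```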
@@ -110,11 +110,32 @@ class OpSet9(object):
             'Relu', inputs=op.input('X'), outputs=op.output('Out'))
         return node
 
+    def tanh(self, op, block):
+        node = helper.make_node(
+            'Tanh', inputs=op.input('X'), outputs=op.output('Out'))
+        return node
+
+    def log(self, op, block):
+        node = helper.make_node(
+            'Log', inputs=op.input('X'), outputs=op.output('Out'))
+        return node
+
     def sigmoid(self, op, block):
         node = helper.make_node(
             'Sigmoid', inputs=op.input('X'), outputs=op.output('Out'))
         return node
 
+    def clip(self, op, block):
+        min_value = op.attr('min')
+        max_value = op.attr('max')
+        node = helper.make_node(
+            'Clip',
+            inputs=[op.input('X')[0]],
+            outputs=op.output('Out'),
+            max=max_value,
+            min=min_value)
+        return node
+
     def exp(self, op, block):
         node = helper.make_node(
             'Exp', inputs=op.input('X'), outputs=op.output('Out'))
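Note the contrast with the opset-11 hunk above: in opset 9 the `Clip` node still carries `min`/`max` as attributes, so no `Constant` nodes are needed. The snippet below is illustrative only (not from the commit); it shows the attribute form of the node and a quick numpy reference for what the three newly mapped ops compute.

```python
# Numerical reference for Tanh / Log / Clip, plus the attribute-style Clip
# node used for opset <= 10. Values are made up for the demo.
import numpy as np
from onnx import helper

x = np.array([-2.0, 0.5, 8.0], dtype=np.float32)
print(np.tanh(x))             # what the 'Tanh' node computes
print(np.log(np.abs(x)))      # 'Log' expects positive inputs; abs() only for the demo
print(np.clip(x, 0.0, 6.0))   # 'Clip' with min=0.0, max=6.0

clip_node = helper.make_node(
    'Clip', inputs=['x'], outputs=['y'], min=0.0, max=6.0)  # attribute form
```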
@@ -215,28 +236,6 @@ class OpSet9(object):
             pads=op.attr('paddings') + op.attr('paddings'))
         return node
 
-    def pad2d(self, op, block):
-        x_shape = block.var(op.input('X')[0]).shape
-        paddings = op.attr('paddings')
-        onnx_pads = []
-        if op.attr('data_format') == 'NCHW':
-            pads = [
-                0, 0, paddings[0], paddings[2], 0, 0, paddings[1], paddings[3]
-            ]
-        else:
-            pads = [
-                0, paddings[0], paddings[2], 0, 0, paddings[1], paddings[3], 0
-            ]
-        #TODO support pads is Variable
-        node = helper.make_node(
-            'Pad',
-            inputs=op.input('X'),
-            outputs=op.output('Out'),
-            mode=op.attr('mode'),
-            value=op.attr('pad_value'),
-            pads=pads)
-        return node
-
     def softmax(self, op, block):
         axis = op.attr('axis')
         shape = block.var(op.output('Out')[0]).shape
......
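Aside on the removed `pad2d` converter (illustration only, not part of the commit): the code treats Paddle's `paddings` attribute as [top, bottom, left, right] and reorders it into the ONNX Pad layout, where all "begin" values come before all "end" values, one per dimension. The values below are made up to show the reordering.

```python
# How the deleted pad2d code rearranges paddings into ONNX Pad order.
paddings = [1, 2, 3, 4]  # hypothetical values: top, bottom, left, right

# NCHW branch: [N_begin, C_begin, H_begin, W_begin, N_end, C_end, H_end, W_end]
pads_nchw = [0, 0, paddings[0], paddings[2], 0, 0, paddings[1], paddings[3]]
assert pads_nchw == [0, 0, 1, 3, 0, 0, 2, 4]

# NHWC branch: [N_begin, H_begin, W_begin, C_begin, N_end, H_end, W_end, C_end]
pads_nhwc = [0, paddings[0], paddings[2], 0, 0, paddings[1], paddings[3], 0]
assert pads_nhwc == [0, 1, 3, 0, 0, 2, 4, 0]
```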