Commit 43a78838 authored by walloollaw, committed by qingqing01

caffe2fluid: fix bug in leaky-relu and add interface for prelu conversion (#1086)

* caffe2fluid: fix bug in leaky-relu and add interface for prelu conversion
* caffe2fluid: fix code style problem in graph.py
Parent 1ae49ef4
@@ -24,15 +24,10 @@ def calc_diff(f1, f2):
    #print d2.shape
    #print d1[0, 0, 0:10, 0:10]
    #print d2[0, 0, 0:10, 0:10]
    #d1 = d1[:, :, 1:-2, 1:-2]
    #d2 = d2[:, :, 1:-2, 1:-2]
    d1 = d1.flatten()
    d2 = d2.flatten()
    #print d1[:10]
    #print d2[:10]
    d1_num = reduce(lambda x, y: x * y, d1.shape)
    d2_num = reduce(lambda x, y: x * y, d2.shape)
    if d1_num != d2_num:
@@ -41,7 +36,11 @@ def calc_diff(f1, f2):
    assert (d1_num == d2_num), "their shape is not consistent"
    try:
        mask = np.abs(d1) >= np.abs(d2)
        mask = mask.astype('int32')
        df = np.abs(d1 - d2)
        df = df / (1.0e-10 + np.abs(d1) * mask + np.abs(d2) * (1 - mask))
        max_df = np.max(df)
        sq_df = np.mean(df * df)
        return max_df, sq_df
......
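The new diff metric normalizes the elementwise absolute difference by the larger of the two magnitudes, so max_df reads as a relative error. A minimal numpy sketch of the same computation on toy inputs (the helper name is illustrative, not from the repo):

import numpy as np

def relative_diff(d1, d2):
    # Normalize |d1 - d2| by max(|d1|, |d2|), elementwise;
    # the 1.0e-10 term guards against division by zero.
    d1, d2 = d1.flatten(), d2.flatten()
    mask = (np.abs(d1) >= np.abs(d2)).astype('int32')
    df = np.abs(d1 - d2) / (1.0e-10 + np.abs(d1) * mask + np.abs(d2) * (1 - mask))
    return np.max(df), np.mean(df * df)

max_df, sq_df = relative_diff(np.array([1.0, 2.0]), np.array([1.01, 1.98]))
print(max_df, sq_df)  # ~0.01, ~9.9e-05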
@@ -39,6 +39,7 @@ LAYER_DESCRIPTORS = {
    'Pooling': shape_pool,
    'Power': shape_identity,
    'ReLU': shape_identity,
    'PReLU': shape_identity,
    'Scale': shape_identity,
    'Sigmoid': shape_identity,
    'SigmoidCrossEntropyLoss': shape_scalar,
......
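PReLU is registered with shape_identity because it is an elementwise activation: the output tensor has exactly the input's shape. A sketch of what such a descriptor typically does, assuming nodes expose their parents' output shapes (illustrative, not copied from the repo):

def shape_identity(node):
    # Elementwise ops (ReLU, PReLU, Sigmoid, ...) do not change the
    # tensor shape, so forward the single parent's output shape.
    assert len(node.parents) > 0
    return node.parents[0].output_shape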
@@ -240,10 +240,16 @@ class Network(object):
    @layer
    def relu(self, input, name):
        fluid = import_fluid()
        output = fluid.layers.relu(
            name=self.get_unique_output_name(name, 'relu'), x=input)
        output = fluid.layers.relu(input)
        return output

    @layer
    def prelu(self, input, channel_shared, name):
        #fluid = import_fluid()
        #output = fluid.layers.relu(input)
        #return output
        raise NotImplementedError('prelu not implemented')
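The conversion interface is added here but left unimplemented. One way to fill it in, assuming a fluid build that provides fluid.layers.prelu with its 'all'/'channel' modes (a sketch, not part of this commit):

@layer
def prelu(self, input, channel_shared, name):
    # Sketch only: caffe's channel_shared flag chooses between one
    # learnable slope for the whole tensor ('all') and one slope per
    # channel ('channel').
    fluid = import_fluid()
    mode = 'all' if channel_shared else 'channel'
    return fluid.layers.prelu(
        input, mode, name=self.get_unique_output_name(name, 'prelu'))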
    def pool(self, pool_type, input, k_h, k_w, s_h, s_w, ceil_mode, padding,
             name):
        # Get the number of channels in the input
@@ -382,7 +388,8 @@ class Network(object):
            name,
            scale_offset=True,
            eps=1e-5,
            relu=False):
            relu=False,
            relu_negative_slope=0.0):
        # NOTE: Currently, only inference is supported
        fluid = import_fluid()
        prefix = name + '_'
@@ -392,6 +399,15 @@ class Network(object):
            name=prefix + 'offset')
        mean_name = prefix + 'mean'
        variance_name = prefix + 'variance'

        leaky_relu = False
        act = 'relu'
        if relu is False:
            act = None
        elif relu_negative_slope != 0.0:
            leaky_relu = True
            act = None

        output = fluid.layers.batch_norm(
            name=self.get_unique_output_name(name, 'batch_norm'),
            input=input,
@@ -401,7 +417,10 @@ class Network(object):
            moving_mean_name=mean_name,
            moving_variance_name=variance_name,
            epsilon=eps,
            act='relu' if relu is True else None)
            act=act)

        if leaky_relu:
            output = fluid.layers.leaky_relu(output, alpha=relu_negative_slope)

        return output
......
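The detour through a separate layer is the actual leaky-relu fix: batch_norm's act argument only names a parameter-free activation, while a caffe ReLU with nonzero negative_slope carries an extra parameter, so the slope is applied afterwards via fluid.layers.leaky_relu. A numpy reference for the activation itself (illustrative, not repo code):

import numpy as np

def leaky_relu_ref(x, alpha):
    # leaky_relu(x) = x for x >= 0 and alpha * x otherwise, which for
    # 0 <= alpha <= 1 equals the elementwise max(x, alpha * x).
    return np.maximum(x, alpha * x)

print(leaky_relu_ref(np.array([-2.0, 0.0, 3.0]), alpha=0.1))  # [-0.2  0.  3.]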
@@ -112,6 +112,13 @@ class PaddleMapper(NodeMapper):
    def map_relu(self, node):
        return PaddleNode('relu')

    def map_prelu(self, node):
        channel_shared = getattr(node.parameters, 'channel_shared', False)
        return PaddleNode('prelu', channel_shared)

    def map_tanh(self, node):
        return PaddleNode('tanh')

    def map_pooling(self, node):
        pool_type = node.parameters.pool
        if pool_type == 0:
......
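channel_shared comes from caffe's PReLU layer parameters; the getattr default keeps prototxts that omit the field working. A quick illustration with a stand-in node (the namedtuples below are mocks, not the repo's Node class):

from collections import namedtuple

Params = namedtuple('Params', ['channel_shared'])
Node = namedtuple('Node', ['parameters'])

node = Node(parameters=Params(channel_shared=True))
print(getattr(node.parameters, 'channel_shared', False))  # True -> PaddleNode('prelu', True)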