Commit 9bdc02ef authored by driftcloudy

caffe2paddle: add relu6/upsample layers, support LeakyReLU, fix axpy/dropout

Parent 842108b5
......@@ -88,6 +88,49 @@ class CaffeGraph(Graph):
# filter them out here.
if (not exclude) and (phase == 'test'):
exclude = (type_str == 'Dropout')
'''
If the Dropout layer is to be removed, what was written here originally
was not enough: the bottom of the following layer must also be repointed.
For example:
layer {
  name: "pool_8x8_s1"
  type: "Pooling"
  bottom: "inception_c2_concat"
  top: "pool_8x8_s1"
  pooling_param {
    pool: AVE
    global_pooling: true
  }
}
layer {
  name: "pool_8x8_s1_drop"
  type: "Dropout"
  bottom: "pool_8x8_s1"
  top: "pool_8x8_s1_drop"
  dropout_param {
    dropout_ratio: 0.2
  }
}
layer {
  name: "classifier"
  type: "InnerProduct"
  bottom: "pool_8x8_s1_drop"
}
With a prototxt of this form, simply dropping pool_8x8_s1_drop fails:
the layer after the dropout can no longer find a valid bottom and an
error is raised. Its bottom must be repointed to the layer above the
dropout (a standalone sketch of this rewiring follows the hunk).
'''
if layer.type == 'Dropout':
drop_layer_top = layer.top[0]
drop_layer_bottom = layer.bottom[0]
if drop_layer_top != drop_layer_bottom:
for next_layer in layers:
for next_layer_bottom_idx, next_layer_bottom in enumerate(next_layer.bottom):
if drop_layer_top == next_layer_bottom:
                            # Repoint in place; remove()+insert() would drop the
                            # first matching entry, not necessarily this one.
                            next_layer.bottom[next_layer_bottom_idx] = drop_layer_bottom
if not exclude:
filtered_layers.append(layer)
# Guard against dupes.
......
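A minimal runnable sketch of the bottom-rewiring above, using a hypothetical Layer stand-in instead of the real Caffe protobuf messages (class and field names here are illustrative):

# Sketch of the dropout bottom-rewiring, with a hypothetical Layer stand-in.
class Layer(object):
    def __init__(self, name, type_str, bottom, top):
        self.name, self.type = name, type_str
        self.bottom, self.top = bottom, top

layers = [
    Layer('pool_8x8_s1', 'Pooling', ['inception_c2_concat'], ['pool_8x8_s1']),
    Layer('pool_8x8_s1_drop', 'Dropout', ['pool_8x8_s1'], ['pool_8x8_s1_drop']),
    Layer('classifier', 'InnerProduct', ['pool_8x8_s1_drop'], ['classifier']),
]

for layer in layers:
    if layer.type == 'Dropout' and layer.top[0] != layer.bottom[0]:
        drop_top, drop_bottom = layer.top[0], layer.bottom[0]
        for next_layer in layers:
            # Repoint any bottom that referenced the dropout's top.
            next_layer.bottom = [drop_bottom if b == drop_top else b
                                 for b in next_layer.bottom]

print(layers[2].bottom)  # ['pool_8x8_s1'] -- classifier now skips the dropout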
......@@ -10,6 +10,8 @@ from . import select
from . import shufflechannel
from . import convolutiondepthwise
from . import axpy
from . import upsample
from . import relu6
#custom layer import ends
custom_layers = get_registered_layers()
......
......@@ -2,7 +2,7 @@ from .register import register
from x2paddle.core.util import *
def axpy_shape(input_shape):
def axpy_shape(input_shapes):
assert len(input_shapes) == 3, "not valid input shape for axpy layer"
assert len(input_shapes[0]) == len(input_shapes[1]), 'should have same dims'
output_shape = input_shapes[1]
......@@ -18,7 +18,7 @@ def axpy_layer(inputs, input_shape=None, name=None):
y = inputs[2]
out = fluid.layers.elementwise_mul(x, alpha, axis=0)
out = fluid.layers.elementwise_add(out, y, name=name)
print(out)
return out
def axpy_weights(name, data=None):
......
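For context, the Axpy layer computes out = alpha * x + y, where alpha has shape (N, C, 1, 1) and broadcasts over x and y of shape (N, C, H, W). A NumPy sketch of the same arithmetic (shapes assumed for illustration, not taken from the diff):

# NumPy sketch of the Axpy arithmetic: out = alpha * x + y.
import numpy as np

n, c, h, w = 2, 3, 4, 4
alpha = np.random.rand(n, c, 1, 1).astype('float32')  # inputs[0]
x = np.random.rand(n, c, h, w).astype('float32')      # inputs[1]
y = np.random.rand(n, c, h, w).astype('float32')      # inputs[2]

out = alpha * x + y  # what elementwise_mul + elementwise_add produce above
assert out.shape == y.shape  # matches axpy_shape: output takes input_shapes[1]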
from .register import register
from x2paddle.core.util import *
def relu6_shape(input_shape):
return input_shape
def relu6_layer(inputs, input_shape=None, name=None):
input = inputs[0]
out = fluid.layers.relu6(x=input)
return out
def relu6_weights(name, data=None):
weights_name = []
return weights_name
register(
kind='ReLU6',
shape=relu6_shape,
layer=relu6_layer,
weights=relu6_weights)
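ReLU6 clamps activations to the range [0, 6], i.e. out = min(max(x, 0), 6). A NumPy sketch of the semantics:

# NumPy sketch of ReLU6: out = min(max(x, 0), 6).
import numpy as np

x = np.array([-3.0, 0.5, 7.2], dtype='float32')
out = np.minimum(np.maximum(x, 0.0), 6.0)
print(out)  # [0.  0.5 6. ]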
# -*- coding: utf-8 -*-
################################################################################
#
# Copyright (c) 2020 Baidu.com, Inc. All Rights Reserved
#
################################################################################
"""
Author: Drift
Email: wutuobang@baidu.com
Date: 2020/04/22 18:45
"""
from .register import register
from x2paddle.core.util import *
def upsample_shape(input_shapes, scale):
"""
    Compute the NCHW output shape: H and W are multiplied by scale.
    :param input_shapes: list containing a single NCHW input shape
    :param scale: integer upsampling factor
    :return: list containing the single NCHW output shape
"""
assert len(input_shapes) == 1, "not valid input shape for upsample layer"
assert type(scale) is int
input_shape = input_shapes[0]
new_h = scale * input_shape[2]
new_w = scale * input_shape[3]
output_shape = [input_shape[0], input_shape[1], new_h, new_w]
return [output_shape]
def upsample_layer(inputs, scale, input_shape=None, name=None):
"""
    Build a nearest-neighbour resize that enlarges H and W by scale.
    :param inputs: list containing the single input variable
    :param scale: integer upsampling factor
    :param input_shape: unused, kept for the custom-layer interface
    :param name: name for the output variable
    :return: the resized variable
"""
x = inputs[0]
out = fluid.layers.resize_nearest(x,
align_corners=False,
scale=scale,
name=name)
return out
def upsample_weights(name, data=None):
"""
    Upsample has no learnable weights.
    :param name: layer name
    :param data: unused
    :return: empty list of weight names
"""
weights_name = []
return weights_name
register(kind='Upsample', shape=upsample_shape, layer=upsample_layer, weights=upsample_weights)
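With an integer scale and align_corners=False, nearest-neighbour resizing amounts to repeating each pixel scale times along H and W. A NumPy sketch that mirrors the shape rule in upsample_shape:

# NumPy sketch of integer-scale nearest-neighbour upsampling (NCHW).
import numpy as np

scale = 2
x = np.arange(16, dtype='float32').reshape(1, 1, 4, 4)
out = x.repeat(scale, axis=2).repeat(scale, axis=3)  # repeat rows, then cols
assert out.shape == (1, 1, 4 * scale, 4 * scale)     # matches upsample_shape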
......@@ -23,7 +23,6 @@ from x2paddle.op_mapper.caffe_custom_layer import *
class CaffeOpMapper(OpMapper):
directly_map_ops = {
'ReLU': 'relu',
'AbsVal': 'abs',
'Sigmoid': 'sigmoid',
'TanH': 'tanh',
......@@ -435,6 +434,30 @@ class CaffeOpMapper(OpMapper):
node.fluid_code.add_layer(
"concat", inputs=inputs, output=node, param_attr=attr)
def ReLU(self, node):
"""
        Map Caffe ReLU to fluid relu, or to leaky_relu when the layer
        carries a non-zero negative_slope.
        :param node: graph node for the ReLU layer
        :return: None; generated code is appended to node.fluid_code
"""
assert len(
node.inputs) == 1, 'The count of ReLU node\'s input is not 1.'
input = self.graph.get_bottom_node(node, idx=0, copy=True)
        # A non-zero negative_slope makes this equivalent to LeakyReLU
params = node.layer.relu_param
if params.HasField('negative_slope') and params.negative_slope != 0:
negative_slope = float(params.negative_slope)
attr = {
'alpha': negative_slope
}
node.fluid_code.add_layer(
'leaky_relu', inputs=input, output=node, param_attr=attr)
else:
node.fluid_code.add_layer(
'relu', inputs=input, output=node)
def PReLU(self, node):
assert len(
node.inputs) == 1, 'The count of PReLU node\'s input is not 1.'
......
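The mapping above relies on leaky_relu reducing to plain relu when alpha is 0: leaky_relu(x) = x for x > 0 and alpha * x otherwise. A NumPy sketch:

# NumPy sketch: Caffe ReLU with negative_slope behaves like leaky_relu.
import numpy as np

def leaky_relu(x, alpha):
    return np.where(x > 0, x, alpha * x)

x = np.array([-2.0, -0.5, 1.5], dtype='float32')
print(leaky_relu(x, 0.0))  # [ 0.    0.    1.5 ] -> ordinary ReLU
print(leaky_relu(x, 0.1))  # [-0.2  -0.05  1.5 ]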