Commit e6c908f6 authored by channingss

support model Face_cyclegan

Parent f8f633b9
from .register import register
from x2paddle.core.util import *


def InstanceNormalization_shape(input_shape):
    return input_shape


def InstanceNormalization_layer(inputs, name=None):
    # TODO(lvmengsi@baidu.com): Check the accuracy when using fluid.layers.layer_norm.
    epsilon = 1e-5
    # per-channel mean and variance over the spatial dimensions (NCHW layout)
    mean = fluid.layers.reduce_mean(inputs, dim=[2, 3], keep_dim=True)
    var = fluid.layers.reduce_mean(fluid.layers.square(inputs - mean),
                                   dim=[2, 3],
                                   keep_dim=True)
    if name is not None:
        scale_name = name + "_scale"
        offset_name = name + "_offset"
    # learnable per-channel scale and offset parameters
    scale_param = fluid.ParamAttr(name=scale_name,
                                  initializer=fluid.initializer.Constant(1.0),
                                  trainable=True)
    offset_param = fluid.ParamAttr(name=offset_name,
                                   initializer=fluid.initializer.Constant(0.0),
                                   trainable=True)
    scale = fluid.layers.create_parameter(attr=scale_param,
                                          shape=inputs.shape[1:2],
                                          dtype="float32")
    offset = fluid.layers.create_parameter(attr=offset_param,
                                           shape=inputs.shape[1:2],
                                           dtype="float32")

    # (inputs - mean) * scale / sqrt(var + epsilon) + offset
    tmp = fluid.layers.elementwise_mul(x=(inputs - mean), y=scale, axis=1)
    tmp = tmp / fluid.layers.sqrt(var + epsilon)
    tmp = fluid.layers.elementwise_add(tmp, offset, axis=1)
    return tmp


def InstanceNormalization_weights(name, data=None):
    weights_name = [name + '_scale']
    return weights_name


register(kind='InstanceNormalization',
         shape=InstanceNormalization_shape,
         layer=InstanceNormalization_layer,
         weights=InstanceNormalization_weights)
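For reference, the layer function above implements the standard instance-normalization formula: each channel is normalized over its spatial dimensions and then rescaled by the learned per-channel scale and offset. A minimal NumPy sketch of the same computation (an illustration only, not part of this commit; the function name is made up):

import numpy as np


def instance_norm_reference(x, scale, offset, epsilon=1e-5):
    # x: NCHW feature map; scale/offset: per-channel vectors of length C
    mean = x.mean(axis=(2, 3), keepdims=True)
    var = ((x - mean) ** 2).mean(axis=(2, 3), keepdims=True)
    normalized = (x - mean) / np.sqrt(var + epsilon)
    return normalized * scale.reshape(1, -1, 1, 1) + offset.reshape(1, -1, 1, 1)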
from .register import get_registered_layers
#custom layer import begins
from . import InstanceNormalization
#custom layer import ends
custom_layers = get_registered_layers()
def set_args(f, params):
    """ set args for function 'f' using the parameters in node.layer.param

    Args:
        f (function): a python function object
        params (object): an object that contains the attributes needed by f's arguments

    Returns:
        arg_names (list): a list of argument names
        kwargs (dict): a dict of the needed arguments
    """
    argc = f.__code__.co_argcount
    arg_list = f.__code__.co_varnames[0:argc]
    kwargs = {}
    for arg_name in arg_list:
        if params is not None and hasattr(params, arg_name):
            kwargs[arg_name] = getattr(params, arg_name)
    return arg_list, kwargs
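To illustrate how set_args filters parameters, the sketch below (hypothetical names, not part of this commit) passes an object with one extra attribute; only the attributes whose names match the function's arguments end up in kwargs:

from types import SimpleNamespace


def example_shape(input_shape, min_size=None, max_size=None):
    return input_shape


params = SimpleNamespace(min_size=30, max_size=60, unrelated_field=1)
arg_names, kwargs = set_args(example_shape, params)
# arg_names == ('input_shape', 'min_size', 'max_size')
# kwargs == {'min_size': 30, 'max_size': 60}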
def has_layer(layer_type):
    """ test whether this layer exists in the custom layers
    """
    return layer_type in custom_layers


def get_params(layer, layer_type):
    """ get the parameter attribute of the layer that matches its type name
    """
    import re
    if layer_type.lower() in ("deconvolution", "convolutiondepthwise"):
        param_name = '_'.join(('convolution', 'param'))
    elif layer_type.lower() == "normalize":
        param_name = '_'.join(('norm', 'param'))
    elif len(layer_type) - len(re.sub("[A-Z]", "", layer_type)) >= 2:
        # convert a CamelCase layer type into the snake_case parameter field name
        s = ''
        tmp_name = ''
        for i, ch in enumerate(layer_type):
            if i == 0:
                s += ch.lower()
                continue
            elif ch.isupper() and layer_type[i - 1].islower():
                tmp_name += (s + '_')
                s = ''
            s += ch.lower()
        tmp_name += s
        param_name = '_'.join((tmp_name, 'param'))
    else:
        param_name = '_'.join((layer_type.lower(), 'param'))

    return getattr(layer, param_name, None)
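The CamelCase branch above turns a multi-word layer type into the snake_case name of its parameter field, so 'InstanceNormalization' maps to the attribute name 'instance_normalization_param'. A rough illustration (the layer object and its field contents are hypothetical):

from types import SimpleNamespace

layer = SimpleNamespace(instance_normalization_param=SimpleNamespace(epsilon=1e-5))
params = get_params(layer, 'InstanceNormalization')
# params.epsilon == 1e-5; if the layer has no matching attribute, None is returned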
def compute_output_shape(node):
    """ compute the output shape of the custom layer
    """
    layer_type = node.layer_type
    assert layer_type in custom_layers, "layer[%s] does not exist in custom layers" % (
        layer_type)
    shape_func = custom_layers[layer_type]['shape']
    layer = node.layer
    params = get_params(layer, layer_type)
    arg_names, kwargs = set_args(shape_func, params)
    input_shape = node.input_shape
    return shape_func(input_shape, **kwargs)


def make_custom_layer(node):
    """ get the code which implements the custom layer function
    """
    layer_type = node.layer_type
    assert layer_type in custom_layers, "layer[%s] does not exist in custom layers" % (
        layer_type)
    layer_func = custom_layers[layer_type]['layer']
    import inspect
    return inspect.getsource(layer_func), layer_func


def deal_weights(node, data=None):
    """ handle the weights of the custom layer
    """
    layer_type = node.layer_type
    weights_func = custom_layers[layer_type]['weights']
    name = node.layer_name
    return weights_func(name, data)
""" this module provides 'register' for registering customized layers
"""
g_custom_layers = {}
def register(kind, shape, layer, weights):
    """ register a custom layer or a list of custom layers

    Args:
        @kind (str or list): type name of the layer
        @shape (function): a function to generate the shape of layer's output
        @layer (function): a function to generate the paddle code of layer
        @weights (function): a function to deal with weights data

    Returns:
        None
    """
    assert type(shape).__name__ == 'function', 'shape should be a function'
    assert type(layer).__name__ == 'function', 'layer should be a function'

    if type(kind) is str:
        kind = [kind]
    else:
        assert type(kind) is list, 'invalid param "kind" for register, not a list or str'

    for k in kind:
        assert type(k) is str, 'invalid param "kind" for register, not a list of str'
        assert k not in g_custom_layers, 'this type[%s] has already been registered' % (k)
        print('register layer[%s]' % (k))
        g_custom_layers[k] = {
            'shape': shape,
            'layer': layer,
            'weights': weights
        }


def get_registered_layers():
    return g_custom_layers
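For completeness, a hedged sketch of how another module could register its own custom layers with this registry; the 'MyLayer' callbacks below are placeholders, and the real registrations live in files such as the InstanceNormalization module above:

from .register import register, get_registered_layers


def MyLayer_shape(input_shape):
    return input_shape


def MyLayer_layer(inputs, name=None):
    return inputs


def MyLayer_weights(name, data=None):
    return []


# 'kind' may also be a list of type names that share the same three callbacks.
register(kind=['MyLayerA', 'MyLayerB'],
         shape=MyLayer_shape,
         layer=MyLayer_layer,
         weights=MyLayer_weights)

assert 'MyLayerA' in get_registered_layers()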