Unverified commit 353244f4, authored by Jiabin Yang, committed by GitHub

test=develop, add FC and test (#16604)

* test=develop, add FC and test

* test=develop, refine code
Parent: bd193781
```diff
@@ -65,7 +65,7 @@ class LayerObjectHelper(LayerHelperBase):
     def _input(self, inputs_in):
         inputs = self._multiple_input(inputs_in)
         if len(inputs) != 1:
-            raise "{0} layer only takes one input".format(self.layer_type)
+            raise "{0} layer only takes one input in".format(self.layer_type)
         return inputs[0]
 
     def _multiple_param_attr(self, length, param_attr_in=None):
```
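One note on this hunk: `raise` with a bare string is itself a `TypeError` at runtime on Python 3 ("exceptions must derive from BaseException"), so this message would never reach the user as written. A minimal sketch of the presumably intended check, with the message wrapped in a real exception type:

```python
def _input(self, inputs_in):
    inputs = self._multiple_input(inputs_in)
    if len(inputs) != 1:
        # Raising a bare str is invalid; wrap the message in ValueError.
        raise ValueError("{0} layer only takes one input in".format(
            self.layer_type))
    return inputs[0]
```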
```diff
@@ -74,7 +74,8 @@ class LayerObjectHelper(LayerHelperBase):
             param_attr = [param_attr]
         if len(param_attr) != 1 and len(param_attr) != length:
-            raise ValueError("parameter number mismatch")
+            raise ValueError("parameter number mismatch in {}".format(
+                self.name))
         elif len(param_attr) == 1 and length != 1:
             tmp = [None] * length
             for i in six.moves.range(length):
```
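For context, the rule this hunk enforces: a single `param_attr` is broadcast to every parameter of the layer, while a list must match the parameter count exactly. A standalone sketch of that broadcast logic (a hypothetical helper, not the Paddle API):

```python
import copy

def broadcast_param_attr(param_attr, length, name):
    # Normalize to a list, then either broadcast one attr into `length`
    # independent copies or insist the caller supplied exactly `length`.
    attrs = list(param_attr) if isinstance(param_attr, (list, tuple)) \
        else [param_attr]
    if len(attrs) != 1 and len(attrs) != length:
        raise ValueError("parameter number mismatch in {}".format(name))
    if len(attrs) == 1 and length != 1:
        attrs = [copy.deepcopy(attrs[0]) for _ in range(length)]
    return attrs
```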
```diff
@@ -91,6 +92,10 @@ class LayerObjectHelper(LayerHelperBase):
         Returns input, param_attr
         """
+        param_attr_in = ParamAttr._to_attr(param_attr_in)
+        if isinstance(param_attr_in, bool):
+            raise ValueError('Param_attr should not be False in {}'.format(
+                self.name))
         inputs = inputs_in if (inputs_in is not None) else []
         inputs = self._multiple_input(inputs)
         param_attrs = self._multiple_param_attr(len(inputs), param_attr_in)
```
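`ParamAttr._to_attr` normalizes the accepted spellings of `param_attr` (`None`, `str`, `ParamAttr`, a list, a bool, ...). `False` passes through unchanged because that is how callers disable a bias, which is exactly why it must be rejected for weights here. A hedged illustration, assuming the `paddle.fluid.param_attr` layout of this era:

```python
from paddle.fluid.param_attr import ParamAttr

print(type(ParamAttr._to_attr(None)))   # a default ParamAttr instance
print(ParamAttr._to_attr('fc_w').name)  # 'fc_w'
print(ParamAttr._to_attr(False))        # False -- only sensible for bias_attr
```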
```diff
@@ -112,8 +117,8 @@ class LayerObjectHelper(LayerHelperBase):
             if dtype is None:
                 dtype = each.dtype
             elif dtype != each.dtype:
-                raise ValueError("Data Type mismatch: %d to %d" %
-                                 (dtype, each.dtype))
+                raise ValueError("Data Type mismatch: %d to %d in %s" %
+                                 (dtype, each.dtype, self.name))
         return dtype
 
     def get_parameter(self, name):
```
```diff
@@ -126,7 +131,8 @@ class LayerObjectHelper(LayerHelperBase):
         """
         param = self.main_program.global_block().var(name)
         if not isinstance(param, Parameter):
-            raise ValueError("no Parameter name %s found" % name)
+            raise ValueError("no Parameter name %s found in %s" %
+                             (name, self.name))
         return param
 
     def append_bias_op(self,
```
```diff
@@ -184,7 +190,8 @@ class LayerObjectHelper(LayerHelperBase):
         if isinstance(act, six.string_types):
             act = {'type': act}
         else:
-            raise TypeError(str(act) + " should be unicode or str")
+            raise TypeError(
+                str(act) + " should be unicode or str in %s ", self.name)
 
         if (use_cudnn is not None) and use_cudnn:
             act['use_cudnn'] = use_cudnn
```
```diff
@@ -211,5 +218,6 @@ class LayerObjectHelper(LayerHelperBase):
         """
         param = param
         if not isinstance(param, cls):
-            raise TypeError("The input {0} parameter of method {1} must be {2}",
-                            param, self.layer_type, cls.__name__)
+            raise TypeError(
+                "The input {0} parameter of method {1} must be {2}, in layer {3}",
+                param, self.layer_type, cls.__name__, self.name)
```
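A small aside on the two `TypeError` hunks above: passing `self.name` as a second positional argument does not interpolate the `%s`/`{3}` placeholders; the exception simply carries a tuple of both arguments. A runnable illustration (`'fc1'` stands in for a layer name):

```python
err = TypeError("3.14 should be unicode or str in %s ", "fc1")
print(err)  # ('3.14 should be unicode or str in %s ', 'fc1') -- %s never filled
err = TypeError("{0} should be unicode or str in {1}".format(3.14, "fc1"))
print(err)  # 3.14 should be unicode or str in fc1
```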
```diff
@@ -20,7 +20,7 @@ import numpy as np
 from .. import core
 from ..layers import utils
 from . import layers
-from ..framework import Variable, OpProtoHolder
+from ..framework import Variable, OpProtoHolder, Parameter
 from ..layers import layer_function_generator
 from ..param_attr import ParamAttr
 from ..initializer import Normal, Constant, NumpyArrayInitializer
```
```diff
@@ -213,44 +213,67 @@ class FC(layers.Layer):
         self._param_attr = param_attr
         self._bias_attr = bias_attr
         self._act = act
+        self.__w = list()
+
+    @property
+    def _w(self, i=0):
+        return self.__w[i]
+
+    @_w.setter
+    def _w(self, value, i=0):
+        assert isinstance(value, Parameter)
+        self.__w[i] = value
 
     def _build_once(self, input):
-        input_shape = input.shape
-        param_shape = [
-            reduce(lambda a, b: a * b, input_shape[self._num_flatten_dims:], 1)
-        ] + [self._size]
-        self._w = self.create_parameter(
-            attr=self._param_attr,
-            shape=param_shape,
-            dtype=self._dtype,
-            is_bias=False)
-
-        if self._bias_attr:
-            size = list([self._size])
-            self._b = self.create_parameter(
-                attr=self._bias_attr,
-                shape=size,
-                dtype=self._dtype,
-                is_bias=True)
-        else:
-            self._b = None
+        i = 0
+        for inp, param in self._helper.iter_inputs_and_params(input,
+                                                              self._param_attr):
+            input_shape = inp.shape
+            param_shape = [
+                reduce(lambda a, b: a * b, input_shape[self._num_flatten_dims:],
+                       1)
+            ] + [self._size]
+            self.__w.append(
+                self.add_parameter(
+                    '_w%d' % i,
+                    self.create_parameter(
+                        attr=param,
+                        shape=param_shape,
+                        dtype=self._dtype,
+                        is_bias=False)))
+            i += 1
+
+        size = list([self._size])
+        self._b = self.create_parameter(
+            attr=self._bias_attr, shape=size, dtype=self._dtype, is_bias=True)
 
     def forward(self, input):
-        tmp = self._helper.create_variable_for_type_inference(self._dtype)
-        self._helper.append_op(
-            type="mul",
-            inputs={"X": input,
-                    "Y": self._w},
-            outputs={"Out": tmp},
-            attrs={
-                "x_num_col_dims": self._num_flatten_dims,
-                "y_num_col_dims": 1
-            })
-
-        pre_bias = self._helper.create_variable_for_type_inference(self._dtype)
-        self._helper.append_op(
-            type="sum",
-            inputs={"X": [tmp]},
-            outputs={"Out": pre_bias},
-            attrs={"use_mkldnn": False})
+        mul_results = list()
+        i = 0
+        for inp, param in self._helper.iter_inputs_and_params(input,
+                                                              self._param_attr):
+            tmp = self._helper.create_variable_for_type_inference(self._dtype)
+            self._helper.append_op(
+                type="mul",
+                inputs={"X": inp,
+                        "Y": self.__w[i]},
+                outputs={"Out": tmp},
+                attrs={
+                    "x_num_col_dims": self._num_flatten_dims,
+                    "y_num_col_dims": 1
+                })
+            i += 1
+            mul_results.append(tmp)
+
+        if len(mul_results) == 1:
+            pre_bias = mul_results[0]
+        else:
+            pre_bias = self._helper.create_variable_for_type_inference(
+                self._dtype)
+            self._helper.append_op(
+                type="sum",
+                inputs={"X": mul_results},
+                outputs={"Out": pre_bias},
+                attrs={"use_mkldnn": False})
```
......
```diff
@@ -76,6 +76,41 @@ class LayerTest(unittest.TestCase):
 class TestLayer(LayerTest):
+    def test_fc(self):
+        # pdb.set_trace()
+        inp = np.ones([3, 32, 32], dtype='float32')
+        with self.static_graph():
+            t = layers.data(
+                name='data',
+                shape=[3, 32, 32],
+                dtype='float32',
+                append_batch_size=False)
+            ret = layers.fc(t, size=4, bias_attr=False, num_flatten_dims=1)
+            ret2 = layers.fc(ret, size=4)
+            static_ret = self.get_static_graph_result(
+                feed={'data': inp}, fetch_list=[ret2])[0]
+        with self.static_graph():
+            t = layers.data(
+                name='data',
+                shape=[3, 32, 32],
+                dtype='float32',
+                append_batch_size=False)
+            fc1 = nn.FC('fc1', size=4, bias_attr=False, num_flatten_dims=1)
+            fc2 = nn.FC('fc2', size=4)
+            ret = fc1(t)
+            ret2 = fc2(ret)
+            static_ret2 = self.get_static_graph_result(
+                feed={'data': inp}, fetch_list=[ret2])[0]
+        with self.dynamic_graph():
+            t = base.to_variable(inp)
+            fc1 = nn.FC('fc1', size=4, bias_attr=False, num_flatten_dims=1)
+            fc2 = nn.FC('fc2', size=4)
+            ret = fc1(t)
+            dy_ret = fc2(ret)
+
+        self.assertTrue(np.array_equal(static_ret, static_ret2))
+        self.assertTrue(np.array_equal(static_ret, dy_ret._numpy()))
+
     def test_layer_norm(self):
         inp = np.ones([3, 32, 32], dtype='float32')
         with self.static_graph():
```
......
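For completeness, a minimal dynamic-graph usage sketch mirroring the new test; this assumes the `fluid.imperative` entry points of this release line (`guard`, `to_variable`) and is illustrative rather than part of the commit:

```python
import numpy as np
import paddle.fluid as fluid
from paddle.fluid.imperative import nn

inp = np.ones([3, 32, 32], dtype='float32')
with fluid.imperative.guard():
    fc1 = nn.FC('fc1', size=4, bias_attr=False, num_flatten_dims=1)
    fc2 = nn.FC('fc2', size=4)
    out = fc2(fc1(fluid.imperative.to_variable(inp)))
    print(out._numpy().shape)  # (3, 4)
```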