Unverified · Commit 353244f4 authored by Jiabin Yang and committed by GitHub

test=develop, add FC and test (#16604)

* test=develop, add FC and test

* test=develop, refine code
Parent bd193781
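This commit reworks the imperative `FC` layer so that, like the static-graph `layers.fc`, it can take a list of inputs as well as a single variable: one weight is created per input and the partial `mul` results are summed before the bias and activation are applied. As a minimal usage sketch mirroring the `test_fc` case added at the bottom of this diff (the `fluid.imperative.guard()` / `to_variable` calls and the variable names are assumptions about the imperative API of this release, not part of the change itself):

```python
import numpy as np
import paddle.fluid as fluid
from paddle.fluid.imperative import nn

inp = np.ones([3, 32, 32], dtype='float32')
with fluid.imperative.guard():              # assumed imperative-mode guard
    t = fluid.imperative.to_variable(inp)   # wrap the numpy array as a Variable
    fc1 = nn.FC('fc1', size=4, bias_attr=False, num_flatten_dims=1)
    fc2 = nn.FC('fc2', size=4)
    out = fc2(fc1(t))                       # result shape: [3, 4]
```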
@@ -65,7 +65,7 @@ class LayerObjectHelper(LayerHelperBase):
def _input(self, inputs_in):
inputs = self._multiple_input(inputs_in)
if len(inputs) != 1:
raise "{0} layer only takes one input".format(self.layer_type)
raise "{0} layer only takes one input in".format(self.layer_type)
return inputs[0]
def _multiple_param_attr(self, length, param_attr_in=None):
@@ -74,7 +74,8 @@ class LayerObjectHelper(LayerHelperBase):
param_attr = [param_attr]
if len(param_attr) != 1 and len(param_attr) != length:
raise ValueError("parameter number mismatch")
raise ValueError("parameter number mismatch in {}".format(
self.name))
elif len(param_attr) == 1 and length != 1:
tmp = [None] * length
for i in six.moves.range(length):
@@ -91,6 +92,10 @@ class LayerObjectHelper(LayerHelperBase):
Returns input, param_attr
"""
param_attr_in = ParamAttr._to_attr(param_attr_in)
+ if isinstance(param_attr_in, bool):
+     raise ValueError('Param_attr should not be False in {}'.format(
+         self.name))
inputs = inputs_in if (inputs_in is not None) else []
inputs = self._multiple_input(inputs)
param_attrs = self._multiple_param_attr(len(inputs), param_attr_in)
@@ -112,8 +117,8 @@ class LayerObjectHelper(LayerHelperBase):
if dtype is None:
dtype = each.dtype
elif dtype != each.dtype:
raise ValueError("Data Type mismatch: %d to %d" %
(dtype, each.dtype))
raise ValueError("Data Type mismatch: %d to %d in %s" %
(dtype, each.dtype, self.name))
return dtype
def get_parameter(self, name):
@@ -126,7 +131,8 @@ class LayerObjectHelper(LayerHelperBase):
"""
param = self.main_program.global_block().var(name)
if not isinstance(param, Parameter):
raise ValueError("no Parameter name %s found" % name)
raise ValueError("no Parameter name %s found in %s" %
(name, self.name))
return param
def append_bias_op(self,
@@ -184,7 +190,8 @@ class LayerObjectHelper(LayerHelperBase):
if isinstance(act, six.string_types):
act = {'type': act}
else:
- raise TypeError(str(act) + " should be unicode or str")
+ raise TypeError(
+     str(act) + " should be unicode or str in %s ", self.name)
if (use_cudnn is not None) and use_cudnn:
act['use_cudnn'] = use_cudnn
@@ -211,5 +218,6 @@ class LayerObjectHelper(LayerHelperBase):
"""
param = param
if not isinstance(param, cls):
raise TypeError("The input {0} parameter of method {1} must be {2}",
param, self.layer_type, cls.__name__)
raise TypeError(
"The input {0} parameter of method {1} must be {2}, in layer {3}",
param, self.layer_type, cls.__name__, self.name)
@@ -20,7 +20,7 @@ import numpy as np
from .. import core
from ..layers import utils
from . import layers
- from ..framework import Variable, OpProtoHolder
+ from ..framework import Variable, OpProtoHolder, Parameter
from ..layers import layer_function_generator
from ..param_attr import ParamAttr
from ..initializer import Normal, Constant, NumpyArrayInitializer
@@ -213,46 +213,69 @@ class FC(layers.Layer):
self._param_attr = param_attr
self._bias_attr = bias_attr
self._act = act
+ self.__w = list()
- def _build_once(self, input):
-     input_shape = input.shape
-     param_shape = [
-         reduce(lambda a, b: a * b, input_shape[self._num_flatten_dims:], 1)
-     ] + [self._size]
-     self._w = self.create_parameter(
-         attr=self._param_attr,
-         shape=param_shape,
-         dtype=self._dtype,
-         is_bias=False)
+ @property
+ def _w(self, i=0):
+     return self.__w[i]
-     if self._bias_attr:
-         size = list([self._size])
-         self._b = self.create_parameter(
-             attr=self._bias_attr,
-             shape=size,
-             dtype=self._dtype,
-             is_bias=True)
-     else:
-         self._b = None
+ @_w.setter
+ def _w(self, value, i=0):
+     assert isinstance(value, Parameter)
+     self.__w[i] = value
- def forward(self, input):
-     tmp = self._helper.create_variable_for_type_inference(self._dtype)
-     self._helper.append_op(
-         type="mul",
-         inputs={"X": input,
-                 "Y": self._w},
-         outputs={"Out": tmp},
-         attrs={
-             "x_num_col_dims": self._num_flatten_dims,
-             "y_num_col_dims": 1
-         })
+ def _build_once(self, input):
+     i = 0
+     for inp, param in self._helper.iter_inputs_and_params(input,
+                                                           self._param_attr):
+         input_shape = inp.shape
+         param_shape = [
+             reduce(lambda a, b: a * b, input_shape[self._num_flatten_dims:],
+                    1)
+         ] + [self._size]
+         self.__w.append(
+             self.add_parameter(
+                 '_w%d' % i,
+                 self.create_parameter(
+                     attr=param,
+                     shape=param_shape,
+                     dtype=self._dtype,
+                     is_bias=False)))
+         i += 1
+     size = list([self._size])
+     self._b = self.create_parameter(
+         attr=self._bias_attr, shape=size, dtype=self._dtype, is_bias=True)
-     pre_bias = self._helper.create_variable_for_type_inference(self._dtype)
-     self._helper.append_op(
-         type="sum",
-         inputs={"X": [tmp]},
-         outputs={"Out": pre_bias},
-         attrs={"use_mkldnn": False})
+ def forward(self, input):
+     mul_results = list()
+     i = 0
+     for inp, param in self._helper.iter_inputs_and_params(input,
+                                                           self._param_attr):
+         tmp = self._helper.create_variable_for_type_inference(self._dtype)
+         self._helper.append_op(
+             type="mul",
+             inputs={"X": inp,
+                     "Y": self.__w[i]},
+             outputs={"Out": tmp},
+             attrs={
+                 "x_num_col_dims": self._num_flatten_dims,
+                 "y_num_col_dims": 1
+             })
+         i += 1
+         mul_results.append(tmp)
+     if len(mul_results) == 1:
+         pre_bias = mul_results[0]
+     else:
+         pre_bias = self._helper.create_variable_for_type_inference(
+             self._dtype)
+         self._helper.append_op(
+             type="sum",
+             inputs={"X": mul_results},
+             outputs={"Out": pre_bias},
+             attrs={"use_mkldnn": False})
if self._b:
pre_activation = self._helper.create_variable_for_type_inference(
......
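As a reading aid, a small NumPy sketch of what the reworked `forward` computes: one `mul` per (input, weight) pair, a `sum` over the partial results (skipped when there is only one input), then the optional bias. This is a simplified 2-D reference assuming every input flattens to the same number of rows; `fc_reference` and its arguments are illustrative names, not Paddle API.

```python
import numpy as np
from functools import reduce

def fc_reference(inputs, weights, bias=None, num_flatten_dims=1):
    # One mul per (input, weight) pair: the leading num_flatten_dims dims
    # become rows, everything after them is flattened into columns
    # (mirroring x_num_col_dims in the mul op above).
    partials = []
    for x, w in zip(inputs, weights):
        rows = reduce(lambda a, b: a * b, x.shape[:num_flatten_dims], 1)
        partials.append(x.reshape(rows, -1) @ w)
    # The sum op adds the partial products; a single input skips it.
    pre_bias = partials[0] if len(partials) == 1 else np.add.reduce(partials)
    # A bias of shape [size] broadcasts over rows; the activation would follow.
    return pre_bias if bias is None else pre_bias + bias
```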
@@ -76,6 +76,41 @@ class LayerTest(unittest.TestCase):
class TestLayer(LayerTest):
+ def test_fc(self):
+     # pdb.set_trace()
+     inp = np.ones([3, 32, 32], dtype='float32')
+     with self.static_graph():
+         t = layers.data(
+             name='data',
+             shape=[3, 32, 32],
+             dtype='float32',
+             append_batch_size=False)
+         ret = layers.fc(t, size=4, bias_attr=False, num_flatten_dims=1)
+         ret2 = layers.fc(ret, size=4)
+         static_ret = self.get_static_graph_result(
+             feed={'data': inp}, fetch_list=[ret2])[0]
+     with self.static_graph():
+         t = layers.data(
+             name='data',
+             shape=[3, 32, 32],
+             dtype='float32',
+             append_batch_size=False)
+         fc1 = nn.FC('fc1', size=4, bias_attr=False, num_flatten_dims=1)
+         fc2 = nn.FC('fc2', size=4)
+         ret = fc1(t)
+         ret2 = fc2(ret)
+         static_ret2 = self.get_static_graph_result(
+             feed={'data': inp}, fetch_list=[ret2])[0]
+     with self.dynamic_graph():
+         t = base.to_variable(inp)
+         fc1 = nn.FC('fc1', size=4, bias_attr=False, num_flatten_dims=1)
+         fc2 = nn.FC('fc2', size=4)
+         ret = fc1(t)
+         dy_ret = fc2(ret)
+     self.assertTrue(np.array_equal(static_ret, static_ret2))
+     self.assertTrue(np.array_equal(static_ret, dy_ret._numpy()))
def test_layer_norm(self):
inp = np.ones([3, 32, 32], dtype='float32')
with self.static_graph():
......