diff --git a/python/paddle/fluid/imperative/nn.py b/python/paddle/fluid/imperative/nn.py
index 8754e5d4d0c8c829303f1fe9cd39ead36619ac3b..eeca33708418747389f51000d840c9c3b9bac152 100644
--- a/python/paddle/fluid/imperative/nn.py
+++ b/python/paddle/fluid/imperative/nn.py
@@ -209,14 +209,22 @@ class FC(layers.Layer):
     def __init__(self,
                  size,
                  param_attr=None,
+                 bias_attr=None,
                  num_flatten_dims=1,
-                 dtype=core.VarDesc.VarType.FP32):
+                 dtype=core.VarDesc.VarType.FP32,
+                 act=None,
+                 name=None):
         super(FC, self).__init__()
         self._size = size
         self._num_flatten_dims = num_flatten_dims
         self._dtype = dtype
         from ..layer_helper import LayerHelper
-        self._helper = LayerHelper('FC', param_attr=param_attr)
+        self._helper = LayerHelper(
+            'FC',
+            param_attr=param_attr,
+            bias_attr=bias_attr,
+            act=act,
+            name=name)
 
     def _build_once(self, input):
         input_shape = input.shape
@@ -247,4 +255,8 @@ class FC(layers.Layer):
             inputs={"X": [tmp]},
             outputs={"Out": out},
             attrs={"use_mkldnn": False})
-        return out
+        # add bias
+        pre_activation = self._helper.append_bias_op(
+            out, dim_start=self._num_flatten_dims)
+        # add activation
+        return self._helper.append_activation(pre_activation)
diff --git a/python/paddle/fluid/tests/unittests/test_imperative_base.py b/python/paddle/fluid/tests/unittests/test_imperative_base.py
index 478cc13fb5bb775b3a40e674e70555fa50117836..1dd5348a8852d78fde73ab9ddf9d0015e903cb3f 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_base.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_base.py
@@ -21,10 +21,11 @@ from paddle.fluid import core
 
 
 @contextlib.contextmanager
-def new_program_scope():
-    prog = fluid.Program()
-    startup_prog = fluid.Program()
-    scope = fluid.core.Scope()
+def new_program_scope(main=None, startup=None, scope=None):
+    prog = main if main else fluid.Program()
+    startup_prog = startup if startup else fluid.Program()
+    scope = scope if scope else fluid.core.Scope()
     with fluid.scope_guard(scope):
         with fluid.program_guard(prog, startup_prog):
-            yield
+            with fluid.unique_name.guard():
+                yield
diff --git a/python/paddle/fluid/tests/unittests/test_imperative_gan.py b/python/paddle/fluid/tests/unittests/test_imperative_gan.py
new file mode 100644
index 0000000000000000000000000000000000000000..9748e0a37768541b67c48e1084e7ef17ac687cf1
--- /dev/null
+++ b/python/paddle/fluid/tests/unittests/test_imperative_gan.py
@@ -0,0 +1,134 @@
+# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import contextlib
+import unittest
+import numpy as np
+import six
+import sys
+
+import paddle
+import paddle.fluid as fluid
+from paddle.fluid.optimizer import SGDOptimizer
+from paddle.fluid.imperative.nn import Conv2D, Pool2D, FC
+from test_imperative_base import new_program_scope
+
+
+class Discriminator(fluid.imperative.Layer):
+    def __init__(self):
+        super(Discriminator, self).__init__()
+        self._fc1 = FC(size=32, act='elu', name="d_fc1")
+        self._fc2 = FC(size=1, name="d_fc2")
+
+    def forward(self, inputs):
+        x = self._fc1(inputs)
+        return self._fc2(x)
+
+
+class Generator(fluid.imperative.Layer):
+    def __init__(self):
+        super(Generator, self).__init__()
+        self._fc1 = FC(size=64, act='elu', name="g_fc1")
+        self._fc2 = FC(size=64, act='elu', name="g_fc2")
+        self._fc3 = FC(size=1, name="g_fc3")
+
+    def forward(self, inputs):
+        x = self._fc1(inputs)
+        x = self._fc2(x)
+        return self._fc3(x)
+
+
+class TestImperativeGAN(unittest.TestCase):
+    def test_gan_cpu_float32(self):
+        seed = 90
+
+        startup = fluid.Program()
+        startup.random_seed = seed
+        discriminate_p = fluid.Program()
+        scope = fluid.core.Scope()
+        exe = fluid.Executor(fluid.CPUPlace())
+        with new_program_scope(
+                main=discriminate_p, startup=startup, scope=scope):
+            fluid.default_main_program().random_seed = seed
+
+            discriminator = Discriminator()
+            generator = Generator()
+
+            img = fluid.layers.data(
+                name="img", shape=[2, 1], append_batch_size=False)
+            noise = fluid.layers.data(
+                name="noise", shape=[2, 2], append_batch_size=False)
+
+            label = fluid.layers.data(
+                name='label',
+                shape=[2, 1],
+                dtype='float32',
+                append_batch_size=False)
+
+            d_real = discriminator(img)
+            d_loss_real = fluid.layers.reduce_mean(
+                fluid.layers.sigmoid_cross_entropy_with_logits(
+                    x=d_real, label=label))
+
+            d_fake = discriminator(generator(noise))
+            d_loss_fake = fluid.layers.reduce_mean(
+                fluid.layers.sigmoid_cross_entropy_with_logits(
+                    x=d_fake, label=label))
+
+            d_loss = d_loss_real + d_loss_fake
+
+            sgd = SGDOptimizer(learning_rate=1e-3)
+            sgd.minimize(d_loss)
+
+        generate_p = fluid.Program()
+        with new_program_scope(main=generate_p, startup=startup, scope=scope):
+            fluid.default_main_program().random_seed = seed
+
+            discriminator = Discriminator()
+            generator = Generator()
+
+            noise = fluid.layers.data(
+                name="noise", shape=[2, 2], append_batch_size=False)
+            label = fluid.layers.data(
+                name='label',
+                shape=[2, 1],
+                dtype='float32',
+                append_batch_size=False)
+
+            d_fake = discriminator(generator(noise))
+            g_loss = fluid.layers.reduce_mean(
+                fluid.layers.sigmoid_cross_entropy_with_logits(
+                    x=d_fake, label=label))
+
+            sgd = SGDOptimizer(learning_rate=1e-3)
+            sgd.minimize(g_loss)
+
+        img = np.ones([2, 1], np.float32)
+        label = np.ones([2, 1], np.float32)
+        noise = np.ones([2, 2], np.float32)
+        exe.run(startup)
+        d_loss_val = exe.run(discriminate_p,
+                             feed={'img': img,
+                                   'noise': noise,
+                                   'label': label},
+                             fetch_list=[d_loss])[0]
+        g_loss_val = exe.run(generate_p,
+                             feed={'noise': noise,
+                                   'label': label},
+                             fetch_list=[g_loss])[0]
+        sys.stderr.write('d_loss %s, g_loss: %s\n' % (d_loss_val, g_loss_val))
+
+
+if __name__ == '__main__':
+    unittest.main()
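
Usage note (not part of the patch): a minimal sketch of how the extended FC layer could be exercised eagerly once bias_attr, act and name are forwarded to LayerHelper. It assumes the guard() and to_variable() helpers from paddle.fluid.imperative / paddle.fluid.imperative.base used by the other imperative unit tests of this era; the names 'fc1' and 'fc1.b' are illustrative only, not defined by this patch.

    import numpy as np
    import paddle.fluid as fluid
    from paddle.fluid.imperative.nn import FC
    from paddle.fluid.imperative.base import to_variable

    with fluid.imperative.guard():
        # bias_attr, act and name are the arguments this patch adds to FC;
        # forward() now runs mul -> append_bias_op -> append_activation.
        fc = FC(size=4,
                act='elu',
                bias_attr=fluid.ParamAttr(name='fc1.b'),
                name='fc1')
        x = to_variable(np.ones([2, 8], dtype=np.float32))
        y = fc(x)  # shape [2, 4], bias and elu already applied

Routing the new arguments straight through to LayerHelper keeps the imperative FC consistent with the static-graph fluid.layers.fc helper, which applies bias and activation the same way via append_bias_op and append_activation.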