From 87ab93af01247003fa38f79864189ce4f21f41f9 Mon Sep 17 00:00:00 2001
From: Zeng Jinle <32832641+sneaxiy@users.noreply.github.com>
Date: Tue, 3 Dec 2019 10:19:18 +0800
Subject: [PATCH] fix adam fp64, test=develop (#21423)

---
 python/paddle/fluid/optimizer.py      |  3 -
 .../test_adam_optimizer_fp32_fp64.py  | 66 +++++++++++++++++++
 2 files changed, 66 insertions(+), 3 deletions(-)
 create mode 100644 python/paddle/fluid/tests/unittests/test_adam_optimizer_fp32_fp64.py

diff --git a/python/paddle/fluid/optimizer.py b/python/paddle/fluid/optimizer.py
index 230211d3089..97c5168f205 100644
--- a/python/paddle/fluid/optimizer.py
+++ b/python/paddle/fluid/optimizer.py
@@ -1628,14 +1628,12 @@ class AdamOptimizer(Optimizer):
             self._add_accumulator(
                 name=self._beta1_pow_acc_str,
                 param=p,
-                dtype='float32',
                 fill_value=0.9 if isinstance(self._beta1, Variable) \
                     else self._beta1,
                 shape=[1])
             self._add_accumulator(
                 name=self._beta2_pow_acc_str,
                 param=p,
-                dtype='float32',
                 fill_value=0.999 if isinstance(self._beta2, Variable) \
                     else self._beta2,
                 shape=[1])
@@ -1835,7 +1833,6 @@ class AdamaxOptimizer(Optimizer):
             self._add_accumulator(
                 name=self._beta1_pow_acc_str,
                 param=p,
-                dtype='float32',
                 fill_value=self._beta1,
                 shape=[1])
 
diff --git a/python/paddle/fluid/tests/unittests/test_adam_optimizer_fp32_fp64.py b/python/paddle/fluid/tests/unittests/test_adam_optimizer_fp32_fp64.py
new file mode 100644
index 00000000000..5ad83179e3c
--- /dev/null
+++ b/python/paddle/fluid/tests/unittests/test_adam_optimizer_fp32_fp64.py
@@ -0,0 +1,66 @@
+# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import paddle
+import paddle.fluid as fluid
+import unittest
+
+
+def get_places():
+    places = [fluid.CPUPlace()]
+    if fluid.is_compiled_with_cuda():
+        places.append(fluid.CUDAPlace(0))
+    return places
+
+
+def main_test_func(place, dtype):
+    main = fluid.Program()
+    startup = fluid.Program()
+    with fluid.program_guard(main, startup):
+        with fluid.scope_guard(fluid.Scope()):
+            x = fluid.data(name='x', shape=[None, 13], dtype=dtype)
+            y = fluid.data(name='y', shape=[None, 1], dtype=dtype)
+            y_predict = fluid.layers.fc(input=x, size=1, act=None)
+            cost = fluid.layers.square_error_cost(input=y_predict, label=y)
+            avg_cost = fluid.layers.mean(cost)
+
+            adam_optimizer = fluid.optimizer.AdamOptimizer(0.01)
+            adam_optimizer.minimize(avg_cost)
+
+            fetch_list = [avg_cost]
+            train_reader = fluid.io.batch(
+                paddle.dataset.uci_housing.train(), batch_size=1)
+            feeder = fluid.DataFeeder(place=place, feed_list=[x, y])
+            exe = fluid.Executor(place)
+            exe.run(fluid.default_startup_program())
+            for data in train_reader():
+                exe.run(main, feed=feeder.feed(data), fetch_list=fetch_list)
+
+
+class AdamFp32Test(unittest.TestCase):
+    def setUp(self):
+        self.dtype = 'float32'
+
+    def test_main(self):
+        for p in get_places():
+            main_test_func(p, self.dtype)
+
+
+class AdamFp64Test(AdamFp32Test):
+    def setUp(self):
+        self.dtype = 'float64'
+
+
+if __name__ == '__main__':
+    unittest.main()
-- 
GitLab
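
Reviewer note (not part of the patch): `_add_accumulator` falls back to `param.dtype` when no explicit `dtype` is passed, so dropping the hard-coded `dtype='float32'` lets the beta1/beta2 power accumulators follow the parameter's precision instead of forcing float32 into a float64 adam op. The sketch below is a minimal way to confirm this; it assumes a fluid 1.6-era build, uses made-up layer sizes and names, and only constructs the program (no executor or data needed):

import paddle.fluid as fluid

# Build a tiny float64 network and let Adam create its accumulators.
# Shapes and variable names here are illustrative only.
main = fluid.Program()
startup = fluid.Program()
with fluid.program_guard(main, startup):
    x = fluid.data(name='x', shape=[None, 4], dtype='float64')
    y_predict = fluid.layers.fc(input=x, size=1)
    avg_cost = fluid.layers.mean(y_predict)
    fluid.optimizer.AdamOptimizer(learning_rate=0.01).minimize(avg_cost)

# Before this patch the *_beta1_pow_acc / *_beta2_pow_acc variables were
# created as FP32 regardless of the parameter dtype; after it they should
# inherit FP64 from the fc parameters.
for var in main.list_vars():
    if 'beta1_pow_acc' in var.name or 'beta2_pow_acc' in var.name:
        print(var.name, var.dtype)

Run under a pre-fix build this should print FP32 for the accumulators, which is exactly the dtype mismatch the new unit test exercises end-to-end for both float32 and float64.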