From 48d9fd08e5193a505a8dea48926f2ab2abfd129f Mon Sep 17 00:00:00 2001 From: dzhwinter Date: Tue, 26 Feb 2019 13:49:55 +0800 Subject: [PATCH] fix default value. test=develop --- .../unittests/ir_memory_optimize_net_base.py | 15 +++-- .../test_ir_memory_optimize_ifelse_net.py | 55 ------------------- 2 files changed, 10 insertions(+), 60 deletions(-) delete mode 100644 python/paddle/fluid/tests/unittests/test_ir_memory_optimize_ifelse_net.py diff --git a/python/paddle/fluid/tests/unittests/ir_memory_optimize_net_base.py b/python/paddle/fluid/tests/unittests/ir_memory_optimize_net_base.py index bf6adce8aca..079f0d22056 100644 --- a/python/paddle/fluid/tests/unittests/ir_memory_optimize_net_base.py +++ b/python/paddle/fluid/tests/unittests/ir_memory_optimize_net_base.py @@ -117,7 +117,7 @@ class TestIrMemOptBase(BuildIrMemOptBase): self.network = None def test_network(self): - if self.network is None: + if self.network is None or not core.is_compiled_with_cuda(): return baseline_first_loss, baseline_last_loss = None, None @@ -139,7 +139,12 @@ class TestIrMemOptBase(BuildIrMemOptBase): self.network, use_cuda=use_cuda, memory_opt=use_python_mem_opt) - self.assertAlmostEquals(np.mean(baseline_last_loss), - np.mean(cur_last_loss), delta=1e-2) - self.assertAlmostEquals(np.mean(baseline_first_loss), - np.mean(cur_first_loss), delta=1e-2) + + self.assertAlmostEquals( + np.mean(baseline_last_loss), + np.mean(cur_last_loss), + delta=1e-2) + self.assertAlmostEquals( + np.mean(baseline_first_loss), + np.mean(cur_first_loss), + delta=1e-2) diff --git a/python/paddle/fluid/tests/unittests/test_ir_memory_optimize_ifelse_net.py b/python/paddle/fluid/tests/unittests/test_ir_memory_optimize_ifelse_net.py deleted file mode 100644 index 7ae7920fb69..00000000000 --- a/python/paddle/fluid/tests/unittests/test_ir_memory_optimize_ifelse_net.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function - -import paddle.fluid as fluid -import unittest -from ir_memory_optimize_net_base import TestIrMemOptBase -from paddle.fluid.layers.control_flow import ConditionalBlock - - -def lstm_net(data, - label, - dict_dim, - emb_dim=128, - hid_dim=128, - hid_dim2=96, - class_dim=2, - emb_lr=30.0): - emb = fluid.layers.embedding( - input=data, - size=[dict_dim, emb_dim], - param_attr=fluid.ParamAttr(learning_rate=emb_lr)) - fc0 = fluid.layers.fc(input=emb, size=hid_dim * 4) - - lstm_h, c = fluid.layers.dynamic_lstm( - input=fc0, size=hid_dim * 4, is_reverse=False) - lstm_max = fluid.layers.sequence_pool(input=lstm_h, pool_type='max') - lstm_max_tanh = fluid.layers.tanh(lstm_max) - fc1 = fluid.layers.fc(input=lstm_max_tanh, size=hid_dim2, act='tanh') - prediction = fluid.layers.fc(input=fc1, size=class_dim, act='softmax') - cost = fluid.layers.cross_entropy(input=prediction, label=label) - avg_cost = fluid.layers.mean(x=cost) - return avg_cost - - -class TestIrMemOptRNN(TestIrMemOptBase): - def setUp(self): - self.network = lstm_net - self.iter = 2 - - -if __name__ == "__main__": - unittest.main() -- GitLab