diff --git a/python/paddle/tests/CMakeLists.txt b/python/paddle/tests/CMakeLists.txt
index 6b2bce79988890bcc34d11c4deff76d3c2d196e7..c88e22de9cfa3c95e93a27497a70a27473ec1f1b 100644
--- a/python/paddle/tests/CMakeLists.txt
+++ b/python/paddle/tests/CMakeLists.txt
@@ -48,5 +48,4 @@ set_tests_properties(test_dataset_wmt PROPERTIES TIMEOUT 120)
 set_tests_properties(test_vision_models PROPERTIES TIMEOUT 120)
 set_tests_properties(test_dataset_uci_housing PROPERTIES TIMEOUT 120)
 set_tests_properties(test_dataset_imdb PROPERTIES TIMEOUT 150)
-set_tests_properties(test_callbacks PROPERTIES TIMEOUT 120)
 set_tests_properties(test_pretrained_model PROPERTIES TIMEOUT 600)
diff --git a/python/paddle/tests/test_callback_early_stop.py b/python/paddle/tests/test_callback_early_stop.py
new file mode 100644
index 0000000000000000000000000000000000000000..132f0e385c8fe5d6f3a35b5e9786ed7a9fd2fdc8
--- /dev/null
+++ b/python/paddle/tests/test_callback_early_stop.py
@@ -0,0 +1,131 @@
+# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+import time
+import random
+import tempfile
+import shutil
+import numpy as np
+
+import paddle
+from paddle import Model
+from paddle.static import InputSpec
+from paddle.vision.models import LeNet
+from paddle.hapi.callbacks import config_callbacks
+from paddle.vision.datasets import MNIST
+from paddle.metric import Accuracy
+from paddle.nn.layer.loss import CrossEntropyLoss
+
+
+class MnistDataset(MNIST):
+    def __init__(self, mode, return_label=True, sample_num=None):
+        super(MnistDataset, self).__init__(mode=mode)
+        self.return_label = return_label
+        if sample_num:
+            self.images = self.images[:sample_num]
+            self.labels = self.labels[:sample_num]
+
+    def __getitem__(self, idx):
+        img, label = self.images[idx], self.labels[idx]
+        img = np.reshape(img, [1, 28, 28])
+        if self.return_label:
+            return img, np.array(self.labels[idx]).astype('int64')
+        return img,
+
+    def __len__(self):
+        return len(self.images)
+
+
+class TestCallbacks(unittest.TestCase):
+    def setUp(self):
+        self.save_dir = tempfile.mkdtemp()
+
+    def tearDown(self):
+        shutil.rmtree(self.save_dir)
+
+    def test_earlystopping(self):
+        paddle.seed(2020)
+        for dynamic in [True, False]:
+            paddle.enable_static() if not dynamic else None
+            device = paddle.set_device('cpu')
+            sample_num = 100
+            train_dataset = MnistDataset(mode='train', sample_num=sample_num)
+            val_dataset = MnistDataset(mode='test', sample_num=sample_num)
+
+            net = LeNet()
+            optim = paddle.optimizer.Adam(
+                learning_rate=0.001, parameters=net.parameters())
+
+            inputs = [InputSpec([None, 1, 28, 28], 'float32', 'x')]
+            labels = [InputSpec([None, 1], 'int64', 'label')]
+
+            model = Model(net, inputs=inputs, labels=labels)
+            model.prepare(
+                optim,
+                loss=CrossEntropyLoss(reduction="sum"),
+                metrics=[Accuracy()])
+            callbacks_0 = paddle.callbacks.EarlyStopping(
+                'loss',
+                mode='min',
+                patience=1,
+                verbose=1,
+                min_delta=0,
+                baseline=None,
+                save_best_model=True)
+            callbacks_1 = paddle.callbacks.EarlyStopping(
+                'acc',
+                mode='auto',
+                patience=1,
+                verbose=1,
+                min_delta=0,
+                baseline=0,
+                save_best_model=True)
+            callbacks_2 = paddle.callbacks.EarlyStopping(
+                'loss',
+                mode='auto_',
+                patience=1,
+                verbose=1,
+                min_delta=0,
+                baseline=None,
+                save_best_model=True)
+            callbacks_3 = paddle.callbacks.EarlyStopping(
+                'acc_',
+                mode='max',
+                patience=1,
+                verbose=1,
+                min_delta=0,
+                baseline=0,
+                save_best_model=True)
+            model.fit(
+                train_dataset,
+                val_dataset,
+                batch_size=64,
+                save_freq=10,
+                save_dir=self.save_dir,
+                epochs=10,
+                verbose=0,
+                callbacks=[callbacks_0, callbacks_1, callbacks_2, callbacks_3])
+            # Test for no val_loader
+            model.fit(train_dataset,
+                      batch_size=64,
+                      save_freq=10,
+                      save_dir=self.save_dir,
+                      epochs=10,
+                      verbose=0,
+                      callbacks=[callbacks_0])
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/python/paddle/tests/test_callback_visualdl.py b/python/paddle/tests/test_callback_visualdl.py
new file mode 100644
index 0000000000000000000000000000000000000000..36316183104fe3a19bfa5e9868e26e54f5405dd1
--- /dev/null
+++ b/python/paddle/tests/test_callback_visualdl.py
@@ -0,0 +1,75 @@
+# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import unittest
+import time
+import random
+import tempfile
+import shutil
+import numpy as np
+
+import paddle
+from paddle import Model
+from paddle.static import InputSpec
+from paddle.vision.models import LeNet
+from paddle.hapi.callbacks import config_callbacks
+import paddle.vision.transforms as T
+from paddle.vision.datasets import MNIST
+from paddle.metric import Accuracy
+from paddle.nn.layer.loss import CrossEntropyLoss
+
+
+class MnistDataset(MNIST):
+    def __len__(self):
+        return 512
+
+
+class TestCallbacks(unittest.TestCase):
+    def setUp(self):
+        self.save_dir = tempfile.mkdtemp()
+
+    def tearDown(self):
+        shutil.rmtree(self.save_dir)
+
+    def test_visualdl_callback(self):
+        # visualdl not support python2
+        if sys.version_info < (3, ):
+            return
+
+        inputs = [InputSpec([-1, 1, 28, 28], 'float32', 'image')]
+        labels = [InputSpec([None, 1], 'int64', 'label')]
+
+        transform = T.Compose([T.Transpose(), T.Normalize([127.5], [127.5])])
+        train_dataset = MnistDataset(mode='train', transform=transform)
+        eval_dataset = MnistDataset(mode='test', transform=transform)
+
+        net = paddle.vision.LeNet()
+        model = paddle.Model(net, inputs, labels)
+
+        optim = paddle.optimizer.Adam(0.001, parameters=net.parameters())
+        model.prepare(
+            optimizer=optim,
+            loss=paddle.nn.CrossEntropyLoss(),
+            metrics=paddle.metric.Accuracy())
+
+        callback = paddle.callbacks.VisualDL(log_dir='visualdl_log_dir')
+        model.fit(train_dataset,
+                  eval_dataset,
+                  batch_size=64,
+                  callbacks=callback)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/python/paddle/tests/test_callbacks.py b/python/paddle/tests/test_callbacks.py
index c5393e907ce16fef357242ad43396f35d3f4fdce..2c81549bab94c4810b558d8d44a398d5c689104c 100644
--- a/python/paddle/tests/test_callbacks.py
+++ b/python/paddle/tests/test_callbacks.py
@@ -59,9 +59,9 @@ class TestCallbacks(unittest.TestCase):
 
     def run_callback(self):
         epochs = 2
-        steps = 50
+        steps = 5
         freq = 2
-        eval_steps = 20
+        eval_steps = 2
 
         inputs = [InputSpec([None, 1, 28, 28], 'float32', 'image')]
         lenet = Model(LeNet(), inputs)
@@ -132,106 +132,6 @@ class TestCallbacks(unittest.TestCase):
         self.verbose = 3
         self.run_callback()
 
-    def test_visualdl_callback(self):
-        # visualdl not support python2
-        if sys.version_info < (3, ):
-            return
-
-        inputs = [InputSpec([-1, 1, 28, 28], 'float32', 'image')]
-        labels = [InputSpec([None, 1], 'int64', 'label')]
-
-        transform = T.Compose([T.Transpose(), T.Normalize([127.5], [127.5])])
-        train_dataset = paddle.vision.datasets.MNIST(
-            mode='train', transform=transform)
-        eval_dataset = paddle.vision.datasets.MNIST(
-            mode='test', transform=transform)
-
-        net = paddle.vision.LeNet()
-        model = paddle.Model(net, inputs, labels)
-
-        optim = paddle.optimizer.Adam(0.001, parameters=net.parameters())
-        model.prepare(
-            optimizer=optim,
-            loss=paddle.nn.CrossEntropyLoss(),
-            metrics=paddle.metric.Accuracy())
-
-        callback = paddle.callbacks.VisualDL(log_dir='visualdl_log_dir')
-        model.fit(train_dataset,
-                  eval_dataset,
-                  batch_size=64,
-                  callbacks=callback)
-
-    def test_earlystopping(self):
-        paddle.seed(2020)
-        for dynamic in [True, False]:
-            paddle.enable_static if not dynamic else None
-            device = paddle.set_device('cpu')
-            sample_num = 100
-            train_dataset = MnistDataset(mode='train', sample_num=sample_num)
-            val_dataset = MnistDataset(mode='test', sample_num=sample_num)
-
-            net = LeNet()
-            optim = paddle.optimizer.Adam(
-                learning_rate=0.001, parameters=net.parameters())
-
-            inputs = [InputSpec([None, 1, 28, 28], 'float32', 'x')]
-            labels = [InputSpec([None, 1], 'int64', 'label')]
-
-            model = Model(net, inputs=inputs, labels=labels)
-            model.prepare(
-                optim,
-                loss=CrossEntropyLoss(reduction="sum"),
-                metrics=[Accuracy()])
-            callbacks_0 = paddle.callbacks.EarlyStopping(
-                'loss',
-                mode='min',
-                patience=1,
-                verbose=1,
-                min_delta=0,
-                baseline=None,
-                save_best_model=True)
-            callbacks_1 = paddle.callbacks.EarlyStopping(
-                'acc',
-                mode='auto',
-                patience=1,
-                verbose=1,
-                min_delta=0,
-                baseline=0,
-                save_best_model=True)
-            callbacks_2 = paddle.callbacks.EarlyStopping(
-                'loss',
-                mode='auto_',
-                patience=1,
-                verbose=1,
-                min_delta=0,
-                baseline=None,
-                save_best_model=True)
-            callbacks_3 = paddle.callbacks.EarlyStopping(
-                'acc_',
-                mode='max',
-                patience=1,
-                verbose=1,
-                min_delta=0,
-                baseline=0,
-                save_best_model=True)
-            model.fit(
-                train_dataset,
-                val_dataset,
-                batch_size=64,
-                save_freq=10,
-                save_dir=self.save_dir,
-                epochs=10,
-                verbose=0,
-                callbacks=[callbacks_0, callbacks_1, callbacks_2, callbacks_3])
-            # Test for no val_loader
-            model.fit(train_dataset,
-                      batch_size=64,
-                      save_freq=10,
-                      save_dir=self.save_dir,
-                      epochs=10,
-                      verbose=0,
-                      callbacks=[callbacks_0])
-
 
 if __name__ == '__main__':
     unittest.main()
diff --git a/tools/windows/run_unittests.sh b/tools/windows/run_unittests.sh
index b89ce54edf149f14ba71db80b60c9c4d1a72c3ca..95b8e9b3e68a22c14a42e3ba77ba49929662fc1d 100644
--- a/tools/windows/run_unittests.sh
+++ b/tools/windows/run_unittests.sh
@@ -100,7 +100,6 @@ diable_wingpu_test="^test_analysis_predictor$|\
 ^test_weight_decay$|\
 ^test_conv2d_int8_mkldnn_op$|\
 ^test_crypto$|\
-^test_callbacks$|\
 ^test_program_prune_backward$|\
 ^test_imperative_ocr_attention_model$|\
 ^test_sentiment$|\
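
Note (not part of the patch): for reviewers unfamiliar with the callback exercised by the new test_callback_early_stop.py, below is a minimal, hypothetical usage sketch of paddle.callbacks.EarlyStopping with the high-level Model API, mirroring the arguments used in the test. The dataset, epoch count, patience value, and save_dir name are illustrative placeholders chosen for this sketch.

import paddle
from paddle import Model
from paddle.static import InputSpec
from paddle.vision.datasets import MNIST
from paddle.vision.models import LeNet
import paddle.vision.transforms as T

# MNIST normalized to [-1, 1] and laid out as CHW, as in the tests above.
transform = T.Compose([T.Transpose(), T.Normalize([127.5], [127.5])])
train_ds = MNIST(mode='train', transform=transform)
val_ds = MNIST(mode='test', transform=transform)

net = LeNet()
inputs = [InputSpec([None, 1, 28, 28], 'float32', 'image')]
labels = [InputSpec([None, 1], 'int64', 'label')]
model = Model(net, inputs, labels)
model.prepare(
    paddle.optimizer.Adam(0.001, parameters=net.parameters()),
    loss=paddle.nn.CrossEntropyLoss(),
    metrics=paddle.metric.Accuracy())

# Stop training once validation loss has not improved for `patience` epochs;
# with save_best_model=True the callback is expected to keep the best weights
# under the save_dir passed to fit().
early_stop = paddle.callbacks.EarlyStopping(
    'loss', mode='min', patience=2, verbose=1, save_best_model=True)
model.fit(train_ds, val_ds, batch_size=64, epochs=10,
          save_dir='early_stop_ckpt', callbacks=[early_stop])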