Unverified · Commit 11de384c, authored by LielinJiang, committed by GitHub

Split callbacks unittest (#29914)

* split callback unittest

* rm test_callback from timeout list
Parent 01950ceb
@@ -48,5 +48,4 @@ set_tests_properties(test_dataset_wmt PROPERTIES TIMEOUT 120)
 set_tests_properties(test_vision_models PROPERTIES TIMEOUT 120)
 set_tests_properties(test_dataset_uci_housing PROPERTIES TIMEOUT 120)
 set_tests_properties(test_dataset_imdb PROPERTIES TIMEOUT 150)
-set_tests_properties(test_callbacks PROPERTIES TIMEOUT 120)
 set_tests_properties(test_pretrained_model PROPERTIES TIMEOUT 600)
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import time
import random
import tempfile
import shutil
import numpy as np
import paddle
from paddle import Model
from paddle.static import InputSpec
from paddle.vision.models import LeNet
from paddle.hapi.callbacks import config_callbacks
from paddle.vision.datasets import MNIST
from paddle.metric import Accuracy
from paddle.nn.layer.loss import CrossEntropyLoss

class MnistDataset(MNIST):
    def __init__(self, mode, return_label=True, sample_num=None):
        super(MnistDataset, self).__init__(mode=mode)
        self.return_label = return_label
        if sample_num:
            self.images = self.images[:sample_num]
            self.labels = self.labels[:sample_num]

    def __getitem__(self, idx):
        img, label = self.images[idx], self.labels[idx]
        img = np.reshape(img, [1, 28, 28])
        if self.return_label:
            return img, np.array(self.labels[idx]).astype('int64')
        return img,

    def __len__(self):
        return len(self.images)


class TestCallbacks(unittest.TestCase):
    def setUp(self):
        self.save_dir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.save_dir)

    def test_earlystopping(self):
        paddle.seed(2020)
        # Run the test in both dynamic and static graph modes.
        for dynamic in [True, False]:
            if not dynamic:
                paddle.enable_static()
            device = paddle.set_device('cpu')
            sample_num = 100
            train_dataset = MnistDataset(mode='train', sample_num=sample_num)
            val_dataset = MnistDataset(mode='test', sample_num=sample_num)

            net = LeNet()
            optim = paddle.optimizer.Adam(
                learning_rate=0.001, parameters=net.parameters())

            inputs = [InputSpec([None, 1, 28, 28], 'float32', 'x')]
            labels = [InputSpec([None, 1], 'int64', 'label')]

            model = Model(net, inputs=inputs, labels=labels)
            model.prepare(
                optim,
                loss=CrossEntropyLoss(reduction="sum"),
                metrics=[Accuracy()])
            callbacks_0 = paddle.callbacks.EarlyStopping(
                'loss',
                mode='min',
                patience=1,
                verbose=1,
                min_delta=0,
                baseline=None,
                save_best_model=True)
            callbacks_1 = paddle.callbacks.EarlyStopping(
                'acc',
                mode='auto',
                patience=1,
                verbose=1,
                min_delta=0,
                baseline=0,
                save_best_model=True)
            # 'auto_' is not a valid mode and 'acc_' is not a produced metric;
            # these configurations exercise EarlyStopping's handling of
            # unrecognized values.
            callbacks_2 = paddle.callbacks.EarlyStopping(
                'loss',
                mode='auto_',
                patience=1,
                verbose=1,
                min_delta=0,
                baseline=None,
                save_best_model=True)
            callbacks_3 = paddle.callbacks.EarlyStopping(
                'acc_',
                mode='max',
                patience=1,
                verbose=1,
                min_delta=0,
                baseline=0,
                save_best_model=True)
            model.fit(
                train_dataset,
                val_dataset,
                batch_size=64,
                save_freq=10,
                save_dir=self.save_dir,
                epochs=10,
                verbose=0,
                callbacks=[callbacks_0, callbacks_1, callbacks_2, callbacks_3])

            # Test fitting without a validation loader.
            model.fit(train_dataset,
                      batch_size=64,
                      save_freq=10,
                      save_dir=self.save_dir,
                      epochs=10,
                      verbose=0,
                      callbacks=[callbacks_0])


if __name__ == '__main__':
    unittest.main()
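For context, the test above drives paddle.callbacks.EarlyStopping with several monitor/mode combinations. Conceptually, early stopping tracks a monitored value and halts training once it fails to improve by at least min_delta for more than patience consecutive evaluations. Below is a minimal, framework-agnostic sketch of that rule for illustration only; it is not PaddlePaddle's implementation and is not part of this commit, and all names in it are hypothetical.

# Illustrative sketch of the early-stopping rule exercised above.
# Not paddle.callbacks.EarlyStopping; names here are hypothetical.
class SimpleEarlyStopping:
    def __init__(self, mode='min', patience=1, min_delta=0.0, baseline=None):
        self.mode = mode          # 'min' for losses, 'max' for accuracy-like metrics
        self.patience = patience  # evaluations tolerated without improvement
        self.min_delta = min_delta
        self.best = baseline if baseline is not None else (
            float('inf') if mode == 'min' else float('-inf'))
        self.wait = 0
        self.stopped = False

    def update(self, value):
        # A value counts as an improvement only if it beats the best seen
        # so far by more than min_delta, in the configured direction.
        if self.mode == 'min':
            improved = value < self.best - self.min_delta
        else:
            improved = value > self.best + self.min_delta
        if improved:
            self.best = value
            self.wait = 0
        else:
            self.wait += 1
            if self.wait > self.patience:
                self.stopped = True
        return self.stopped

# Example: with patience=1, the second consecutive non-improving value stops.
stopper = SimpleEarlyStopping(mode='min', patience=1)
for loss in [1.0, 0.8, 0.9, 0.95]:
    if stopper.update(loss):
        break
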
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
import time
import random
import tempfile
import shutil
import numpy as np
import paddle
from paddle import Model
from paddle.static import InputSpec
from paddle.vision.models import LeNet
from paddle.hapi.callbacks import config_callbacks
import paddle.vision.transforms as T
from paddle.vision.datasets import MNIST
from paddle.metric import Accuracy
from paddle.nn.layer.loss import CrossEntropyLoss

class MnistDataset(MNIST):
    def __len__(self):
        return 512


class TestCallbacks(unittest.TestCase):
    def setUp(self):
        self.save_dir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.save_dir)

    def test_visualdl_callback(self):
        # VisualDL does not support Python 2.
        if sys.version_info < (3, ):
            return

        inputs = [InputSpec([-1, 1, 28, 28], 'float32', 'image')]
        labels = [InputSpec([None, 1], 'int64', 'label')]

        transform = T.Compose([T.Transpose(), T.Normalize([127.5], [127.5])])
        train_dataset = MnistDataset(mode='train', transform=transform)
        eval_dataset = MnistDataset(mode='test', transform=transform)

        net = paddle.vision.LeNet()
        model = paddle.Model(net, inputs, labels)

        optim = paddle.optimizer.Adam(0.001, parameters=net.parameters())
        model.prepare(
            optimizer=optim,
            loss=paddle.nn.CrossEntropyLoss(),
            metrics=paddle.metric.Accuracy())

        callback = paddle.callbacks.VisualDL(log_dir='visualdl_log_dir')
        model.fit(train_dataset,
                  eval_dataset,
                  batch_size=64,
                  callbacks=callback)


if __name__ == '__main__':
    unittest.main()
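For context, the VisualDL callback used above records scalars such as loss and metric values while model.fit runs. The snippet below is a minimal, framework-agnostic sketch of that logging-callback pattern, for illustration only; the writer class is a hypothetical stand-in and this is not VisualDL's or PaddlePaddle's implementation, nor part of this commit.

# Illustrative sketch of a scalar-logging callback; ScalarWriter is a
# hypothetical stand-in for a VisualDL-style log writer.
class ScalarWriter:
    def __init__(self, log_dir):
        self.log_dir = log_dir
        self.history = {}  # tag -> list of (step, value) pairs

    def add_scalar(self, tag, step, value):
        self.history.setdefault(tag, []).append((step, value))


class LoggingCallback:
    """Invoked by a training loop with a dict of scalar logs per epoch."""

    def __init__(self, writer):
        self.writer = writer

    def on_epoch_end(self, epoch, logs=None):
        for name, value in (logs or {}).items():
            self.writer.add_scalar(name, epoch, float(value))


# Toy usage: feed two epochs of fake metrics through the callback.
writer = ScalarWriter('visualdl_log_dir')
callback = LoggingCallback(writer)
for epoch, (loss, acc) in enumerate([(0.9, 0.6), (0.5, 0.8)]):
    callback.on_epoch_end(epoch, {'loss': loss, 'acc': acc})
print(writer.history['loss'])  # [(0, 0.9), (1, 0.5)]
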
@@ -59,9 +59,9 @@ class TestCallbacks(unittest.TestCase):
     def run_callback(self):
         epochs = 2
-        steps = 50
+        steps = 5
         freq = 2
-        eval_steps = 20
+        eval_steps = 2
         inputs = [InputSpec([None, 1, 28, 28], 'float32', 'image')]
         lenet = Model(LeNet(), inputs)
@@ -132,106 +132,6 @@ class TestCallbacks(unittest.TestCase):
         self.verbose = 3
         self.run_callback()
-    def test_visualdl_callback(self):
-        # visualdl not support python2
-        if sys.version_info < (3, ):
-            return
-        inputs = [InputSpec([-1, 1, 28, 28], 'float32', 'image')]
-        labels = [InputSpec([None, 1], 'int64', 'label')]
-        transform = T.Compose([T.Transpose(), T.Normalize([127.5], [127.5])])
-        train_dataset = paddle.vision.datasets.MNIST(
-            mode='train', transform=transform)
-        eval_dataset = paddle.vision.datasets.MNIST(
-            mode='test', transform=transform)
-        net = paddle.vision.LeNet()
-        model = paddle.Model(net, inputs, labels)
-        optim = paddle.optimizer.Adam(0.001, parameters=net.parameters())
-        model.prepare(
-            optimizer=optim,
-            loss=paddle.nn.CrossEntropyLoss(),
-            metrics=paddle.metric.Accuracy())
-        callback = paddle.callbacks.VisualDL(log_dir='visualdl_log_dir')
-        model.fit(train_dataset,
-                  eval_dataset,
-                  batch_size=64,
-                  callbacks=callback)
-
-    def test_earlystopping(self):
-        paddle.seed(2020)
-        for dynamic in [True, False]:
-            paddle.enable_static() if not dynamic else None
-            device = paddle.set_device('cpu')
-            sample_num = 100
-            train_dataset = MnistDataset(mode='train', sample_num=sample_num)
-            val_dataset = MnistDataset(mode='test', sample_num=sample_num)
-            net = LeNet()
-            optim = paddle.optimizer.Adam(
-                learning_rate=0.001, parameters=net.parameters())
-            inputs = [InputSpec([None, 1, 28, 28], 'float32', 'x')]
-            labels = [InputSpec([None, 1], 'int64', 'label')]
-            model = Model(net, inputs=inputs, labels=labels)
-            model.prepare(
-                optim,
-                loss=CrossEntropyLoss(reduction="sum"),
-                metrics=[Accuracy()])
-            callbacks_0 = paddle.callbacks.EarlyStopping(
-                'loss',
-                mode='min',
-                patience=1,
-                verbose=1,
-                min_delta=0,
-                baseline=None,
-                save_best_model=True)
-            callbacks_1 = paddle.callbacks.EarlyStopping(
-                'acc',
-                mode='auto',
-                patience=1,
-                verbose=1,
-                min_delta=0,
-                baseline=0,
-                save_best_model=True)
-            callbacks_2 = paddle.callbacks.EarlyStopping(
-                'loss',
-                mode='auto_',
-                patience=1,
-                verbose=1,
-                min_delta=0,
-                baseline=None,
-                save_best_model=True)
-            callbacks_3 = paddle.callbacks.EarlyStopping(
-                'acc_',
-                mode='max',
-                patience=1,
-                verbose=1,
-                min_delta=0,
-                baseline=0,
-                save_best_model=True)
-            model.fit(
-                train_dataset,
-                val_dataset,
-                batch_size=64,
-                save_freq=10,
-                save_dir=self.save_dir,
-                epochs=10,
-                verbose=0,
-                callbacks=[callbacks_0, callbacks_1, callbacks_2, callbacks_3])
-            # Test for no val_loader
-            model.fit(train_dataset,
-                      batch_size=64,
-                      save_freq=10,
-                      save_dir=self.save_dir,
-                      epochs=10,
-                      verbose=0,
-                      callbacks=[callbacks_0])
 if __name__ == '__main__':
     unittest.main()
@@ -100,7 +100,6 @@ diable_wingpu_test="^test_analysis_predictor$|\
 ^test_weight_decay$|\
 ^test_conv2d_int8_mkldnn_op$|\
 ^test_crypto$|\
-^test_callbacks$|\
 ^test_program_prune_backward$|\
 ^test_imperative_ocr_attention_model$|\
 ^test_sentiment$|\
...