Unverified · Commit 5c153e0a authored by Bai Yifan, committed by GitHub

[Cherry-Pick]Add darts unittest (#271)

* add darts unittest
Parent b6ba314d
@@ -36,7 +36,6 @@ add_arg = functools.partial(add_arguments, argparser=parser)
 # yapf: disable
 add_arg('log_freq', int, 50, "Log frequency.")
 add_arg('use_multiprocess', bool, True, "Whether use multiprocess reader.")
-add_arg('data', str, 'dataset/cifar10',"The dir of dataset.")
 add_arg('batch_size', int, 64, "Minibatch size.")
 add_arg('learning_rate', float, 0.025, "The start learning rate.")
 add_arg('momentum', float, 0.9, "Momentum.")
......
@@ -206,8 +206,6 @@ class DARTSearch(object):
         if self.use_data_parallel:
             self.train_reader = fluid.contrib.reader.distributed_batch_reader(
                 self.train_reader)
-            self.valid_reader = fluid.contrib.reader.distributed_batch_reader(
-                self.valid_reader)
         train_loader = fluid.io.DataLoader.from_generator(
             capacity=64,
......
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import paddle
import unittest
import paddle.fluid as fluid
import numpy as np
from paddleslim.nas.darts import DARTSearch
from layers import conv_bn_layer


class TestDARTS(unittest.TestCase):
    def test_darts(self):
        # Minimal super network: a conv stem plus a linear classifier, with
        # DARTS architecture parameters (alphas) attached so DARTSearch can
        # optimize them.
        class SuperNet(fluid.dygraph.Layer):
def __init__(self):
super(SuperNet, self).__init__()
self._method = 'DARTS'
self._steps = 1
self.stem = fluid.dygraph.nn.Conv2D(
num_channels=1, num_filters=3, filter_size=3, padding=1)
self.classifier = fluid.dygraph.nn.Linear(
input_dim=2352, output_dim=10)
self._multiplier = 4
self._primitives = [
'none', 'max_pool_3x3', 'avg_pool_3x3', 'skip_connect',
'sep_conv_3x3', 'sep_conv_5x5', 'dil_conv_3x3',
'dil_conv_5x5'
]
                self._initialize_alphas()

            def _initialize_alphas(self):
                # Architecture parameters: one row per candidate edge and one
                # column per primitive op in self._primitives.
self.alphas_normal = fluid.layers.create_parameter(
shape=[14, 8], dtype="float32")
self.alphas_reduce = fluid.layers.create_parameter(
shape=[14, 8], dtype="float32")
self._arch_parameters = [
self.alphas_normal,
self.alphas_reduce,
                ]

            def arch_parameters(self):
                return self._arch_parameters

            def forward(self, input):
out = self.stem(input) * self.alphas_normal[0][
0] * self.alphas_reduce[0][0]
out = fluid.layers.reshape(out, [0, -1])
logits = self.classifier(out)
                return logits

            def _loss(self, input, label):
logits = self.forward(input)
return fluid.layers.reduce_mean(
                    fluid.layers.softmax_with_cross_entropy(logits, label))

        # Wrap a per-sample reader into one that yields fixed-size batches of
        # (image, label) numpy arrays.
        def batch_generator(reader):
def wrapper():
batch_data = []
batch_label = []
for sample in reader():
image = np.array(sample[0]).reshape(1, 28, 28)
label = np.array(sample[1]).reshape(1)
batch_data.append(image)
batch_label.append(label)
if len(batch_data) == 128:
batch_data = np.array(batch_data, dtype='float32')
batch_label = np.array(batch_label, dtype='int64')
yield [batch_data, batch_label]
batch_data = []
batch_label = []
            return wrapper

        place = fluid.CUDAPlace(0)
with fluid.dygraph.guard(place):
model = SuperNet()
trainset = paddle.dataset.mnist.train()
validset = paddle.dataset.mnist.test()
train_reader = batch_generator(trainset)
valid_reader = batch_generator(validset)
searcher = DARTSearch(
model, train_reader, valid_reader, place, num_epochs=5)
            searcher.train()


if __name__ == '__main__':
    unittest.main()
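Note: the test above pins execution to GPU 0 via fluid.CUDAPlace(0). A minimal, hypothetical sketch (not part of this commit) of picking the place defensively, so the same case could also run on CPU-only builds of Paddle:

    # Hypothetical variant: fall back to CPU when Paddle was built without CUDA.
    place = fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda() else fluid.CPUPlace()
    with fluid.dygraph.guard(place):
        ...  # rest of the test body unchanged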
@@ -19,8 +19,8 @@ from paddleslim.dist import merge, fsp_loss
 from layers import conv_bn_layer


-class TestMerge(unittest.TestCase):
-    def test_merge(self):
+class TestFSPLoss(unittest.TestCase):
+    def test_fsp_loss(self):
         student_main = fluid.Program()
         student_startup = fluid.Program()
         with fluid.program_guard(student_main, student_startup):
......
@@ -19,8 +19,8 @@ from paddleslim.dist import merge, l2_loss
 from layers import conv_bn_layer


-class TestMerge(unittest.TestCase):
-    def test_merge(self):
+class TestL2Loss(unittest.TestCase):
+    def test_l2_loss(self):
         student_main = fluid.Program()
         student_startup = fluid.Program()
         with fluid.program_guard(student_main, student_startup):
......
@@ -19,8 +19,8 @@ from paddleslim.dist import merge, loss
 from layers import conv_bn_layer


-class TestMerge(unittest.TestCase):
-    def test_merge(self):
+class TestLoss(unittest.TestCase):
+    def test_loss(self):
         student_main = fluid.Program()
         student_startup = fluid.Program()
         with fluid.program_guard(student_main, student_startup):
......
@@ -19,8 +19,8 @@ from paddleslim.dist import merge, soft_label_loss
 from layers import conv_bn_layer


-class TestMerge(unittest.TestCase):
-    def test_merge(self):
+class TestSoftLabelLoss(unittest.TestCase):
+    def test_soft_label_loss(self):
         student_main = fluid.Program()
         student_startup = fluid.Program()
         with fluid.program_guard(student_main, student_startup):
......