From 9764c13c0f8b6c55232351816afcde6e2c118738 Mon Sep 17 00:00:00 2001
From: LielinJiang
Date: Tue, 31 Mar 2020 12:18:39 +0000
Subject: [PATCH] remove unused code

---
 image_classification/imagenet_dataset.py | 23 ++---------------------
 image_classification/main.py             |  7 ++++---
 2 files changed, 6 insertions(+), 24 deletions(-)

diff --git a/image_classification/imagenet_dataset.py b/image_classification/imagenet_dataset.py
index f1c4afc..6fcd884 100644
--- a/image_classification/imagenet_dataset.py
+++ b/image_classification/imagenet_dataset.py
@@ -3,7 +3,6 @@
 import cv2
 import math
 import random
 import numpy as np
-from paddle.fluid.io import Dataset
 
 from datasets.folder import DatasetFolder
@@ -65,24 +64,6 @@ def compose(functions):
     return process
 
 
-def image_folder(path):
-    valid_ext = ('.jpg', '.jpeg', '.png', '.ppm', '.bmp', '.webp')
-    classes = [
-        d for d in os.listdir(path) if os.path.isdir(os.path.join(path, d))
-    ]
-    classes.sort()
-    class_map = {cls: idx for idx, cls in enumerate(classes)}
-    samples = []
-    for dir in sorted(class_map.keys()):
-        d = os.path.join(path, dir)
-        for root, _, fnames in sorted(os.walk(d)):
-            for fname in sorted(fnames):
-                p = os.path.join(root, fname)
-                if os.path.splitext(p)[1].lower() in valid_ext:
-                    samples.append((p, [class_map[dir]]))
-    return samples
-
-
 class ImageNetDataset(DatasetFolder):
     def __init__(self, path, mode='train'):
         super(ImageNetDataset, self).__init__(path)
@@ -96,8 +77,8 @@ class ImageNetDataset(DatasetFolder):
             [cv2.imread, center_crop_resize, normalize_permute])
 
     def __getitem__(self, idx):
-
-        return self.transform(self.samples[idx])
+        img, label = self.samples[idx]
+        return self.transform((img, [label]))
 
     def __len__(self):
         return len(self.samples)
diff --git a/image_classification/main.py b/image_classification/main.py
index 9fb71e9..8f8a44e 100644
--- a/image_classification/main.py
+++ b/image_classification/main.py
@@ -40,7 +40,7 @@ def make_optimizer(step_per_epoch, parameter_list=None):
     momentum = 0.9
     weight_decay = 1e-4
 
-    boundaries = [step_per_epoch * e for e in [30, 60, 90]]
+    boundaries = [step_per_epoch * e for e in [30, 60, 80]]
     values = [base_lr * (0.1**i) for i in range(len(boundaries) + 1)]
     learning_rate = fluid.layers.piecewise_decay(
         boundaries=boundaries, values=values)
@@ -61,7 +61,8 @@ def main():
     device = set_device(FLAGS.device)
     fluid.enable_dygraph(device) if FLAGS.dynamic else None
 
-    model = models.__dict__[FLAGS.arch](pretrained=FLAGS.eval_only)
+    model = models.__dict__[FLAGS.arch](pretrained=FLAGS.eval_only and
+                                        not FLAGS.resume)
 
     if FLAGS.resume is not None:
         model.load(FLAGS.resume)
@@ -115,7 +116,7 @@ if __name__ == '__main__':
     parser.add_argument(
         "-d", "--dynamic", action='store_true', help="enable dygraph mode")
     parser.add_argument(
-        "-e", "--epoch", default=120, type=int, help="number of epoch")
+        "-e", "--epoch", default=90, type=int, help="number of epoch")
     parser.add_argument(
         '--lr',
         '--learning-rate',
-- 
GitLab
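
Not part of the patch: a minimal standalone sketch of the piecewise-constant schedule that the `boundaries`/`values` lists in `make_optimizer` describe, with the decay steps at epochs 30, 60 and 80 as in the hunk above. The `base_lr` and `step_per_epoch` values here are placeholders for illustration, not taken from main.py (main.py reads the learning rate from `FLAGS.lr` and derives steps per epoch from the data loader).

```python
base_lr = 0.1            # placeholder; main.py uses FLAGS.lr
step_per_epoch = 5000    # placeholder; depends on dataset size and batch size

# Same comprehensions as in make_optimizer: one boundary per decay epoch,
# one value per interval (len(boundaries) + 1 values in total).
boundaries = [step_per_epoch * e for e in [30, 60, 80]]
values = [base_lr * (0.1**i) for i in range(len(boundaries) + 1)]


def lr_at_step(step):
    """Piecewise-constant lookup: the rate drops by 10x at each boundary."""
    for boundary, value in zip(boundaries, values):
        if step < boundary:
            return value
    return values[-1]


for epoch in (0, 30, 60, 80):
    print(epoch, lr_at_step(epoch * step_per_epoch))
# 0 -> 0.1, 30 -> 0.01, 60 -> 0.001, 80 -> 0.0001 (up to float rounding)
```

In the patched code itself this lookup is handled by `fluid.layers.piecewise_decay(boundaries=boundaries, values=values)`; the sketch only spells out the arithmetic behind it.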