diff --git a/configs/_base_/readers/mask_reader.yml b/configs/_base_/readers/mask_reader.yml
index c4908308ff5b9853996653b0dfbba7346a2a2f6a..07159041f1bcc67a733da01db3d5481853af62f2 100644
--- a/configs/_base_/readers/mask_reader.yml
+++ b/configs/_base_/readers/mask_reader.yml
@@ -12,7 +12,7 @@ TrainReader:
     - PadBatch: {pad_to_stride: 32, use_padded_im_info: false, pad_gt: true}
   batch_size: 1
   shuffle: true
-  drop_last: false
+  drop_last: true
 
 
 EvalReader:
diff --git a/ppdet/data/reader.py b/ppdet/data/reader.py
index 06e642acca17041a3d279f52945bd7068d4876f2..c279e5abd9b70119d5b898f0ba4614d369412691 100644
--- a/ppdet/data/reader.py
+++ b/ppdet/data/reader.py
@@ -135,7 +135,7 @@ class TrainReader(BaseDataLoader):
                  batch_transforms=None,
                  batch_size=1,
                  shuffle=True,
-                 drop_last=False,
+                 drop_last=True,
                  drop_empty=True,
                  num_classes=81,
                  with_background=True):
diff --git a/ppdet/data/source/coco.py b/ppdet/data/source/coco.py
index 49ac310256630400ed37777d2df141e36ea519ca..69e383ac795671ce700b39672b157d8dbe8098f8 100644
--- a/ppdet/data/source/coco.py
+++ b/ppdet/data/source/coco.py
@@ -129,10 +129,9 @@ class COCODataSet(DetDataset):
                     gt_bbox[i, :] = box['clean_bbox']
                     is_crowd[i][0] = box['iscrowd']
                     # check RLE format
-                    if box['iscrowd'] == 1:
+                    if 'segmentation' in box and box['iscrowd'] == 1:
                         gt_poly[i] = [[0.0, 0.0], ]
-                        continue
-                    if 'segmentation' in box:
+                    elif 'segmentation' in box:
                         gt_poly[i] = box['segmentation']
 
                 if not any(gt_poly):
diff --git a/ppdet/data/source/dataset.py b/ppdet/data/source/dataset.py
index c8a2a9599688ccc2b8a442ca7f17750d7abf002d..f5ef1aad5f0982b7d5b1fbe623d3fe608a08e01c 100644
--- a/ppdet/data/source/dataset.py
+++ b/ppdet/data/source/dataset.py
@@ -22,6 +22,7 @@ except Exception:
 from paddle.io import Dataset
 from ppdet.core.workspace import register, serializable
 from ppdet.utils.download import get_dataset_path
+import copy
 
 
 @serializable
@@ -45,7 +46,7 @@ class DetDataset(Dataset):
 
     def __getitem__(self, idx):
         # data batch
-        roidb = self.roidbs[idx]
+        roidb = copy.deepcopy(self.roidbs[idx])
         # data augment
         roidb = self.transform(roidb)
         # data item
diff --git a/ppdet/modeling/architecture/meta_arch.py b/ppdet/modeling/architecture/meta_arch.py
index ff6776269be857013c8e861821df9cfd7c6653aa..51d1852478c456deaec5203e785b46deed8ae138 100644
--- a/ppdet/modeling/architecture/meta_arch.py
+++ b/ppdet/modeling/architecture/meta_arch.py
@@ -32,8 +32,7 @@ class BaseArch(nn.Layer):
     def build_inputs(self, data, input_def):
         inputs = {}
         for i, k in enumerate(input_def):
-            v = paddle.to_tensor(data[i])
-            inputs[k] = v
+            inputs[k] = data[i]
         return inputs
 
     def model_arch(self):
diff --git a/tools/train.py b/tools/train.py
index 716b7b8a62b1f43199e98580aacdce9991fe6255..a59dd192d305ccfee320272e6363d32fc13b27ac 100755
--- a/tools/train.py
+++ b/tools/train.py
@@ -126,7 +126,7 @@ def run(FLAGS, cfg, place):
     model = create(cfg.architecture)
 
     # Optimizer
-    lr = create('LearningRate')(step_per_epoch / int(ParallelEnv().nranks))
+    lr = create('LearningRate')(step_per_epoch)
     optimizer = create('OptimizerBuilder')(lr, model.parameters())
 
     # Init Model & Optimzer