From 31ea33c884797a67dbe8efc59e9251311319ae40 Mon Sep 17 00:00:00 2001 From: gaotingquan Date: Wed, 12 Apr 2023 06:45:44 +0000 Subject: [PATCH] revert the cutmix, mixup, fmix fixes because this change (commit: df31d808fcd13ef7840e6d7682150ddc5eab1628) will cause other issues, such as a change in the value of QA monitoring, so revert temporarily. --- ppcls/data/preprocess/batch_ops/batch_operators.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/ppcls/data/preprocess/batch_ops/batch_operators.py b/ppcls/data/preprocess/batch_ops/batch_operators.py index cdcee062..0040bda4 100644 --- a/ppcls/data/preprocess/batch_ops/batch_operators.py +++ b/ppcls/data/preprocess/batch_ops/batch_operators.py @@ -60,7 +60,7 @@ class BatchOperator(object): class MixupOperator(BatchOperator): - """ Mixup operator + """ Mixup operator reference: https://arxiv.org/abs/1710.09412 """ @@ -88,7 +88,7 @@ class MixupOperator(BatchOperator): def __call__(self, batch): imgs, labels, bs = self._unpack(batch) - idx = np.arange(bs)[::-1] + idx = np.random.permutation(bs) lam = np.random.beta(self._alpha, self._alpha) imgs = lam * imgs + (1 - lam) * imgs[idx] targets = self._mix_target(labels, labels[idx], lam) @@ -143,7 +143,7 @@ class CutmixOperator(BatchOperator): def __call__(self, batch): imgs, labels, bs = self._unpack(batch) - idx = np.arange(bs)[::-1] + idx = np.random.permutation(bs) lam = np.random.beta(self._alpha, self._alpha) bbx1, bby1, bbx2, bby2 = self._rand_bbox(imgs.shape, lam) @@ -155,9 +155,9 @@ class CutmixOperator(BatchOperator): class FmixOperator(BatchOperator): - """ Fmix operator + """ Fmix operator reference: https://arxiv.org/abs/2002.12047 - + """ def __init__(self, @@ -179,7 +179,7 @@ class FmixOperator(BatchOperator): def __call__(self, batch): imgs, labels, bs = self._unpack(batch) - idx = np.arange(bs)[::-1] + idx = np.random.permutation(bs) size = (imgs.shape[2], imgs.shape[3]) lam, mask = sample_mask(self._alpha, self._decay_power, \ 
size, self._max_soft, self._reformulate) -- GitLab