From ae768d6aeea9c6de41c10b3b2c1febe3db0922df Mon Sep 17 00:00:00 2001
From: gaotingquan
Date: Wed, 21 Jul 2021 04:51:45 +0000
Subject: [PATCH 1/2] feat: support enabling mixup and cutmix at the same time

---
 .../preprocess/batch_ops/batch_operators.py   | 66 +++++++++++++------
 1 file changed, 45 insertions(+), 21 deletions(-)

diff --git a/ppcls/data/preprocess/batch_ops/batch_operators.py b/ppcls/data/preprocess/batch_ops/batch_operators.py
index 769045b54..55c1b5a44 100644
--- a/ppcls/data/preprocess/batch_ops/batch_operators.py
+++ b/ppcls/data/preprocess/batch_ops/batch_operators.py
@@ -23,6 +23,7 @@ from ppcls.data.preprocess.ops.fmix import sample_mask

 class BatchOperator(object):
     """ BatchOperator """
+
     def __init__(self, *args, **kwargs):
         pass

@@ -45,35 +46,44 @@ class BatchOperator(object):


 class MixupOperator(BatchOperator):
-    """ Mixup operator """
-    def __init__(self, alpha=0.2):
-        assert alpha > 0., \
-            'parameter alpha[%f] should > 0.0' % (alpha)
-        self._alpha = alpha
+    """Mixup and Cutmix operator"""

-    def __call__(self, batch):
-        imgs, labels, bs = self._unpack(batch)
+    def __init__(self,
+                 mixup_alpha: float=1.,
+                 cutmix_alpha: float=0.,
+                 switch_prob: float=0.5):
+        """Build Mixup operator
+
+        Args:
+            mixup_alpha (float, optional): The parameter alpha of mixup; mixup is active if > 0. Defaults to 1.0.
+            cutmix_alpha (float, optional): The parameter alpha of cutmix; cutmix is active if > 0. Defaults to 0.0.
+            switch_prob (float, optional): The probability of switching to cutmix instead of mixup when both are active. Defaults to 0.5.
+
+        Raises:
+            Exception: The values of the parameters are illegal.
+        """
+        if mixup_alpha <= 0 and cutmix_alpha <= 0:
+            raise Exception(
+                f"At least one of mixup_alpha and cutmix_alpha must be greater than 0. mixup_alpha: {mixup_alpha}, cutmix_alpha: {cutmix_alpha}"
+            )
+        self._mixup_alpha = mixup_alpha
+        self._cutmix_alpha = cutmix_alpha
+        self._switch_prob = switch_prob
+
+    def _mixup(self, imgs, labels, bs):
         idx = np.random.permutation(bs)
-        lam = np.random.beta(self._alpha, self._alpha)
+        lam = np.random.beta(self._mixup_alpha, self._mixup_alpha)
         lams = np.array([lam] * bs, dtype=np.float32)
         imgs = lam * imgs + (1 - lam) * imgs[idx]
         return list(zip(imgs, labels, labels[idx], lams))

-
-class CutmixOperator(BatchOperator):
-    """ Cutmix operator """
-    def __init__(self, alpha=0.2):
-        assert alpha > 0., \
-            'parameter alpha[%f] should > 0.0' % (alpha)
-        self._alpha = alpha
-
     def _rand_bbox(self, size, lam):
         """ _rand_bbox """
         w = size[2]
         h = size[3]
         cut_rat = np.sqrt(1. - lam)
-        cut_w = np.int(w * cut_rat)
-        cut_h = np.int(h * cut_rat)
+        cut_w = int(w * cut_rat)
+        cut_h = int(h * cut_rat)

         # uniform
         cx = np.random.randint(w)
@@ -86,10 +96,9 @@ class CutmixOperator(BatchOperator):

         return bbx1, bby1, bbx2, bby2

-    def __call__(self, batch):
-        imgs, labels, bs = self._unpack(batch)
+    def _cutmix(self, imgs, labels, bs):
         idx = np.random.permutation(bs)
-        lam = np.random.beta(self._alpha, self._alpha)
+        lam = np.random.beta(self._cutmix_alpha, self._cutmix_alpha)

         bbx1, bby1, bbx2, bby2 = self._rand_bbox(imgs.shape, lam)
         imgs[:, :, bbx1:bbx2, bby1:bby2] = imgs[idx, :, bbx1:bbx2, bby1:bby2]
@@ -98,9 +107,24 @@ class CutmixOperator(BatchOperator):
         lams = np.array([lam] * bs, dtype=np.float32)
         return list(zip(imgs, labels, labels[idx], lams))

+    def __call__(self, batch):
+        imgs, labels, bs = self._unpack(batch)
+        if np.random.rand() < self._switch_prob:
+            return self._cutmix(imgs, labels, bs)
+        else:
+            return self._mixup(imgs, labels, bs)
+
+
+class CutmixOperator(BatchOperator):
+    def __init__(self, **kwargs):
+        raise Exception(
+            f"\"CutmixOperator\" has been deprecated. Please use MixupOperator with \"cutmix_alpha\" and \"switch_prob\" to enable Cutmix. Refer to the doc for details."
+        )
+

 class FmixOperator(BatchOperator):
     """ Fmix operator """
+
     def __init__(self, alpha=1, decay_power=3, max_soft=0., reformulate=False):
         self._alpha = alpha
         self._decay_power = decay_power

From b578662b32cf86ca563fb9717f1252f5ec767eaf Mon Sep 17 00:00:00 2001
From: gaotingquan
Date: Mon, 26 Jul 2021 07:36:54 +0000
Subject: [PATCH 2/2] perf: add OpSampler to support multiple ops

---
 ppcls/data/preprocess/__init__.py             |   4 +-
 .../preprocess/batch_ops/batch_operators.py   | 106 ++++++++++++------
 2 files changed, 77 insertions(+), 33 deletions(-)

diff --git a/ppcls/data/preprocess/__init__.py b/ppcls/data/preprocess/__init__.py
index ebd0df420..12bb34b2d 100644
--- a/ppcls/data/preprocess/__init__.py
+++ b/ppcls/data/preprocess/__init__.py
@@ -29,7 +29,7 @@ from ppcls.data.preprocess.ops.operators import NormalizeImage
 from ppcls.data.preprocess.ops.operators import ToCHWImage
 from ppcls.data.preprocess.ops.operators import AugMix

-from ppcls.data.preprocess.batch_ops.batch_operators import MixupOperator, CutmixOperator, FmixOperator
+from ppcls.data.preprocess.batch_ops.batch_operators import MixupOperator, CutmixOperator, OpSampler, FmixOperator

 import six
 import numpy as np
@@ -45,6 +45,7 @@ def transform(data, ops=[]):

 class AutoAugment(RawImageNetPolicy):
     """ ImageNetPolicy wrapper to auto fit different img types """
+
     def __init__(self, *args, **kwargs):
         if six.PY2:
             super(AutoAugment, self).__init__(*args, **kwargs)
@@ -69,6 +70,7 @@ class AutoAugment(RawImageNetPolicy):

 class RandAugment(RawRandAugment):
     """ RandAugment wrapper to auto fit different img types """
+
     def __init__(self, *args, **kwargs):
         if six.PY2:
             super(RandAugment, self).__init__(*args, **kwargs)

diff --git a/ppcls/data/preprocess/batch_ops/batch_operators.py b/ppcls/data/preprocess/batch_ops/batch_operators.py
index 55c1b5a44..1f3bd3253 100644
--- a/ppcls/data/preprocess/batch_ops/batch_operators.py
+++ b/ppcls/data/preprocess/batch_ops/batch_operators.py
@@ -16,8 +16,11 @@ from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
 from __future__ import unicode_literals
+import random
+
 import numpy as np

+from ppcls.utils import logger
 from ppcls.data.preprocess.ops.fmix import sample_mask


@@ -46,37 +49,50 @@ class BatchOperator(object):


 class MixupOperator(BatchOperator):
-    """Mixup and Cutmix operator"""
+    """ Mixup operator """

-    def __init__(self,
-                 mixup_alpha: float=1.,
-                 cutmix_alpha: float=0.,
-                 switch_prob: float=0.5):
+    def __init__(self, alpha: float=1.):
         """Build Mixup operator

         Args:
-            mixup_alpha (float, optional): The parameter alpha of mixup; mixup is active if > 0. Defaults to 1.0.
-            cutmix_alpha (float, optional): The parameter alpha of cutmix; cutmix is active if > 0. Defaults to 0.0.
-            switch_prob (float, optional): The probability of switching to cutmix instead of mixup when both are active. Defaults to 0.5.
+            alpha (float, optional): The parameter alpha of mixup. Defaults to 1.0.

         Raises:
-            Exception: The values of the parameters are illegal.
+            Exception: The value of the parameter is illegal.
         """
-        if mixup_alpha <= 0 and cutmix_alpha <= 0:
+        if alpha <= 0:
             raise Exception(
-                f"At least one of mixup_alpha and cutmix_alpha must be greater than 0. mixup_alpha: {mixup_alpha}, cutmix_alpha: {cutmix_alpha}"
+                f"Parameter \"alpha\" of Mixup should be greater than 0. \"alpha\": {alpha}."
             )
-        self._mixup_alpha = mixup_alpha
-        self._cutmix_alpha = cutmix_alpha
-        self._switch_prob = switch_prob
+        self._alpha = alpha

-    def _mixup(self, imgs, labels, bs):
+    def __call__(self, batch):
+        imgs, labels, bs = self._unpack(batch)
         idx = np.random.permutation(bs)
-        lam = np.random.beta(self._mixup_alpha, self._mixup_alpha)
+        lam = np.random.beta(self._alpha, self._alpha)
         lams = np.array([lam] * bs, dtype=np.float32)
         imgs = lam * imgs + (1 - lam) * imgs[idx]
         return list(zip(imgs, labels, labels[idx], lams))

+
+class CutmixOperator(BatchOperator):
+    """ Cutmix operator """
+
+    def __init__(self, alpha=0.2):
+        """Build Cutmix operator
+
+        Args:
+            alpha (float, optional): The parameter alpha of cutmix. Defaults to 0.2.
+
+        Raises:
+            Exception: The value of the parameter is illegal.
+        """
+        if alpha <= 0:
+            raise Exception(
+                f"Parameter \"alpha\" of Cutmix should be greater than 0. \"alpha\": {alpha}."
+            )
+        self._alpha = alpha
+
     def _rand_bbox(self, size, lam):
         """ _rand_bbox """
         w = size[2]
@@ -96,9 +112,10 @@ class MixupOperator(BatchOperator):

         return bbx1, bby1, bbx2, bby2

-    def _cutmix(self, imgs, labels, bs):
+    def __call__(self, batch):
+        imgs, labels, bs = self._unpack(batch)
         idx = np.random.permutation(bs)
-        lam = np.random.beta(self._cutmix_alpha, self._cutmix_alpha)
+        lam = np.random.beta(self._alpha, self._alpha)

         bbx1, bby1, bbx2, bby2 = self._rand_bbox(imgs.shape, lam)
         imgs[:, :, bbx1:bbx2, bby1:bby2] = imgs[idx, :, bbx1:bbx2, bby1:bby2]
@@ -107,20 +124,6 @@ class MixupOperator(BatchOperator):
         lams = np.array([lam] * bs, dtype=np.float32)
         return list(zip(imgs, labels, labels[idx], lams))

-    def __call__(self, batch):
-        imgs, labels, bs = self._unpack(batch)
-        if np.random.rand() < self._switch_prob:
-            return self._cutmix(imgs, labels, bs)
-        else:
-            return self._mixup(imgs, labels, bs)
-
-
-class CutmixOperator(BatchOperator):
-    def __init__(self, **kwargs):
-        raise Exception(
-            f"\"CutmixOperator\" has been deprecated. Please use MixupOperator with \"cutmix_alpha\" and \"switch_prob\" to enable Cutmix. Refer to the doc for details."
-        )
-

 class FmixOperator(BatchOperator):
     """ Fmix operator """

@@ -139,3 +142,42 @@ class FmixOperator(BatchOperator):
             size, self._max_soft, self._reformulate)
         imgs = mask * imgs + (1 - mask) * imgs[idx]
         return list(zip(imgs, labels, labels[idx], [lam] * bs))
+
+
+class OpSampler(object):
+    """ Sample an operator from a candidate list according to each operator's probability """
+
+    def __init__(self, **op_dict):
+        """Build OpSampler
+
+        Raises:
+            Exception: The parameter \"prob\" of the operator(s) is set incorrectly.
+        """
+        if len(op_dict) < 1:
+            logger.warning("ConfigWarning: No operator in \"OpSampler\". \"OpSampler\" has been skipped.")
+
+        self.ops = {}
+        total_prob = 0
+        for op_name in op_dict:
+            param = op_dict[op_name]
+            if "prob" not in param:
+                msg = f"ConfigWarning: Parameter \"prob\" should be set when using an operator in \"OpSampler\". The prob of operator \"{op_name}\" has been set to 0."
+                logger.warning(msg)
+            prob = param.pop("prob", 0)
+            total_prob += prob
+            op = eval(op_name)(**param)
+            self.ops.update({op: prob})
+
+        if total_prob > 1:
+            msg = "ConfigError: The total prob of operators in \"OpSampler\" should not be greater than 1."
+            logger.error(msg)
+            raise Exception(msg)
+
+        # add "None Op" when total_prob < 1; "None Op" does nothing
+        self.ops[None] = 1 - total_prob
+
+    def __call__(self, batch):
+        op = random.choices(
+            list(self.ops.keys()), weights=list(self.ops.values()), k=1)[0]
+        # return the batch unchanged when the "None Op" is drawn
+        return op(batch) if op else batch
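
---

A minimal usage sketch of the new `OpSampler` (illustration only, not part of the patch). `OpSampler` instantiates each named batch operator via `eval`, so the keyword names must match the operator class names defined above, and each value is that operator's constructor kwargs plus its `prob`. The batch layout below, a list of `(img, label)` pairs with CHW float32 images, is an assumption based on how `_unpack` is used in this file.

```python
import numpy as np

from ppcls.data.preprocess.batch_ops.batch_operators import OpSampler

# 40% of batches get Mixup, 30% get Cutmix; the remaining 30% fall to the
# implicit "None Op" and pass through unchanged.
sampler = OpSampler(
    MixupOperator={"alpha": 0.8, "prob": 0.4},
    CutmixOperator={"alpha": 1.0, "prob": 0.3})

# Hypothetical batch: 32 CHW float32 images with integer class labels.
batch = [(np.random.rand(3, 224, 224).astype("float32"), np.int64(i % 10))
         for i in range(32)]

out = sampler(batch)
# When an op fires, each item becomes (img, label_a, label_b, lam);
# when the "None Op" is drawn, the original batch is returned as-is.
```

Note the design choice this implies for downstream code: a collate function or loss must accept both the mixed four-tuple form and the raw two-tuple form, since which one is produced is decided per batch at runtime.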