diff --git a/timm/data/mixup.py b/timm/data/mixup.py
index be0bae36..26dc2391 100644
--- a/timm/data/mixup.py
+++ b/timm/data/mixup.py
@@ -120,7 +120,7 @@ class Mixup:
 
     def _params_per_elem(self, batch_size):
         lam = np.ones(batch_size, dtype=np.float32)
-        use_cutmix = np.zeros(batch_size, dtype=np.bool)
+        use_cutmix = np.zeros(batch_size, dtype=bool)
         if self.mixup_enabled:
             if self.mixup_alpha > 0. and self.cutmix_alpha > 0.:
                 use_cutmix = np.random.rand(batch_size) < self.switch_prob
@@ -131,7 +131,7 @@ class Mixup:
             elif self.mixup_alpha > 0.:
                 lam_mix = np.random.beta(self.mixup_alpha, self.mixup_alpha, size=batch_size)
             elif self.cutmix_alpha > 0.:
-                use_cutmix = np.ones(batch_size, dtype=np.bool)
+                use_cutmix = np.ones(batch_size, dtype=bool)
                 lam_mix = np.random.beta(self.cutmix_alpha, self.cutmix_alpha, size=batch_size)
             else:
                 assert False, "One of mixup_alpha > 0., cutmix_alpha > 0., cutmix_minmax not None should be true."
diff --git a/timm/models/volo.py b/timm/models/volo.py
index 9401d84b..260cd20d 100644
--- a/timm/models/volo.py
+++ b/timm/models/volo.py
@@ -301,8 +301,8 @@ def rand_bbox(size, lam, scale=1):
     W = size[1] // scale
     H = size[2] // scale
     cut_rat = np.sqrt(1. - lam)
-    cut_w = np.int(W * cut_rat)
-    cut_h = np.int(H * cut_rat)
+    cut_w = (W * cut_rat).astype(int)
+    cut_h = (H * cut_rat).astype(int)
 
     # uniform
     cx = np.random.randint(W)
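
A minimal sketch of the behavior these hunks rely on, assuming NumPy >= 1.24
(where the deprecated np.bool and np.int aliases were removed, after deprecation
in 1.20); the batch size, image size, and beta parameters are illustrative only:

    import numpy as np

    # The old spellings fail on recent NumPy:
    #   np.zeros(4, dtype=np.bool)  -> AttributeError on NumPy >= 1.24
    #   np.int(3.7)                 -> AttributeError on NumPy >= 1.24
    # The builtin types are the drop-in replacements.

    use_cutmix = np.zeros(4, dtype=bool)      # all-False boolean mask
    lam = float(np.random.beta(0.8, 0.8))     # illustrative mixing coefficient in (0, 1)

    W = H = 224                               # illustrative feature-map size
    cut_rat = np.sqrt(1. - lam)               # np.float64 scalar
    cut_w = (W * cut_rat).astype(int)         # NumPy scalars support .astype()
    cut_h = int(H * cut_rat)                  # builtin int() truncates to the same value here

    print(use_cutmix.dtype, cut_w, cut_h)     # e.g. "bool 117 117"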