From a62a45b2dd688b1b539e85d884e4191fc9525afe Mon Sep 17 00:00:00 2001
From: Glenn Jocher
Date: Fri, 11 Sep 2020 16:59:13 -0700
Subject: [PATCH] prevent testloader caching on --notest

---
 train.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/train.py b/train.py
index 94f3673c4..4aae4cbf3 100644
--- a/train.py
+++ b/train.py
@@ -165,8 +165,8 @@ def train(hyp, opt, device, tb_writer=None):
 
     # Trainloader
     dataloader, dataset = create_dataloader(train_path, imgsz, batch_size, gs, opt,
-                                            hyp=hyp, augment=True, cache=opt.cache_images, rect=opt.rect, rank=rank,
-                                            world_size=opt.world_size, workers=opt.workers)
+                                            hyp=hyp, augment=True, cache=opt.cache_images, rect=opt.rect,
+                                            rank=rank, world_size=opt.world_size, workers=opt.workers)
     mlc = np.concatenate(dataset.labels, 0)[:, 0].max()  # max label class
     nb = len(dataloader)  # number of batches
     assert mlc < nc, 'Label class %g exceeds nc=%g in %s. Possible class labels are 0-%g' % (mlc, nc, opt.data, nc - 1)
@@ -175,8 +175,8 @@ def train(hyp, opt, device, tb_writer=None):
     if rank in [-1, 0]:
         ema.updates = start_epoch * nb // accumulate  # set EMA updates
         testloader = create_dataloader(test_path, imgsz_test, total_batch_size, gs, opt,
-                                       hyp=hyp, augment=False, cache=opt.cache_images, rect=True, rank=-1,
-                                       world_size=opt.world_size, workers=opt.workers)[0]  # testloader
+                                       hyp=hyp, augment=False, cache=opt.cache_images and not opt.notest, rect=True,
+                                       rank=-1, world_size=opt.world_size, workers=opt.workers)[0]  # testloader
 
     if not opt.resume:
         labels = np.concatenate(dataset.labels, 0)
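
Note: the substantive change is the testloader's cache argument, which becomes
cache=opt.cache_images and not opt.notest; the trainloader hunk only re-wraps
the argument list. Below is a minimal standalone sketch of that guard. The
argparse flags mirror the opt.cache_images and opt.notest options referenced in
the diff, but the script is illustrative only, not the YOLOv5 train.py code.

    # Illustrative sketch (not the YOLOv5 source): how the caching guard behaves.
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--cache-images', action='store_true', help='cache images for faster loading')
    parser.add_argument('--notest', action='store_true', help='only test the final epoch')
    opt = parser.parse_args(['--cache-images', '--notest'])  # example invocation

    train_cache = opt.cache_images                    # trainloader: always honor the flag
    test_cache = opt.cache_images and not opt.notest  # testloader: skip the cache when --notest is set

    print(f'train cache: {train_cache}, test cache: {test_cache}')  # -> True, False

The rationale: an upfront test-set image cache costs memory and startup time,
which is only amortized when testing runs every epoch; with --notest testing
runs once at most, so the guard skips building the cache.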