Use a valid argument for GradScaler

pull/13483/head
Bala-Vignesh-Reddy 2025-01-05 21:02:57 +05:30
parent 9c4100021a
commit 5da8d8139a
1 changed file with 1 addition and 1 deletion

View File

@ -352,7 +352,7 @@ def train(hyp, opt, device, callbacks):
maps = np.zeros(nc) # mAP per class
results = (0, 0, 0, 0, 0, 0, 0) # P, R, mAP@.5, mAP@.5-.95, val_loss(box, obj, cls)
scheduler.last_epoch = start_epoch - 1 # do not move
scaler = torch.amp.GradScaler('cuda', enabled=amp)
scaler = torch.amp.GradScaler(enabled=amp)
stopper, stop = EarlyStopping(patience=opt.patience), False
compute_loss = ComputeLoss(model) # init loss class
callbacks.run("on_train_start")