remove distillation loss

pull/166/head
ylf 2022-05-31 22:40:07 +08:00
parent c5913bf41f
commit 1b38e41db4
1 changed file with 2 additions and 2 deletions


@@ -38,8 +38,8 @@ def train_one_epoch(model: torch.nn.Module, criterion: DistillationLoss,
         targets = targets.gt(0.0).type(targets.dtype)
         with torch.cuda.amp.autocast():
-            outputs, _, _ = model(samples)
-            loss = criterion(samples, outputs, targets)
+            loss, _, _ = model(samples)
+            #loss = criterion(samples, outputs, targets)
         loss_value = loss.item()
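
For context, below is a minimal sketch of what the training step looks like after this change. Only the lines inside autocast() mirror the diff; the surrounding function, the optimizer, and the GradScaler-based backward pass are assumptions following the standard torch.cuda.amp recipe, not code taken from this repository.

import torch

def train_step(model, samples, targets, optimizer, scaler):
    # Binarize targets, as in the hunk context above.
    targets = targets.gt(0.0).type(targets.dtype)

    with torch.cuda.amp.autocast():
        # After this commit the model returns the loss itself as the first
        # element of its output tuple, so the external DistillationLoss
        # criterion is no longer called here.
        loss, _, _ = model(samples)
        # loss = criterion(samples, outputs, targets)  # removed by this commit

    loss_value = loss.item()

    # Standard mixed-precision update (assumed; not part of the diff).
    optimizer.zero_grad()
    scaler.scale(loss).backward()
    scaler.step(optimizer)
    scaler.update()
    return loss_value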