Mirror of https://github.com/facebookresearch/deit.git (synced 2025-06-03 14:52:20 +08:00)
remove distillation loss
This commit is contained in:
parent c5913bf41f
commit 1b38e41db4
@@ -38,8 +38,8 @@ def train_one_epoch(model: torch.nn.Module, criterion: DistillationLoss,
             targets = targets.gt(0.0).type(targets.dtype)
 
         with torch.cuda.amp.autocast():
-            outputs, _, _ = model(samples)
-            loss = criterion(samples, outputs, targets)
+            loss, _, _ = model(samples)
+            #loss = criterion(samples, outputs, targets)
 
         loss_value = loss.item()
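For context, a minimal sketch of how the training step reads after this change. It follows the upstream DeiT engine.py pattern (isfinite guard, timm-style loss scaler); train_step and its arguments are illustrative names and are not code from this commit. The key point of the diff is that the model now computes its loss internally and returns it as the first element of a 3-tuple, so the external DistillationLoss criterion is bypassed.

# Hypothetical sketch, not the commit's exact code: the training step
# after the change, where the model returns (loss, aux1, aux2) and the
# DistillationLoss criterion is no longer called.
import math
import sys

import torch


def train_step(model: torch.nn.Module, samples, targets, optimizer, loss_scaler):
    # Binarize soft targets for a BCE-style objective (context line kept
    # from the diff above). Note that once the criterion call is removed,
    # `targets` is computed but no longer consumed in this step.
    targets = targets.gt(0.0).type(targets.dtype)

    with torch.cuda.amp.autocast():
        # The model computes its own loss internally; the two trailing
        # outputs are discarded.
        loss, _, _ = model(samples)
        # Previously: loss = criterion(samples, outputs, targets)

    loss_value = loss.item()
    if not math.isfinite(loss_value):
        # Standard guard from the upstream DeiT engine.
        print("Loss is {}, stopping training".format(loss_value))
        sys.exit(1)

    optimizer.zero_grad()
    # timm-style NativeScaler: scaled backward pass plus optimizer step
    # under AMP.
    loss_scaler(loss, optimizer, parameters=model.parameters())
    return loss_value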