mirror of https://github.com/facebookresearch/deit
remove distillation loss
parent c5913bf41f
commit 1b38e41db4
@@ -38,8 +38,8 @@ def train_one_epoch(model: torch.nn.Module, criterion: DistillationLoss,
             targets = targets.gt(0.0).type(targets.dtype)
 
         with torch.cuda.amp.autocast():
-            outputs, _, _ = model(samples)
-            loss = criterion(samples, outputs, targets)
+            loss, _, _ = model(samples)
+            #loss = criterion(samples, outputs, targets)
 
         loss_value = loss.item()
 
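For context, a minimal sketch of what the patched training step does after this change. It assumes, as the diff suggests, that the model's forward pass now returns the loss directly as the first of three values, so the DistillationLoss criterion is no longer invoked; the function name train_step, the scaler argument, and the optimizer/backward plumbing are the standard torch.cuda.amp pattern added here for illustration, not part of this diff.

import torch

def train_step(model, samples, targets, optimizer, scaler):
    # Binarize targets as in the surrounding code (BCE-style setup). Note
    # that after this commit they are no longer passed to a criterion:
    # the model computes the loss internally.
    targets = targets.gt(0.0).type(targets.dtype)

    with torch.cuda.amp.autocast():
        # Post-commit behaviour: forward returns (loss, _, _) instead of logits.
        loss, _, _ = model(samples)

    loss_value = loss.item()

    # Standard AMP backward/step sequence with a torch.cuda.amp.GradScaler.
    optimizer.zero_grad()
    scaler.scale(loss).backward()
    scaler.step(optimizer)
    scaler.update()
    return loss_value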