Improved model+EMA checkpointing 2 (#2295)
parent ec1d8496ba
commit 71dd2768f2
test.py (1 changed line)

@@ -269,6 +269,7 @@ def test(data,
             print(f'pycocotools unable to run: {e}')
 
     # Return results
+    model.float()  # for training
     if not training:
         s = f"\n{len(list(save_dir.glob('labels/*.txt')))} labels saved to {save_dir / 'labels'}" if save_txt else ''
         print(f"Results saved to {save_dir}{s}")
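Note on the test.py change: during training the model is evaluated in half precision on GPU, and the added model.float() casts it back to FP32 before test() returns, so the training loop keeps updating full-precision weights. A minimal sketch of that pattern, assuming a generic PyTorch module and dataloader; evaluate() and its arguments are illustrative, not the test.py API:

# Sketch: evaluate in FP16, then restore FP32 before handing the model back.
import torch

def evaluate(model, loader, device='cuda'):
    model.eval()
    model.half()  # FP16 inference for speed and memory
    with torch.no_grad():
        for imgs, _ in loader:
            _ = model(imgs.to(device).half())
    model.float()  # for training: return full-precision weights
    return model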
train.py (7 changed lines)

@@ -4,6 +4,7 @@ import math
 import os
 import random
 import time
+from copy import deepcopy
 from pathlib import Path
 from threading import Thread
@@ -381,8 +382,8 @@ def train(hyp, opt, device, tb_writer=None, wandb=None):
             ckpt = {'epoch': epoch,
                     'best_fitness': best_fitness,
                     'training_results': results_file.read_text(),
-                    'model': (model.module if is_parallel(model) else model).half(),
-                    'ema': (ema.ema.half(), ema.updates),
+                    'model': deepcopy(model.module if is_parallel(model) else model).half(),
+                    'ema': (deepcopy(ema.ema).half(), ema.updates),
                     'optimizer': optimizer.state_dict(),
                     'wandb_id': wandb_run.id if wandb else None}
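The key change here: .half() is now called on a deepcopy of the model and of the EMA instead of on the live objects, so the FP16 cast no longer downgrades the training weights in place. That is what makes the in-place restore removed in the next hunk unnecessary. A minimal sketch of the pattern, with generic model/ema/optimizer names mirroring the diff; save_checkpoint itself is illustrative, not a train.py function:

# Sketch: serialize FP16 copies without touching the live FP32 training weights.
from copy import deepcopy
import torch

def save_checkpoint(path, epoch, model, ema, optimizer):
    ckpt = {'epoch': epoch,
            # deepcopy first, then .half(): the cast applies to the copy only
            'model': deepcopy(model).half(),
            'ema': (deepcopy(ema.ema).half(), ema.updates),
            'optimizer': optimizer.state_dict()}
    torch.save(ckpt, path)
    del ckpt  # drop the FP16 copies once written to disk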
@@ -392,8 +393,6 @@ def train(hyp, opt, device, tb_writer=None, wandb=None):
                 torch.save(ckpt, best)
             del ckpt
-            model.float(), ema.ema.float()
-
         # end epoch ----------------------------------------------------------------------------------------------------
     # end training
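Follow-up on the removed line: because the FP16 cast now happens on copies, there is nothing to undo on the live model after saving, so model.float(), ema.ema.float() can be dropped. Loading such a checkpoint still means casting back to FP32 before resuming training; a sketch under the same assumptions as above, with an illustrative filename:

# Sketch: resume from an FP16 checkpoint written by the pattern above.
import torch

ckpt = torch.load('last.pt', map_location='cpu')
model = ckpt['model'].float()        # back to FP32 for continued training
ema_model, ema_updates = ckpt['ema']
ema_model = ema_model.float()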