Logger Cleanup (#9828)
parent e3ff780676
commit acff977af3
@@ -91,17 +91,6 @@ def train(hyp, opt, device, callbacks):  # hyp is path/to/hyp.yaml or hyp dictionary
     data_dict = None
     if RANK in {-1, 0}:
         logger = GenericLogger(opt=opt, console_logger=LOGGER)
-        # loggers = Loggers(save_dir, weights, opt, hyp, LOGGER)  # loggers instance
-        # if loggers.clearml:
-        #     data_dict = loggers.clearml.data_dict  # None if no ClearML dataset or filled in by ClearML
-        # if loggers.wandb:
-        #     data_dict = loggers.wandb.data_dict
-        #     if resume:
-        #         weights, epochs, hyp, batch_size = opt.weights, opt.epochs, opt.hyp, opt.batch_size
-        #
-        # # Register actions
-        # for k in methods(loggers):
-        #     callbacks.register_action(k, callback=getattr(loggers, k))
 
     # Config
     plots = not evolve and not opt.noplots  # create plots
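For reference, the deleted block used utils.general.methods() to register every public method of a Loggers instance on the Callbacks dispatcher; the surviving GenericLogger path logs directly, so no registration step remains. A self-contained sketch of that register-by-reflection pattern, with toy stand-ins rather than the real YOLOv5 Callbacks/Loggers classes:

# Toy stand-ins for YOLOv5's Callbacks and Loggers classes (hypothetical, for illustration).
class Callbacks:
    def __init__(self):
        self._actions = {}

    def register_action(self, hook, callback):
        self._actions.setdefault(hook, []).append(callback)

    def run(self, hook, *args, **kwargs):
        for cb in self._actions.get(hook, []):
            cb(*args, **kwargs)


class Loggers:
    def on_train_epoch_end(self, epoch):
        print(f'epoch {epoch} ended')


def methods(instance):  # mirrors utils.general.methods(): public callable attributes
    return [m for m in dir(instance) if callable(getattr(instance, m)) and not m.startswith('__')]


callbacks, loggers = Callbacks(), Loggers()
for k in methods(loggers):  # the pattern the deleted lines implemented
    callbacks.register_action(k, callback=getattr(loggers, k))
callbacks.run('on_train_epoch_end', epoch=0)  # prints: epoch 0 ended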
@@ -400,7 +389,6 @@ def train(hyp, opt, device, callbacks):  # hyp is path/to/hyp.yaml or hyp dictionary
                 'ema': deepcopy(ema.ema).half(),
                 'updates': ema.updates,
                 'optimizer': optimizer.state_dict(),
-                # 'wandb_id': loggers.wandb.wandb_run.id if loggers.wandb else None,
                 'opt': vars(opt),
                 'date': datetime.now().isoformat()}
 
train.py
@@ -53,7 +53,6 @@ from utils.general import (LOGGER, check_amp, check_dataset, check_file, check_g
                            one_cycle, print_args, print_mutation, strip_optimizer, yaml_save)
-from utils.loggers import Loggers
 from utils.loggers.comet.comet_utils import check_comet_resume
 from utils.loggers.wandb.wandb_utils import check_wandb_resume
 from utils.loss import ComputeLoss
 from utils.metrics import fitness
 from utils.plots import plot_evolve
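Before removing an import like this one, a quick AST scan can confirm the name is no longer referenced anywhere in the file; a hypothetical one-off helper, not part of the repo:

# Hypothetical helper: check whether a bare name is still used anywhere in a file.
import ast

def name_used(path, name):
    tree = ast.parse(open(path, encoding='utf-8').read())
    return any(isinstance(n, ast.Name) and n.id == name for n in ast.walk(tree))

print(name_used('train.py', 'Loggers'))  # expect False once every usage is gone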
@@ -375,7 +374,6 @@ def train(hyp, opt, device, callbacks):  # hyp is path/to/hyp.yaml or hyp dictionary
                 'ema': deepcopy(ema.ema).half(),
                 'updates': ema.updates,
                 'optimizer': optimizer.state_dict(),
-                'wandb_id': loggers.wandb.wandb_run.id if loggers.wandb else None,
                 'opt': vars(opt),
                 'date': datetime.now().isoformat()}
 
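With both the live key (here) and the commented-out one (above) gone, saved checkpoints no longer carry a 'wandb_id' field at all. A minimal round-trip sketch with placeholder values standing in for the real model, EMA, and optimizer state:

# Minimal sketch: checkpoint layout after this change, placeholder values only.
from datetime import datetime
import torch

ckpt = {
    'epoch': 0,
    'best_fitness': 0.0,
    'optimizer': {},  # optimizer.state_dict() in train()
    'opt': {'imgsz': 640},  # vars(opt) in train()
    'date': datetime.now().isoformat()}
torch.save(ckpt, 'ckpt.pt')
assert 'wandb_id' not in torch.load('ckpt.pt')  # key is no longer written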
@@ -483,7 +481,7 @@ def main(opt, callbacks=Callbacks()):
     check_requirements()
 
     # Resume (from specified or most recent last.pt)
-    if opt.resume and not check_wandb_resume(opt) and not check_comet_resume(opt) and not opt.evolve:
+    if opt.resume and not check_comet_resume(opt) and not opt.evolve:
         last = Path(check_file(opt.resume) if isinstance(opt.resume, str) else get_latest_run())
         opt_yaml = last.parent.parent / 'opt.yaml'  # train options yaml
         opt_data = opt.data  # original dataset
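The resume lookup itself is untouched: last.pt is taken from --resume or discovered as the most recent run, and the original training options come from opt.yaml two directories above the weights file. A standalone sketch of that lookup, assuming the usual runs/train/exp/weights/ layout (latest_run is a local stand-in for utils.general.get_latest_run):

# Sketch of main()'s resume lookup, assuming the runs/train/exp/weights/last.pt layout.
import glob, os
from pathlib import Path

def latest_run(search_dir='runs'):  # stand-in for utils.general.get_latest_run
    runs = glob.glob(f'{search_dir}/**/last*.pt', recursive=True)
    return max(runs, key=os.path.getctime) if runs else ''

last = Path(latest_run())
opt_yaml = last.parent.parent / 'opt.yaml'  # e.g. runs/train/exp/opt.yaml
print(f'resuming from {last}, options in {opt_yaml}')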
utils/general.py

@@ -956,7 +956,7 @@ def strip_optimizer(f='best.pt', s=''):  # from utils.general import *; strip_optimizer()
     x = torch.load(f, map_location=torch.device('cpu'))
     if x.get('ema'):
         x['model'] = x['ema']  # replace model with ema
-    for k in 'optimizer', 'best_fitness', 'wandb_id', 'ema', 'updates':  # keys
+    for k in 'optimizer', 'best_fitness', 'ema', 'updates':  # keys
         x[k] = None
     x['epoch'] = -1
     x['model'].half()  # to FP16
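Usage is unchanged and matches the function's own comment. Note that an older checkpoint written before this PR simply keeps its stale 'wandb_id' entry, since the loop only nulls the listed keys:

# Shrink a trained checkpoint for release: drop optimizer/EMA state, cast model to FP16.
from utils.general import strip_optimizer

strip_optimizer('best.pt')  # overwrites best.pt in place (pass s='stripped.pt' to save a copy)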