W&B: track batch size after autobatch (#6039)
* track batch size after autobatch
* remove redundant import
* Update __init__.py
* Update __init__.py

Co-authored-by: Glenn Jocher <glenn.jocher@ultralytics.com>
parent c72270c076
commit db6ec66a60
3 changed files: train.py, utils/callbacks.py, utils/loggers/__init__.py
train.py
@@ -138,6 +138,7 @@ def train(hyp,  # path/to/hyp.yaml or hyp dictionary
     # Batch size
     if RANK == -1 and batch_size == -1:  # single-GPU only, estimate best batch size
         batch_size = check_train_batch_size(model, imgsz)
+        loggers.on_params_update({"batch_size": batch_size})

     # Optimizer
     nbs = 64  # nominal batch size
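The one added line closes a reporting gap: when AutoBatch overrides batch_size, the computed value previously never reached the experiment tracker. A minimal sketch of the resulting flow, with Loggers and check_train_batch_size reduced to illustrative stubs rather than the real YOLOv5 utilities:

def check_train_batch_size(model, imgsz):
    # The real helper (utils/autobatch.py) profiles CUDA memory to pick a
    # batch size; this stub returns a fixed value for illustration.
    return 16

class Loggers:
    def __init__(self, wandb_run=None):
        self.wandb = wandb_run  # active W&B run, or None if W&B is disabled

    def on_params_update(self, params):
        # Mirror the real logger: push {param: value} pairs into the run config
        if self.wandb:
            self.wandb.config.update(params, allow_val_change=True)

loggers = Loggers()
batch_size = check_train_batch_size(model=None, imgsz=640)
loggers.on_params_update({'batch_size': batch_size})  # safe no-op without W&B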
utils/callbacks.py
@@ -32,7 +32,7 @@ class Callbacks:
             'on_fit_epoch_end': [],  # fit = train + val
             'on_model_save': [],
             'on_train_end': [],
-
+            'on_params_update': [],
             'teardown': [],
         }
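The registry change simply reserves a new hook name. For context, a simplified sketch of the hook-registry pattern this class implements; register_action and run mirror the YOLOv5 method names, but the bodies here are stand-ins:

class Callbacks:
    # Simplified stand-in for the YOLOv5 hook registry
    def __init__(self):
        self._callbacks = {'on_params_update': [], 'teardown': []}

    def register_action(self, hook, name='', callback=None):
        # Attach a handler to a known hook
        assert hook in self._callbacks, f"hook '{hook}' not found"
        assert callable(callback), 'callback must be callable'
        self._callbacks[hook].append({'name': name, 'callback': callback})

    def run(self, hook, *args, **kwargs):
        # Fire every handler registered under this hook, in order
        for action in self._callbacks.get(hook, []):
            action['callback'](*args, **kwargs)

cb = Callbacks()
cb.register_action('on_params_update', name='printer', callback=print)
cb.run('on_params_update', {'batch_size': 16})  # prints {'batch_size': 16}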
utils/loggers/__init__.py
@@ -157,3 +157,9 @@ class Loggers():
             else:
                 self.wandb.finish_run()
                 self.wandb = WandbLogger(self.opt)
+
+    def on_params_update(self, params):
+        # Update hyperparams or configs of the experiment
+        # params: A dict containing {param: value} pairs
+        if self.wandb:
+            self.wandb.wandb_run.config.update(params, allow_val_change=True)
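allow_val_change=True is the key detail: a W&B run's config is effectively write-once, and train.py seeds batch_size with the -1 sentinel before AutoBatch resolves it, so overwriting the key must be explicitly allowed. A hedged sketch using the public wandb API (the project name is hypothetical):

import wandb

run = wandb.init(project='yolov5-demo', config={'batch_size': -1})  # hypothetical project
# Overwriting an existing config key without allow_val_change=True makes
# wandb raise a config error; with it, the value is updated in place.
run.config.update({'batch_size': 16}, allow_val_change=True)
run.finish()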