Compare commits


3 Commits

Author        SHA1        Message                                  Date
Glenn Jocher  dc54ed5763  --freeze fix (#6044)                     2021-12-20 18:24:07 +01:00
                          Fix for https://github.com/ultralytics/yolov5/issues/6038
Glenn Jocher  b8a4babd60  Simplify set_logging() indexing (#6042)  2021-12-20 17:42:52 +01:00
Glenn Jocher  0db9d5b6a2  Kaggle LOGGER fix (#6041)                2021-12-20 17:30:46 +01:00
2 changed files with 5 additions and 3 deletions

train.py

@@ -60,7 +60,7 @@ def train(hyp,  # path/to/hyp.yaml or hyp dictionary
           device,
           callbacks
           ):
-    save_dir, epochs, batch_size, weights, single_cls, evolve, data, cfg, resume, noval, nosave, workers, freeze, = \
+    save_dir, epochs, batch_size, weights, single_cls, evolve, data, cfg, resume, noval, nosave, workers, freeze = \
         Path(opt.save_dir), opt.epochs, opt.batch_size, opt.weights, opt.single_cls, opt.evolve, opt.data, opt.cfg, \
         opt.resume, opt.noval, opt.nosave, opt.workers, opt.freeze
@@ -124,7 +124,7 @@ def train(hyp,  # path/to/hyp.yaml or hyp dictionary
         model = Model(cfg, ch=3, nc=nc, anchors=hyp.get('anchors')).to(device)  # create

     # Freeze
-    freeze = [f'model.{x}.' for x in (freeze if isinstance(freeze, list) else range(freeze))]  # layers to freeze
+    freeze = [f'model.{x}.' for x in (freeze if len(freeze) > 1 else range(freeze[0]))]  # layers to freeze
     for k, v in model.named_parameters():
         v.requires_grad = True  # train all layers
         if any(x in k for x in freeze):
@@ -469,7 +469,7 @@ def parse_opt(known=False):
     parser.add_argument('--linear-lr', action='store_true', help='linear LR')
     parser.add_argument('--label-smoothing', type=float, default=0.0, help='Label smoothing epsilon')
     parser.add_argument('--patience', type=int, default=100, help='EarlyStopping patience (epochs without improvement)')
-    parser.add_argument('--freeze', nargs='+', type=int, default=0, help='Freeze layers: backbone=10, first3=0 1 2')
+    parser.add_argument('--freeze', nargs='+', type=int, default=[0], help='Freeze layers: backbone=10, first3=0 1 2')
     parser.add_argument('--save-period', type=int, default=-1, help='Save checkpoint every x epochs (disabled if < 1)')
     parser.add_argument('--local_rank', type=int, default=-1, help='DDP parameter, do not modify')
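
Together, these two train.py changes make --freeze parse consistently to a list (nargs='+' with a [0] default), so the layer-selection line no longer needs to branch on type. A minimal standalone sketch of the resulting behavior; the freeze_prefixes() helper is hypothetical, extracted here only for illustration:

def freeze_prefixes(freeze):
    # freeze mirrors the argparse output: always a list of ints
    # one value -> freeze the first N layers; several values -> freeze exactly those indices
    return [f'model.{x}.' for x in (freeze if len(freeze) > 1 else range(freeze[0]))]

print(freeze_prefixes([0]))        # [] -> default: train all layers
print(freeze_prefixes([10]))       # ['model.0.', ..., 'model.9.'] -> freeze backbone
print(freeze_prefixes([0, 1, 2]))  # ['model.0.', 'model.1.', 'model.2.'] -> freeze first 3 layers

This addresses the failure mode from https://github.com/ultralytics/yolov5/issues/6038: nargs='+' turns --freeze 10 into [10], which the old isinstance(freeze, list) check treated as an explicit layer list, freezing only 'model.10.' instead of the first 10 layers.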

utils/general.py

@@ -46,6 +46,8 @@ os.environ['NUMEXPR_MAX_THREADS'] = str(NUM_THREADS)  # NumExpr max threads

 def set_logging(name=None, verbose=True):
     # Sets level and returns logger
+    for h in logging.root.handlers:
+        logging.root.removeHandler(h)  # remove all handlers associated with the root logger object
     rank = int(os.getenv('RANK', -1))  # rank in world for Multi-GPU trainings
     logging.basicConfig(format="%(message)s", level=logging.INFO if (verbose and rank in (-1, 0)) else logging.WARNING)
     return logging.getLogger(name)
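
logging.basicConfig() returns without doing anything when the root logger already has handlers, which is exactly the Kaggle-notebook situation: the environment attaches its own root handler at startup, so the format and level above were silently ignored. The added loop clears those handlers first so basicConfig() actually takes effect. A minimal reproduction, independent of YOLOv5 (it iterates over a copy of the handler list, since removing entries while iterating the live list can skip some):

import logging

logging.root.addHandler(logging.StreamHandler())  # simulate a pre-configured environment such as Kaggle

logging.basicConfig(format="%(message)s", level=logging.INFO)
print(logging.root.level == logging.INFO)  # False: root already had a handler, basicConfig was a no-op

for h in list(logging.root.handlers):  # copy the list: removing while iterating the original skips entries
    logging.root.removeHandler(h)

logging.basicConfig(format="%(message)s", level=logging.INFO)
print(logging.root.level == logging.INFO)  # True: the configuration now applies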