Throw when pretrained weights not available and pretrained=True (principle of least surprise).

pull/1804/head
Ross Wightman 2023-05-10 10:44:34 -07:00
parent 8ce9a2c00a
commit ff2464e2a0
1 changed file with 3 additions and 4 deletions

@@ -152,8 +152,7 @@ def load_pretrained(
     """
     pretrained_cfg = pretrained_cfg or getattr(model, 'pretrained_cfg', None)
     if not pretrained_cfg:
-        _logger.warning("Invalid pretrained config, cannot load weights.")
-        return
+        raise RuntimeError("Invalid pretrained config, cannot load weights. Use `pretrained=False` for random init.")
 
     load_from, pretrained_loc = _resolve_pretrained_source(pretrained_cfg)
     if load_from == 'state_dict':
@@ -186,8 +185,8 @@ def load_pretrained(
         else:
             state_dict = load_state_dict_from_hf(pretrained_loc)
     else:
-        _logger.warning("No pretrained weights exist or were found for this model. Using random initialization.")
-        return
+        model_name = pretrained_cfg.get('architecture', 'this model')
+        raise RuntimeError(f"No pretrained weights exist for {model_name}. Use `pretrained=False` for random init.")
 
     if filter_fn is not None:
         try:
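
From a caller's perspective, the change surfaces as an exception rather than a silently random-initialized model. A minimal sketch of the new behavior, assuming the RuntimeError propagates up through timm.create_model; the model name below is a placeholder standing in for a registered architecture that has no released weights:

import timm

model_name = 'some_arch_without_weights'  # placeholder, not a real timm architecture

try:
    # Before this commit: a warning was logged and a randomly initialized
    # model was returned. After: load_pretrained() raises RuntimeError.
    model = timm.create_model(model_name, pretrained=True)
except RuntimeError as err:
    print(f'No pretrained weights available: {err}')
    # Explicitly opt in to random initialization instead.
    model = timm.create_model(model_name, pretrained=False)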