# PaddleClas/ppcls/loss/__init__.py

import copy
import paddle
import paddle.nn as nn
from ppcls.utils import logger
from .celoss import CELoss, MixCELoss
from .googlenetloss import GoogLeNetLoss
from .centerloss import CenterLoss
from .contrasiveloss import ContrastiveLoss
from .contrasiveloss import ContrastiveLoss_XBM
from .emlloss import EmlLoss
from .msmloss import MSMLoss
from .npairsloss import NpairsLoss
from .trihardloss import TriHardLoss
from .triplet import TripletLoss, TripletLossV2
from .tripletangularmarginloss import TripletAngularMarginLoss, TripletAngularMarginLoss_XBM
from .supconloss import SupConLoss
from .softsuploss import SoftSupConLoss
from .ccssl_loss import CCSSLCELoss
from .pairwisecosface import PairwiseCosface
from .dmlloss import DMLLoss
from .distanceloss import DistanceLoss
from .softtargetceloss import SoftTargetCrossEntropy
from .distillationloss import DistillationCELoss
from .distillationloss import DistillationGTCELoss
from .distillationloss import DistillationDMLLoss
from .distillationloss import DistillationDistanceLoss
from .distillationloss import DistillationRKDLoss
from .distillationloss import DistillationKLDivLoss
from .distillationloss import DistillationDKDLoss
from .distillationloss import DistillationWSLLoss
from .distillationloss import DistillationSKDLoss
from .distillationloss import DistillationMultiLabelLoss
from .distillationloss import DistillationDISTLoss
from .distillationloss import DistillationPairLoss
from .multilabelloss import MultiLabelLoss, MultiLabelAsymmetricLoss
from .afdloss import AFDLoss
from .deephashloss import DSHSDLoss
from .deephashloss import LCDSHLoss
from .deephashloss import DCHLoss
from .metabinloss import CELossForMetaBIN
from .metabinloss import TripletLossForMetaBIN
from .metabinloss import InterDomainShuffleLoss
from .metabinloss import IntraDomainScatterLoss
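
# Note: each loss class imported above is looked up by name (via eval) when
# CombinedLoss parses the config list below, so a loss only needs to be
# imported here to become usable from a YAML loss config.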


class CombinedLoss(nn.Layer):
    """Combine multiple loss functions, each with its own weight, into one module."""

    def __init__(self, config_list):
        super().__init__()
        self.loss_func = []
        self.loss_weight = []
        assert isinstance(config_list, list), (
            'operator config should be a list')
        for config in config_list:
            assert isinstance(config,
                              dict) and len(config) == 1, "yaml format error"
            name = list(config)[0]
            param = config[name]
            assert "weight" in param, "weight must be in param, but param just contains {}".format(
                param.keys())
            self.loss_weight.append(param.pop("weight"))
            # look up the loss class by name and build it with the remaining params
            self.loss_func.append(eval(name)(**param))
        self.loss_func = nn.LayerList(self.loss_func)

    def __call__(self, input, batch):
        loss_dict = {}
        # fast path: with a single loss, skip the weighting loop to speed up
        # plain classification training (the weight is not applied here)
        if len(self.loss_func) == 1:
            loss = self.loss_func[0](input, batch)
            loss_dict.update(loss)
            loss_dict["loss"] = list(loss.values())[0]
        else:
            for idx, loss_func in enumerate(self.loss_func):
                loss = loss_func(input, batch)
                weight = self.loss_weight[idx]
                loss = {key: loss[key] * weight for key in loss}
                loss_dict.update(loss)
            # the total loss is the sum of all weighted loss terms
            loss_dict["loss"] = paddle.add_n(list(loss_dict.values()))
        return loss_dict


def build_loss(config):
    if config is None:
        return None
    # CombinedLoss pops "weight" from each entry, so build it from a deep copy
    module_class = CombinedLoss(copy.deepcopy(config))
    logger.debug("build loss {} success.".format(module_class))
    return module_class
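

# Illustrative usage sketch (not part of the original module). The loss names
# and parameter values below are assumptions chosen for demonstration; in
# practice the list comes from the loss section of a PaddleClas YAML config.
#
#   loss_config = [
#       {"CELoss": {"weight": 1.0}},
#       {"TripletLossV2": {"weight": 1.0, "margin": 0.5}},
#   ]
#   loss_fn = build_loss(loss_config)
#   # in the training loop:
#   #   out = model(images)
#   #   loss_dict = loss_fn(out, labels)
#   #   loss_dict["loss"].backward()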