PaddleClas/ppcls/loss/__init__.py

import copy
import paddle
import paddle.nn as nn
from ppcls.utils import logger
from .celoss import CELoss, MixCELoss
from .googlenetloss import GoogLeNetLoss
from .centerloss import CenterLoss
from .emlloss import EmlLoss
from .msmloss import MSMLoss
from .npairsloss import NpairsLoss
from .trihardloss import TriHardLoss
from .triplet import TripletLoss, TripletLossV2
from .tripletangularmarginloss import TripletAngularMarginLoss
from .supconloss import SupConLoss
from .pairwisecosface import PairwiseCosface
from .dmlloss import DMLLoss
from .distanceloss import DistanceLoss
from .softtargetceloss import SoftTargetCrossEntropy
from .distillationloss import DistillationCELoss
from .distillationloss import DistillationGTCELoss
from .distillationloss import DistillationDMLLoss
from .distillationloss import DistillationDistanceLoss
from .distillationloss import DistillationRKDLoss
from .distillationloss import DistillationKLDivLoss
from .distillationloss import DistillationDKDLoss
from .distillationloss import DistillationMultiLabelLoss
from .distillationloss import DistillationDISTLoss
from .distillationloss import DistillationPairLoss
from .multilabelloss import MultiLabelLoss
from .afdloss import AFDLoss
from .deephashloss import DSHSDLoss
from .deephashloss import LCDSHLoss
from .deephashloss import DCHLoss
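
# NOTE: illustrative sketch, not part of the original file. CombinedLoss below
# consumes the parsed "Loss" section of a PaddleClas YAML config: a list of
# single-key dicts, each mapping one of the loss class names imported above to
# its constructor kwargs plus a mandatory "weight". For example (the "epsilon"
# and "margin" values here are assumed, illustrative settings only):
#
#   [{"CELoss": {"weight": 1.0, "epsilon": 0.1}},
#    {"TripletLossV2": {"weight": 1.0, "margin": 0.5}}]
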
class CombinedLoss(nn.Layer):
    """Weighted combination of the loss classes imported above, driven by a config list."""

    def __init__(self, config_list):
        super().__init__()
        self.loss_func = []
        self.loss_weight = []
        assert isinstance(config_list, list), (
            'operator config should be a list')
        for config in config_list:
            assert isinstance(config,
                              dict) and len(config) == 1, "yaml format error"
            name = list(config)[0]
            param = config[name]
            assert "weight" in param, "weight must be in param, but param only contains {}".format(
                param.keys())
            self.loss_weight.append(param.pop("weight"))
            # resolve the loss name to one of the classes imported above and
            # instantiate it with the remaining kwargs
            self.loss_func.append(eval(name)(**param))
        self.loss_func = nn.LayerList(self.loss_func)

    def __call__(self, input, batch):
        loss_dict = {}
        # shortcut to accelerate classification training when only one loss is configured
        if len(self.loss_func) == 1:
            loss = self.loss_func[0](input, batch)
            loss_dict.update(loss)
            loss_dict["loss"] = list(loss.values())[0]
        else:
            for idx, loss_func in enumerate(self.loss_func):
                loss = loss_func(input, batch)
                weight = self.loss_weight[idx]
                loss = {key: loss[key] * weight for key in loss}
                loss_dict.update(loss)
            loss_dict["loss"] = paddle.add_n(list(loss_dict.values()))
        return loss_dict


def build_loss(config):
    module_class = CombinedLoss(copy.deepcopy(config))
    logger.debug("build loss {} success.".format(module_class))
    return module_class
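
# Minimal usage sketch (illustrative, not part of the original module). Assuming
# the loss list has already been read from a config file, training code could
# build and call the combined loss roughly like this:
#
#   loss_config = [{"CELoss": {"weight": 1.0}}]
#   loss_func = build_loss(loss_config)
#   out = model(images)                  # model, images, labels: assumed training objects
#   loss_dict = loss_func(out, labels)   # e.g. {"CELoss": ..., "loss": ...}
#   loss_dict["loss"].backward()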