Source code for k1lib.callbacks.lossFunctions.shorts

# AUTOGENERATED FILE! PLEASE DON'T EDIT
"""For not very complicated loss functions"""
from ..callbacks import Callback, Callbacks, Cbs
from typing import Callable, Tuple
import torch, k1lib, math, torch.nn.functional as F
__all__ = ["LossF", "LossNLLCross"]
LossFSig = Callable[[Tuple[torch.Tensor, torch.Tensor]], float]
@k1lib.patch(Cbs)
@k1lib.patch(Callback.lossCls)
class LossF(Callback):
    " "
    def __init__(self, lossF:LossFSig):
        """Generic loss function callback.

Expected variables in :class:`~k1lib.Learner`:

- y: result of model. Auto-included in
  :class:`~k1lib.callbacks.core.CoreNormal` and
  :class:`~k1lib.callbacks.core.CoreRNN`.

Deposits variables into :class:`~k1lib.Learner` at checkpoint ``inLoss``:

- lossG: single float tensor value, attached to graph
- loss: lossG, but single float value

:param lossF: takes in ``(y, yb)`` and returns ``lossG``"""
        super().__init__()
        self.lossF = lossF
    def inLoss(self):
        # Compute the graph-attached loss once, then derive the plain float
        # from it, instead of reading self.l.lossG back.
        lossG = self.lossF(self.l.y, self.l.yb)
        self.l.lossG = lossG
        self.l.loss = lossG.detach().item()
class LossNLLCross(Callback):
    " "
    def __init__(self, nll:bool, integrations:bool):
        """Adds a cross-entropy/negative-likelihood loss function.

:param nll: if True, then use :class:`torch.nn.NLLLoss`, else use
    :class:`torch.nn.CrossEntropyLoss`
:param integrations: whether to integrate with
    :class:`~k1lib.callbacks.accuracy.AccF` callback"""
        super().__init__()
        self.integrations = integrations
        self.ownsAccCb = False
        # Bug fix: initialize accuracyCb here. Previously it was only ever
        # assigned inside attached(), and only when integrations is True — so
        # detach() would raise AttributeError when integrations is False or
        # when the callback was never attached.
        self.accuracyCb = None
        self.order = 11 # to make sure it's after AccF
        self.lossF = torch.nn.NLLLoss() if nll else torch.nn.CrossEntropyLoss()
    def attached(self):
        # delayed initialization, so that learner and cbs have already been attached
        if self.integrations:
            if "AccF" not in self.cbs:
                # No accuracy callback present yet: create one, register it,
                # and remember that we own it so detach() cleans it up too.
                self.accuracyCb = Cbs.AccF()
                self.cbs.add(self.accuracyCb)
                self.ownsAccCb = True
            else:
                # Reuse the accuracy callback someone else already added.
                self.accuracyCb = self.cbs.AccF
    def inLoss(self):
        # Deposits lossG (graph-attached tensor) and loss (plain float) into
        # the learner, same contract as LossF.
        self.l.lossG = self.lossF(self.l.y, self.l.yb)
        self.l.loss = self.l.lossG.detach().item()
    def detach(self):
        super().detach()
        if self.accuracyCb is not None: # idiom fix: `is not None` over `!= None`
            # Only detach the accuracy callback if we created it ourselves.
            if self.ownsAccCb: self.accuracyCb.detach()
            self.accuracyCb = None
# Thin convenience subclasses of LossNLLCross, pre-selecting the loss flavor.
# Both are patched onto Cbs and Callback.lossCls so they are reachable as
# Cbs.LossCrossEntropy / Cbs.LossNLL.
@k1lib.patch(Cbs) @k1lib.patch(Callback.lossCls) class LossCrossEntropy(LossNLLCross): def __init__(self, integrations:bool=True): """Cross entropy loss function. Deposits into :class:`~k1lib.Learner` the same variables as in :class:`LossF`.""" super().__init__(False, integrations) @k1lib.patch(Cbs) @k1lib.patch(Callback.lossCls) class LossNLL(LossNLLCross): def __init__(self, integrations:bool=True): """Negative log loss function. Deposits into :class:`~k1lib.Learner` the same variables as in :class:`LossF`.""" super().__init__(True, integrations)