Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 20 additions & 1 deletion segmentation_models_pytorch/losses/focal.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@

import torch
from torch.nn.modules.loss import _Loss
from ._functional import focal_loss_with_logits
from ._functional import focal_loss_with_logits,softmax_focal_loss_with_logits
Comment thread
Harsh-2005d marked this conversation as resolved.
Outdated
from .constants import BINARY_MODE, MULTICLASS_MODE, MULTILABEL_MODE

__all__ = ["FocalLoss"]
Expand All @@ -13,6 +13,8 @@ class FocalLoss(_Loss):
def __init__(
self,
mode: str,
from_logits: bool = True,
eps: float = 1e-7,
alpha: Optional[float] = None,
gamma: Optional[float] = 2.0,
ignore_index: Optional[int] = None,
Comment thread
Harsh-2005d marked this conversation as resolved.
Outdated
Expand All @@ -24,6 +26,8 @@ def __init__(

Args:
mode: Loss mode 'binary', 'multiclass' or 'multilabel'
from_logits: If True, assumes input is raw logits
eps: Small value used for numerical stability when converting probabilities to logits .
alpha: Prior probability of having positive value in target.
gamma: Power factor for dampening weight (focal strength).
ignore_index: If not None, targets may contain values to be ignored.
Expand All @@ -44,8 +48,11 @@ def __init__(
super().__init__()

self.mode = mode
self.from_logits=from_logits
Comment thread
Harsh-2005d marked this conversation as resolved.
Outdated
self.ignore_index = ignore_index
self.reduction = reduction
self.eps=eps
Comment thread
Harsh-2005d marked this conversation as resolved.
Outdated

self.focal_loss_fn = partial(
focal_loss_with_logits,
alpha=alpha,
Expand All @@ -56,6 +63,18 @@ def __init__(
)

def forward(self, y_pred: torch.Tensor, y_true: torch.Tensor) -> torch.Tensor:

if not self.from_logits:
y_pred = torch.clamp(y_pred, self.eps, 1 - self.eps)

if self.mode in {BINARY_MODE, MULTILABEL_MODE}:
# inverse sigmoid
y_pred = torch.log(y_pred / (1 - y_pred))

elif self.mode == MULTICLASS_MODE:
# convert softmax probabilities to log-space
y_pred = torch.log(y_pred)

if self.mode in {BINARY_MODE, MULTILABEL_MODE}:
y_true = y_true.reshape(-1)
y_pred = y_pred.reshape(-1)
Expand Down
26 changes: 26 additions & 0 deletions tests/test_losses.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,32 @@
MCCLoss,
)

def test_focal_loss_from_logits_false_multiclass():
    """FocalLoss(from_logits=False) fed softmax probabilities should roughly
    match FocalLoss(from_logits=True) fed the raw logits.

    The match is only approximate: log(softmax(x)) differs from x by a
    per-row constant (the log-partition term), so the logits recovered from
    probabilities are shifted and the two losses are close, not identical.
    """
    # Well-separated logits so softmax is near one-hot and log() stays stable.
    input_logits = torch.tensor(
        [
            [0.0, 10.0, 0.0],
            [10.0, 0.0, 0.0],
            [0.0, 0.0, 10.0],
        ]
    ).float()
    target = torch.tensor([1, 0, 2]).long()
    # Convert logits to probabilities for the from_logits=False path.
    input_probs = torch.softmax(input_logits, dim=1)

    loss_logits = smp.losses.FocalLoss(
        mode="multiclass",
        from_logits=True,
    )(input_logits, target)

    loss_probs = smp.losses.FocalLoss(
        mode="multiclass",
        from_logits=False,
    )(input_probs, target)

    # Both paths must produce finite scalar losses...
    assert torch.isfinite(loss_logits).item()
    assert torch.isfinite(loss_probs).item()
    # ...and agree up to the per-row constant shift described in the docstring.
    assert abs(loss_logits.item() - loss_probs.item()) < 0.2

def test_focal_loss_with_logits():
input_good = torch.tensor([10, -10, 10]).float()
Expand Down