Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 19 additions & 0 deletions segmentation_models_pytorch/losses/focal.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@ def __init__(
alpha: Optional[float] = None,
gamma: Optional[float] = 2.0,
ignore_index: Optional[int] = None,
from_logits: bool = True,
eps: float = 1e-7,
reduction: Optional[str] = "mean",
normalized: bool = False,
reduced_threshold: Optional[float] = None,
Expand All @@ -25,6 +27,8 @@ def __init__(

Args:
mode: Loss mode 'binary', 'multiclass' or 'multilabel'
from_logits: If True, assumes input is raw logits
eps: Small value used for numerical stability when converting probabilities to logits.
alpha: Prior probability of having positive value in target.
gamma: Power factor for dampening weight (focal strength).
ignore_index: If not None, targets may contain values to be ignored.
Expand All @@ -51,8 +55,11 @@ def __init__(
raise ValueError("class_weights are not supported with mode=binary")

self.mode = mode
self.from_logits = from_logits
self.ignore_index = ignore_index
self.reduction = reduction
self.eps = eps

self.focal_loss_fn = partial(
focal_loss_with_logits,
alpha=alpha,
Expand All @@ -68,6 +75,18 @@ def __init__(
)

def forward(self, y_pred: torch.Tensor, y_true: torch.Tensor) -> torch.Tensor:

if not self.from_logits:
y_pred = torch.clamp(y_pred, self.eps, 1 - self.eps)

if self.mode in {BINARY_MODE, MULTILABEL_MODE}:
# inverse sigmoid
y_pred = torch.log(y_pred / (1 - y_pred))

elif self.mode == MULTICLASS_MODE:
# convert softmax probabilities to log-space
y_pred = torch.log(y_pred)

if self.mode == BINARY_MODE:
y_true = y_true.reshape(-1)
y_pred = y_pred.reshape(-1)
Expand Down
26 changes: 26 additions & 0 deletions tests/test_losses.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,32 @@
)


def test_focal_loss_from_logits_false_multiclass():
    """Multiclass focal loss on probabilities should roughly match the logits path."""
    torch.manual_seed(0)

    # Well-separated scores: each row strongly favours exactly one class.
    raw_scores = torch.tensor(
        [[0.0, 10.0, 0.0], [10.0, 0.0, 0.0], [0.0, 0.0, 10.0]]
    ).float()
    labels = torch.tensor([1, 0, 2]).long()

    # The same predictions expressed as softmax probabilities.
    probabilities = torch.softmax(raw_scores, dim=1)

    loss_from_logits = smp.losses.FocalLoss(
        mode="multiclass",
        from_logits=True,
    )(raw_scores, labels)

    loss_from_probs = smp.losses.FocalLoss(
        mode="multiclass",
        from_logits=False,
    )(probabilities, labels)

    # Both paths must be finite and agree up to a small tolerance
    # (not exact, because converting probabilities back to log-space
    # introduces a constant shift).
    assert torch.isfinite(loss_from_probs)
    assert torch.isfinite(loss_from_logits)
    assert abs(loss_from_logits - loss_from_probs) < 0.2


def test_focal_loss_with_logits():
input_good = torch.tensor([10, -10, 10]).float()
input_bad = torch.tensor([-1, 2, 0]).float()
Expand Down