Skip to content

Commit 8906556

Browse files
committed
import Sequence from typing
Signed-off-by: ytl0623 <[email protected]>
1 parent e714a3c commit 8906556

File tree

1 file changed: +3 additions, -3 deletions

monai/losses/focal_loss.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
from __future__ import annotations
1313

1414
import warnings
15-
from typing import Optional, Sequence
15+
from typing import Optional, Sequence as SequenceType
1616

1717
import torch
1818
import torch.nn.functional as F
@@ -207,7 +207,7 @@ def forward(self, input: torch.Tensor, target: torch.Tensor) -> torch.Tensor:
207207

208208

209209
def softmax_focal_loss(
210-
input: torch.Tensor, target: torch.Tensor, gamma: float = 2.0, alpha: float | Sequence[float] | None = None
210+
input: torch.Tensor, target: torch.Tensor, gamma: float = 2.0, alpha: float | SequenceType[float] | None = None
211211
) -> torch.Tensor:
212212
"""
213213
FL(pt) = -alpha * (1 - pt)**gamma * log(pt)
@@ -240,7 +240,7 @@ def softmax_focal_loss(
240240

241241

242242
def sigmoid_focal_loss(
243-
input: torch.Tensor, target: torch.Tensor, gamma: float = 2.0, alpha: float | Sequence[float] | None = None
243+
input: torch.Tensor, target: torch.Tensor, gamma: float = 2.0, alpha: float | SequenceType[float] | None = None
244244
) -> torch.Tensor:
245245
"""
246246
FL(pt) = -alpha * (1 - pt)**gamma * log(pt)

Comments (0) — no commit comments.