Commit 1b89ad2

Add chinese comment
1 parent d36f0c8 commit 1b89ad2

File tree

1 file changed: +12 −13 lines changed

monai/networks/nets/attentionunet.py

Lines changed: 12 additions & 13 deletions
@@ -22,38 +22,37 @@
 __all__ = ["AttentionUnet"]
 
 
+# `ConvBlock` is a basic two-layer convolution module; each layer is composed of an activation (ReLU), a normalization layer (BatchNorm), dropout, and so on.
 class ConvBlock(nn.Module):
-
     def __init__(
         self,
-        spatial_dims: int,
-        in_channels: int,
-        out_channels: int,
-        kernel_size: Sequence[int] | int = 3,
-        strides: int = 1,
-        dropout=0.0,
+        spatial_dims: int,  # spatial dimensionality of the input image (2D or 3D)
+        in_channels: int,  # number of input channels
+        out_channels: int,  # number of output channels (i.e. number of feature maps)
+        kernel_size: Sequence[int] | int = 3,  # convolution kernel size, usually set to 3
+        strides: int = 1,  # stride, which determines the scaling of the output feature map
+        dropout=0.0,  # dropout probability; the default of 0 means no dropout is applied
     ):
         super().__init__()
+        # Build two convolution layers, each with activation, normalization, and optional dropout
         layers = [
             Convolution(
                 spatial_dims=spatial_dims,
                 in_channels=in_channels,
                 out_channels=out_channels,
                 kernel_size=kernel_size,
                 strides=strides,
-                padding=None,
-                adn_ordering="NDA",
-                act="relu",
-                norm=Norm.BATCH,
-                dropout=dropout,
+                adn_ordering="NDA",  # ordering: Norm -> Dropout -> Activation
+                act="relu",  # ReLU activation
+                norm=Norm.BATCH,  # batch normalization
+                dropout=dropout,  # dropout probability
             ),
             Convolution(
                 spatial_dims=spatial_dims,
                 in_channels=out_channels,
                 out_channels=out_channels,
                 kernel_size=kernel_size,
                 strides=1,
-                padding=None,
                 adn_ordering="NDA",
                 act="relu",
                 norm=Norm.BATCH,
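
For reference, a minimal usage sketch of the commented `ConvBlock` (not part of the commit; it assumes MONAI and PyTorch are installed, that `ConvBlock` is importable from `monai.networks.nets.attentionunet`, and that the channel counts and input shape below are illustrative only). Dropping the explicit `padding=None` arguments also appears to be behavior-preserving, since `None` is already the `Convolution` default and same-padding is then derived from the kernel size.

import torch
from monai.networks.nets.attentionunet import ConvBlock  # assumption: ConvBlock is module-level even though __all__ only exports AttentionUnet

# 2D block: 1 input channel -> 32 feature maps; kernel 3 with stride 1 keeps the spatial size
block = ConvBlock(spatial_dims=2, in_channels=1, out_channels=32, kernel_size=3, strides=1, dropout=0.0)

x = torch.randn(4, 1, 64, 64)  # batch of 4 single-channel 64x64 images
y = block(x)
print(y.shape)  # expected: torch.Size([4, 32, 64, 64])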
