
Commit e7f0db8 (parent: 7077f16)

Fix drop/drop_path arg on MLP-Mixer model. Fix #641

File tree: 1 file changed (+3 −3 lines)


timm/models/mlp_mixer.py

Lines changed: 3 additions & 3 deletions
@@ -96,8 +96,8 @@ def __init__(
             mlp_layer=Mlp,
             norm_layer=partial(nn.LayerNorm, eps=1e-6),
             act_layer=nn.GELU,
-            drop=0.,
-            drop_path=0.,
+            drop_rate=0.,
+            drop_path_rate=0.,
             nlhb=False,
     ):
         super().__init__()
@@ -108,7 +108,7 @@ def __init__(
         self.blocks = nn.Sequential(*[
             MixerBlock(
                 hidden_dim, self.stem.num_patches, tokens_dim, channels_dim,
-                mlp_layer=mlp_layer, norm_layer=norm_layer, act_layer=act_layer, drop=drop, drop_path=drop_path)
+                mlp_layer=mlp_layer, norm_layer=norm_layer, act_layer=act_layer, drop=drop_rate, drop_path=drop_path_rate)
             for _ in range(num_blocks)])
         self.norm = norm_layer(hidden_dim)
         self.head = nn.Linear(hidden_dim, self.num_classes)  # zero init
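
The rename aligns MLP-Mixer with the drop_rate / drop_path_rate argument names used by other timm models, so rates passed as keyword arguments through timm.create_model reach the MlpMixer constructor. A minimal usage sketch after this fix (the model name and rate values below are illustrative, not taken from the commit):

import timm
import torch

# Illustrative only: dropout and stochastic-depth rates now reach the
# MlpMixer constructor via the factory's drop_rate / drop_path_rate kwargs.
model = timm.create_model('mixer_b16_224', pretrained=False,
                          drop_rate=0.1, drop_path_rate=0.1)

# Quick smoke test on a random batch.
x = torch.randn(1, 3, 224, 224)
with torch.no_grad():
    out = model(x)
print(out.shape)  # torch.Size([1, 1000]) with the default 1000-class head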
