Skip to content

Commit cac0a45

Browse files
committed
More test fixes, pool size for 256x256 maxvit models
1 parent: e939ed1 · commit: cac0a45

File tree

3 files changed

+9
-8
lines changed

3 files changed

+9
-8
lines changed

tests/test_models.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@
2828
'vit_*', 'tnt_*', 'pit_*', 'swin_*', 'coat_*', 'cait_*', '*mixer_*', 'gmlp_*', 'resmlp_*', 'twins_*',
2929
'convit_*', 'levit*', 'visformer*', 'deit*', 'jx_nest_*', 'nest_*', 'xcit_*', 'crossvit_*', 'beit_*',
3030
'poolformer_*', 'volo_*', 'sequencer2d_*', 'swinv2_*', 'pvt_v2*', 'mvitv2*', 'gcvit*', 'efficientformer*',
31-
'coatne?t_*', 'max?vit_*',
31+
'coatnet*', 'coatnext*', 'maxvit*', 'maxxvit*',
3232
]
3333
NUM_NON_STD = len(NON_STD_FILTERS)
3434

timm/models/efficientformer.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ def _cfg(url='', **kwargs):
2929
'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': None, 'fixed_input_size': True,
3030
'crop_pct': .95, 'interpolation': 'bicubic',
3131
'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD,
32-
'first_conv': 'stem.conv1', 'classifier': 'head',
32+
'first_conv': 'stem.conv1', 'classifier': ('head', 'head_dist'),
3333
**kwargs
3434
}
3535

timm/models/maxxvit.py

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -94,6 +94,7 @@ def _cfg(url='', **kwargs):
9494
'coatnet_rmlp_0_rw_224': _cfg(url=''),
9595
'coatnet_rmlp_1_rw_224': _cfg(
9696
url=''),
97+
'coatnet_nano_cc_224': _cfg(url=''),
9798
'coatnext_nano_rw_224': _cfg(url=''),
9899

99100
# Trying to be like the CoAtNet paper configs
@@ -105,12 +106,12 @@ def _cfg(url='', **kwargs):
105106
'coatnet_5_224': _cfg(url=''),
106107

107108
# Experimental configs
108-
'maxvit_pico_rw_256': _cfg(url='', input_size=(3, 256, 256)),
109-
'maxvit_nano_rw_256': _cfg(url='', input_size=(3, 256, 256)),
109+
'maxvit_pico_rw_256': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8)),
110+
'maxvit_nano_rw_256': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8)),
110111
'maxvit_tiny_rw_224': _cfg(url=''),
111-
'maxvit_tiny_rw_256': _cfg(url='', input_size=(3, 256, 256)),
112-
'maxvit_tiny_cm_256': _cfg(url='', input_size=(3, 256, 256)),
113-
'maxxvit_nano_rw_256': _cfg(url='', input_size=(3, 256, 256)),
112+
'maxvit_tiny_rw_256': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8)),
113+
'maxvit_tiny_cm_256': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8)),
114+
'maxxvit_nano_rw_256': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8)),
114115

115116
# Trying to be like the MaxViT paper configs
116117
'maxvit_tiny_224': _cfg(url=''),
@@ -1052,7 +1053,6 @@ def __init__(
10521053
self.drop_path2 = DropPath(drop_path) if drop_path > 0. else nn.Identity()
10531054

10541055
def _partition_attn(self, x):
1055-
C = x.shape[-1]
10561056
img_size = x.shape[1:3]
10571057
if self.partition_block:
10581058
partitioned = window_partition(x, self.partition_size)
@@ -1415,6 +1415,7 @@ def __init__(
14151415
self.norm1 = norm_act_layer(out_chs[0])
14161416
self.conv2 = create_conv2d(out_chs[0], out_chs[1], kernel_size, stride=1)
14171417

1418+
@torch.jit.ignore
14181419
def init_weights(self, scheme=''):
14191420
named_apply(partial(_init_conv, scheme=scheme), self)
14201421

0 commit comments

Comments (0)