diff --git a/setup.py b/setup.py
index 77f52cb..26bb607 100644
--- a/setup.py
+++ b/setup.py
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages
 setup(
   name = 'vit-pytorch',
   packages = find_packages(exclude=['examples']),
-  version = '0.15.0',
+  version = '0.15.1',
   license='MIT',
   description = 'Vision Transformer (ViT) - Pytorch',
   author = 'Phil Wang',
diff --git a/vit_pytorch/levit.py b/vit_pytorch/levit.py
index 7db5c37..e2c7e66 100644
--- a/vit_pytorch/levit.py
+++ b/vit_pytorch/levit.py
@@ -81,7 +81,6 @@ class Attention(nn.Module):
     def apply_pos_bias(self, fmap):
         bias = self.pos_bias(self.pos_indices)
         bias = rearrange(bias, 'i j h -> () h i j')
-        print(bias.shape, fmap.shape)
         return fmap + bias

     def forward(self, x):
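
For context (not part of the patch): the removed line was a leftover debug print in LeViT's Attention.apply_pos_bias. The sketch below, with illustrative shapes assumed here rather than taken from the diff, shows how the rearranged positional bias broadcasts onto the attention map that apply_pos_bias receives.

# Minimal sketch, assuming fmap holds per-head attention logits of shape
# (batch, heads, i, j) and pos_bias yields one scalar per head per (i, j) pair.
import torch
from einops import rearrange

heads, n = 4, 16                             # hypothetical head count and sequence length
bias = torch.randn(n, n, heads)              # stands in for self.pos_bias(self.pos_indices): (i, j, h)
bias = rearrange(bias, 'i j h -> () h i j')  # same rearrange as in the diff -> (1, h, i, j)

fmap = torch.randn(2, heads, n, n)           # stand-in attention map: (batch, heads, i, j)
out = fmap + bias                            # bias broadcasts across the batch dimension
assert out.shape == (2, heads, n, n)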