attention re-use in lookup vit should use pre-softmax attention matrix

lucidrains
2024-07-19 19:23:38 -07:00
parent 4b2c00cb63
commit 9992a615d1
2 changed files with 13 additions and 12 deletions

setup.py

@@ -6,7 +6,7 @@ with open('README.md') as f:
 setup(
   name = 'vit-pytorch',
   packages = find_packages(exclude=['examples']),
-  version = '1.7.3',
+  version = '1.7.4',
   license='MIT',
   description = 'Vision Transformer (ViT) - Pytorch',
   long_description=long_description,
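
The diff for the second changed file is not shown above, but the commit message describes the fix: when LookupViT re-uses the attention matrix between the compressed tokens and the lookup tokens, it should cache the pre-softmax attention matrix (the scaled dot-product similarities) rather than the post-softmax weights, since the re-used direction needs to normalize over a different token axis. Below is a minimal sketch of that idea, not the actual vit-pytorch code; all tensor names and shapes are hypothetical.

```python
# minimal sketch of re-using a *pre-softmax* attention matrix, as the commit
# message describes. names and shapes are illustrative, not the actual
# LookupViT implementation in vit-pytorch

import torch

def attend(q, k, v):
    scale = q.shape[-1] ** -0.5
    # pre-softmax attention matrix (scaled dot-product similarities)
    sim = torch.einsum('b i d, b j d -> b i j', q, k) * scale
    attn = sim.softmax(dim = -1)
    out = torch.einsum('b i j, b j d -> b i d', attn, v)
    # return `sim`, not `attn`: a caller re-using the attention in the
    # other direction must take its own softmax over the correct axis
    return out, sim

# hypothetical shapes: a few compressed tokens cross-attend to many lookup tokens
b, n_compressed, n_lookup, d = 2, 16, 64, 32
compressed_q = torch.randn(b, n_compressed, d)
lookup_k = torch.randn(b, n_lookup, d)
lookup_v = torch.randn(b, n_lookup, d)
compressed_v = torch.randn(b, n_compressed, d)

out, sim = attend(compressed_q, lookup_k, lookup_v)

# re-use: lookup tokens attend back to compressed tokens. transpose the
# pre-softmax matrix, then softmax along the new last (compressed) axis.
# re-using the post-softmax `attn` here would carry normalization taken
# over the wrong set of tokens
reverse_attn = sim.transpose(-1, -2).softmax(dim = -1)
lookup_out = torch.einsum('b i j, b j d -> b i d', reverse_attn, compressed_v)
```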