seeing a signal with dual patchnorm in another repository, fully incorporate it

Phil Wang
2023-02-06 09:45:12 -08:00
parent bdaf2d1491
commit 46dcaf23d8
4 changed files with 11 additions and 2 deletions


@@ -118,7 +118,9 @@ class ViT(nn.Module):
         self.to_patch_embedding = nn.Sequential(
             Rearrange('b c (h p1) (w p2) -> b (h w) (p1 p2 c)', p1 = patch_height, p2 = patch_width),
+            nn.LayerNorm(patch_dim),
             nn.Linear(patch_dim, dim),
+            nn.LayerNorm(dim)
         )
         self.pos_embedding = nn.Parameter(torch.randn(1, num_patches + 1, dim))
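
For reference, a minimal standalone sketch of the dual-patchnorm embedding this diff produces: one LayerNorm over the flattened patch pixels before the linear projection, and a second LayerNorm over the projected tokens after it. The DualPatchNormEmbedding class, its default arguments, and the shape check are illustrative only and not part of the repository; only torch and einops are assumed.

import torch
from torch import nn
from einops.layers.torch import Rearrange

class DualPatchNormEmbedding(nn.Module):
    # hypothetical standalone module mirroring the patch embedding above:
    # LayerNorm on the raw flattened patches, linear projection to dim,
    # then LayerNorm on the projected tokens (dual patchnorm)
    def __init__(self, channels = 3, patch_size = 16, dim = 1024):
        super().__init__()
        patch_dim = channels * patch_size * patch_size
        self.net = nn.Sequential(
            Rearrange('b c (h p1) (w p2) -> b (h w) (p1 p2 c)', p1 = patch_size, p2 = patch_size),
            nn.LayerNorm(patch_dim),
            nn.Linear(patch_dim, dim),
            nn.LayerNorm(dim)
        )

    def forward(self, img):
        return self.net(img)

# quick shape check: 224x224 image, 16x16 patches -> 196 tokens of width 1024
embed = DualPatchNormEmbedding()
tokens = embed(torch.randn(1, 3, 224, 224))
assert tokens.shape == (1, 196, 1024)

The rest of the ViT is untouched; dual patchnorm only changes how patches are normalized on their way into and out of the patch projection.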