Fix typo in LayerNorm (#285)

Co-authored-by: Artem Lukin <artyom.lukin98@gmail.com>
Artem Lukin
2023-10-24 21:47:21 +02:00
committed by GitHub
parent 53fe345e85
commit fb4ac25174


@@ -10,7 +10,7 @@ class FeedForward(nn.Module):
     def __init__(self, dim, hidden_dim, dropout = 0.):
         super().__init__()
         self.net = nn.Sequential(
-            nn.Layernorm(dim),
+            nn.LayerNorm(dim),
             nn.Linear(dim, hidden_dim),
             nn.GELU(),
             nn.Dropout(dropout),
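
For context, a minimal sketch of the FeedForward module as it reads after this fix. The hunk only covers the first half of the nn.Sequential; the projection back to dim, the final Dropout, and the forward method are assumed from the usual pre-norm feed-forward pattern and are not part of this diff. The rename matters because torch.nn exposes LayerNorm, not Layernorm, so the old spelling raised an AttributeError as soon as the module was instantiated.

    # Sketch of the corrected FeedForward block (pre-norm MLP as used in ViT-style models).
    # Everything after the first nn.Dropout, plus forward(), is assumed here and not shown
    # in the hunk above.
    import torch
    import torch.nn as nn

    class FeedForward(nn.Module):
        def __init__(self, dim, hidden_dim, dropout = 0.):
            super().__init__()
            self.net = nn.Sequential(
                nn.LayerNorm(dim),            # correct class name; nn.Layernorm does not exist
                nn.Linear(dim, hidden_dim),   # expand to hidden_dim
                nn.GELU(),
                nn.Dropout(dropout),
                nn.Linear(hidden_dim, dim),   # project back to dim (assumed, not in the hunk)
                nn.Dropout(dropout)
            )

        def forward(self, x):
            return self.net(x)

    # Quick check that the module now instantiates and runs:
    ff = FeedForward(dim = 64, hidden_dim = 128)
    out = ff(torch.randn(2, 16, 64))   # (batch, tokens, dim) -> same shape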