Remove redundant dropout in MLP module (#105)

This commit is contained in:
Sebastian Raschka
2024-04-03 20:19:08 -05:00
committed by GitHub
parent dd115c1374
commit 3829ccdb34
11 changed files with 202 additions and 266 deletions

File diff suppressed because one or more lines are too long

View File

@@ -152,7 +152,6 @@ class FeedForward(nn.Module):
nn.Linear(cfg["emb_dim"], 4 * cfg["emb_dim"]),
GELU(),
nn.Linear(4 * cfg["emb_dim"], cfg["emb_dim"]),
nn.Dropout(cfg["drop_rate"])
)
def forward(self, x):