mirror of
https://github.com/rasbt/LLMs-from-scratch.git
synced 2026-04-10 12:33:42 +00:00
Remove redundant dropout in MLP module (#105)
This commit is contained in:
committed by
GitHub
parent
dd115c1374
commit
3829ccdb34
@@ -144,7 +144,6 @@ class FeedForward(nn.Module):
|
||||
nn.Linear(cfg["emb_dim"], 4 * cfg["emb_dim"]),
|
||||
GELU(),
|
||||
nn.Linear(4 * cfg["emb_dim"], cfg["emb_dim"]),
|
||||
nn.Dropout(cfg["drop_rate"])
|
||||
)
|
||||
|
||||
def forward(self, x):
|
||||
|
||||
Reference in New Issue
Block a user