commit c8cffefb6f
Author: rasbt
Date: 2024-04-04 07:58:41 -05:00
Parent: ccd7cebbb3

3 changed files with 12 additions and 12 deletions


@@ -165,7 +165,7 @@
" def __init__(self, cfg):\n",
" super().__init__()\n",
" self.tok_emb = nn.Embedding(cfg[\"vocab_size\"], cfg[\"emb_dim\"])\n",
" self.pos_emb = nn.Embedding(cfg[\"ctx_len\"], cfg[\"emb_dim\"])\n",
" self.pos_emb = nn.Embedding(cfg[\"context_length\"], cfg[\"emb_dim\"])\n",
" self.drop_emb = nn.Dropout(cfg[\"drop_rate\"])\n",
" \n",
" # Use a placeholder for TransformerBlock\n",
@@ -943,7 +943,7 @@
" self.att = MultiHeadAttention(\n",
" d_in=cfg[\"emb_dim\"],\n",
" d_out=cfg[\"emb_dim\"],\n",
" context_length=cfg[\"ctx_len\"],\n",
" context_length=cfg[\"context_length\"],\n",
" num_heads=cfg[\"n_heads\"], \n",
" dropout=cfg[\"drop_rate\"],\n",
" qkv_bias=cfg[\"qkv_bias\"])\n",
@@ -1065,7 +1065,7 @@
" def __init__(self, cfg):\n",
" super().__init__()\n",
" self.tok_emb = nn.Embedding(cfg[\"vocab_size\"], cfg[\"emb_dim\"])\n",
" self.pos_emb = nn.Embedding(cfg[\"ctx_len\"], cfg[\"emb_dim\"])\n",
" self.pos_emb = nn.Embedding(cfg[\"context_length\"], cfg[\"emb_dim\"])\n",
" self.drop_emb = nn.Dropout(cfg[\"drop_rate\"])\n",
" \n",
" self.trf_blocks = nn.Sequential(\n",
@@ -1429,7 +1429,7 @@
" model=model,\n",
" idx=encoded_tensor, \n",
" max_new_tokens=6, \n",
" context_size=GPT_CONFIG_124M[\"ctx_len\"]\n",
" context_size=GPT_CONFIG_124M[\"context_length\"]\n",
")\n",
"\n",
"print(\"Output:\", out)\n",