Mirror of https://github.com/rasbt/LLMs-from-scratch.git
Readability and code quality improvements (#959)

* Consistent dataset naming
* Consistent section headers

parent 7b1f740f74
commit be5e2a3331
@@ -54,7 +54,7 @@
     "<br>\n",
     " \n",
     "\n",
-    "## Using BPE from `tiktoken`"
+    "## 1. Using BPE from `tiktoken`"
    ]
   },
   {
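The section renamed in this hunk covers OpenAI's `tiktoken` package. As a minimal usage sketch (not the notebook's exact code), the GPT-2 encoding can be loaded and applied like this:

```python
# Minimal tiktoken sketch (pip install tiktoken); illustrative only.
import tiktoken

tik_tokenizer = tiktoken.get_encoding("gpt2")

text = "Hello, world. Is this-- a test?"
ids = tik_tokenizer.encode(text, allowed_special={"<|endoftext|>"})
print(ids)                        # token IDs
print(tik_tokenizer.decode(ids))  # round-trips back to the text
print(tik_tokenizer.n_vocab)      # 50257 for the GPT-2 vocabulary
```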
@@ -157,7 +157,7 @@
     "<br>\n",
     " \n",
     "\n",
-    "## Using the original BPE implementation used in GPT-2"
+    "## 2. Using the original BPE implementation used in GPT-2"
    ]
   },
   {
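This section refers to the original `encoder.py` shipped in the openai/gpt-2 repository. A rough sketch, assuming `encoder.py` is importable and the `encoder.json`/`vocab.bpe` files have already been downloaded into `models/gpt2/` (the paths and the `models_dir` argument follow the openai/gpt-2 layout and may differ by revision):

```python
# Sketch of the original OpenAI GPT-2 BPE; assumes encoder.py from
# https://github.com/openai/gpt-2 (src/encoder.py) is on the import path
# and that encoder.json / vocab.bpe sit in models/gpt2/.
from encoder import get_encoder

orig_tokenizer = get_encoder(model_name="gpt2", models_dir="models")

ids = orig_tokenizer.encode("Hello, world. Is this-- a test?")
print(ids)
print(orig_tokenizer.decode(ids))
```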
@@ -247,7 +247,7 @@
     "<br>\n",
     " \n",
     "\n",
-    "## Using the BPE via Hugging Face transformers"
+    "## 3. Using the BPE via Hugging Face transformers"
    ]
   },
   {
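The Hugging Face route goes through `GPT2Tokenizer` from `transformers`; a minimal sketch (not the notebook's exact code):

```python
# Minimal Hugging Face sketch (pip install transformers); illustrative only.
from transformers import GPT2Tokenizer

hf_tokenizer = GPT2Tokenizer.from_pretrained("gpt2")

text = "Hello, world. Is this-- a test?"
ids = hf_tokenizer(text)["input_ids"]
print(ids)
print(hf_tokenizer.decode(ids))
```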
@@ -355,7 +355,7 @@
     "<br>\n",
     " \n",
     "\n",
-    "## Using my own from-scratch BPE tokenizer"
+    "## 4. Using my own from-scratch BPE tokenizer"
    ]
   },
   {
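The from-scratch section implements BPE training itself. As a purely hypothetical stand-in for the book's class (not its actual code), the core step, repeatedly merging the most frequent adjacent pair into a new token ID, can be illustrated like this:

```python
# Hypothetical illustration of one BPE merge step; NOT the book's tokenizer.
from collections import Counter

def most_frequent_pair(ids):
    """Return the most common adjacent ID pair in the sequence."""
    return Counter(zip(ids, ids[1:])).most_common(1)[0][0]

def merge_pair(ids, pair, new_id):
    """Replace every non-overlapping occurrence of `pair` with `new_id`."""
    out, i = [], 0
    while i < len(ids):
        if i < len(ids) - 1 and (ids[i], ids[i + 1]) == pair:
            out.append(new_id)
            i += 2
        else:
            out.append(ids[i])
            i += 1
    return out

ids = list("aaabdaaabac".encode("utf-8"))  # start from raw UTF-8 bytes
pair = most_frequent_pair(ids)             # (97, 97), i.e. "aa"
ids = merge_pair(ids, pair, 256)           # 256 is the first free ID after bytes
print(pair, ids)
```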
@@ -449,7 +449,7 @@
     "<br>\n",
     " \n",
     "\n",
-    "## A quick performance benchmark"
+    "## 5. A quick performance benchmark"
    ]
   },
   {
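The benchmark section (whose four subsections are renamed in the hunks below) times the tokenizers against each other. A minimal sketch using the standard-library `timeit` module, assuming a sample text such as the book's `the-verdict.txt` is present (file name and repetition count are illustrative):

```python
# Illustrative timing comparison; not the notebook's benchmark code.
import timeit

import tiktoken
from transformers import GPT2Tokenizer

with open("the-verdict.txt", "r", encoding="utf-8") as f:
    raw_text = f.read()

tik_tokenizer = tiktoken.get_encoding("gpt2")
hf_tokenizer = GPT2Tokenizer.from_pretrained("gpt2")

t_tik = timeit.timeit(lambda: tik_tokenizer.encode(raw_text), number=10)
t_hf = timeit.timeit(lambda: hf_tokenizer(raw_text)["input_ids"], number=10)
print(f"tiktoken:     {t_tik:.3f} s")
print(f"transformers: {t_hf:.3f} s")
```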
@@ -468,7 +468,8 @@
    "id": "9c0ae9f0-47a1-4e7f-a210-e1d2721f4d1e",
    "metadata": {},
    "source": [
-    "### Original OpenAI GPT-2 tokenizer"
+    " \n",
+    "### 5.1 Original OpenAI GPT-2 tokenizer"
    ]
   },
   {
@@ -494,7 +495,8 @@
    "id": "ef2ce3f3-1f81-47ce-b563-99fe2c7a1e90",
    "metadata": {},
    "source": [
-    "### Tiktoken OpenAI GPT-2 tokenizer"
+    " \n",
+    "### 5.2 Tiktoken OpenAI GPT-2 tokenizer"
    ]
   },
   {
@@ -520,7 +522,8 @@
    "id": "0c748de8-273e-42df-b078-3a510106da60",
    "metadata": {},
    "source": [
-    "### Hugging Face OpenAI GPT-2 tokenizer"
+    " \n",
+    "### 5.3 Hugging Face OpenAI GPT-2 tokenizer"
    ]
   },
   {
@@ -614,7 +617,8 @@
    "id": "91ac2876-f36e-498c-bd75-8597a39f2d4b",
    "metadata": {},
    "source": [
-    "### My own GPT-2 tokenizer (for educational purposes)"
+    " \n",
+    "### 5.4 My own GPT-2 tokenizer (for educational purposes)"
    ]
   },
   {
@@ -652,7 +656,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.10.16"
+   "version": "3.13.5"
   }
  },
  "nbformat": 4,