Initial commit
@@ -0,0 +1,142 @@
|
||||
# Imports needed
|
||||
import os
|
||||
|
||||
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "2"
|
||||
import tensorflow as tf
|
||||
from tensorflow import keras
|
||||
from tensorflow.keras import layers
|
||||
from tensorflow.keras.preprocessing.image import ImageDataGenerator
|
||||
|
||||
# Input geometry and batching for the MNIST images.
img_height = 28
img_width = 28
batch_size = 2

# Small CNN classifier: two conv layers, one pooling stage, and a linear
# head that emits 10 un-normalized class scores (logits).
model = keras.Sequential()
model.add(layers.Input((28, 28, 1)))
model.add(layers.Conv2D(16, 3, padding="same"))
model.add(layers.Conv2D(32, 3, padding="same"))
model.add(layers.MaxPooling2D())
model.add(layers.Flatten())
model.add(layers.Dense(10))
|
||||
|
||||
# METHOD 1
# ==================================================== #
#              Using dataset_from_directory            #
# ==================================================== #
# The training and validation splits share every option except `subset`,
# so build them through one helper instead of two copy-pasted 12-kwarg
# calls that could silently drift apart.


def _load_mnist_split(subset):
    """Return the requested split ("training" or "validation") of the
    MNIST sub-folder dataset as a tf.data.Dataset of (image, label)
    batches. The identical seed in both calls keeps the splits disjoint.
    """
    return tf.keras.preprocessing.image_dataset_from_directory(
        "data/mnist_subfolders/",
        labels="inferred",
        label_mode="int",  # alternatives: "categorical", "binary"
        # class_names=['0', '1', '2', '3', ...]
        color_mode="grayscale",
        batch_size=batch_size,
        image_size=(img_height, img_width),  # reshape if not in this size
        shuffle=True,
        seed=123,
        validation_split=0.1,
        subset=subset,
    )


ds_train = _load_mnist_split("training")
ds_validation = _load_mnist_split("validation")
|
||||
|
||||
|
||||
def augment(x, y):
    """Randomly jitter image brightness; labels pass through unchanged."""
    brightened = tf.image.random_brightness(x, max_delta=0.05)
    return brightened, y


ds_train = ds_train.map(augment)

# Custom training loop skeleton (the actual train step is omitted).
for _epoch in range(10):
    for x, y in ds_train:
        # train here
        pass
|
||||
|
||||
|
||||
# Compile and train with the built-in Keras loop; logits + integer labels
# pair with SparseCategoricalCrossentropy(from_logits=True).
optimizer = keras.optimizers.Adam()
loss_fns = [keras.losses.SparseCategoricalCrossentropy(from_logits=True)]
model.compile(optimizer=optimizer, loss=loss_fns, metrics=["accuracy"])

model.fit(ds_train, epochs=10, verbose=2)
|
||||
|
||||
|
||||
# METHOD 2
# ================================================================== #
#             ImageDataGenerator and flow_from_directory             #
# ================================================================== #
# The generator rescales/augments on the fly while flow_from_directory
# streams image files from class sub-folders.

datagen = ImageDataGenerator(
    rescale=1.0 / 255,
    rotation_range=5,
    zoom_range=(0.95, 0.95),  # min == max, i.e. a fixed 0.95 zoom
    horizontal_flip=False,
    vertical_flip=False,
    validation_split=0.0,
    data_format="channels_last",
    dtype=tf.float32,
)

train_generator = datagen.flow_from_directory(
    "data/mnist_subfolders/",
    target_size=(img_height, img_width),
    color_mode="grayscale",
    batch_size=batch_size,
    class_mode="sparse",  # integer labels, matching the sparse CE loss
    subset="training",
    shuffle=True,
    seed=123,
)
|
||||
|
||||
|
||||
def training():
    """Placeholder for a single training step."""
    pass


# Custom loop over the dataset: cap each epoch at 25 batches,
# i.e. roughly len(train_dataset) / batch_size.
for epoch in range(10):
    batches_seen = 0
    for x, y in ds_train:
        batches_seen += 1
        # do training
        training()
        if batches_seen == 25:
            break
|
||||
|
||||
# Recompile so the optimizer state accumulated by the custom loop is reset.
model.compile(
    optimizer=keras.optimizers.Adam(),
    loss=[keras.losses.SparseCategoricalCrossentropy(from_logits=True)],
    metrics=["accuracy"],
)

# model.fit on a generator needs steps_per_epoch, since the generator
# yields batches forever. With a validation generator you would also pass
#   validation_data=validation_generator,
#   validation_steps=len(validation_set) / batch_size
model.fit(
    train_generator,
    epochs=10,
    steps_per_epoch=25,
    verbose=2,
)
|
||||
@@ -0,0 +1,52 @@
|
||||
import os
|
||||
|
||||
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "2"
|
||||
import tensorflow as tf
|
||||
import pandas as pd
|
||||
from tensorflow import keras
|
||||
from tensorflow.keras import layers
|
||||
|
||||
directory = "data/mnist_images_csv/"

# train.csv maps each image file name to its integer digit label;
# pair the two columns up as a tf.data.Dataset of (path, label).
annotations = pd.read_csv(directory + "train.csv")
file_paths = annotations["file_name"].values
labels = annotations["label"].values
ds_train = tf.data.Dataset.from_tensor_slices((file_paths, labels))
|
||||
|
||||
|
||||
def read_image(image_file, label):
    """Load `directory`/`image_file` as a float32 single-channel tensor;
    the label passes through unchanged."""
    raw = tf.io.read_file(directory + image_file)
    decoded = tf.image.decode_image(raw, channels=1, dtype=tf.float32)
    return decoded, label
|
||||
|
||||
|
||||
def augment(image, label):
    """Identity placeholder — real data augmentation would go here."""
    return image, label
|
||||
|
||||
|
||||
# Input pipeline: decode -> (no-op) augment -> batches of 2.
ds_train = ds_train.map(read_image).map(augment).batch(2)

# Custom training loop skeleton (the actual train step is omitted).
for epoch in range(10):
    for x, y in ds_train:
        # train here
        pass

# Small CNN classifier emitting 10 un-normalized class scores (logits).
model = keras.Sequential()
model.add(layers.Input((28, 28, 1)))
model.add(layers.Conv2D(16, 3, padding="same"))
model.add(layers.Conv2D(32, 3, padding="same"))
model.add(layers.MaxPooling2D())
model.add(layers.Flatten())
model.add(layers.Dense(10))

# Logits + integer labels pair with from_logits=True sparse CE.
model.compile(
    optimizer=keras.optimizers.Adam(),
    loss=[keras.losses.SparseCategoricalCrossentropy(from_logits=True)],
    metrics=["accuracy"],
)

model.fit(ds_train, epochs=10, verbose=2)
|
||||
@@ -0,0 +1,46 @@
|
||||
import os
|
||||
|
||||
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "2"
|
||||
import tensorflow as tf
|
||||
import pandas as pd
|
||||
from tensorflow import keras
|
||||
from tensorflow.keras import layers
|
||||
import pathlib # pathlib is in standard library
|
||||
|
||||
# Pipeline settings.
batch_size = 2
img_height = 28
img_width = 28

# Every sample sits as a bare .jpg in one flat folder; list_files globs
# the matching paths (and shuffles them by default).
directory = "data/mnist_images_only/"
ds_train = tf.data.Dataset.list_files(str(pathlib.Path(directory + "*.jpg")))
|
||||
|
||||
|
||||
def process_path(file_path):
    """Decode one JPEG and derive its integer label from the file name.

    Files are named like "3_1.jpg", so the first character of the base
    name is the digit label. Splitting on os.path.sep instead of a
    hard-coded "\\" keeps this working on non-Windows systems, and
    taking the last path component avoids depending on directory depth
    (the original indexed component [2] directly).
    """
    image = tf.io.read_file(file_path)
    image = tf.image.decode_jpeg(image, channels=1)
    parts = tf.strings.split(file_path, os.path.sep)
    label = tf.strings.substr(parts[-1], pos=0, len=1)
    label = tf.strings.to_number(label, out_type=tf.int64)
    return image, label
|
||||
|
||||
|
||||
# Decode + label every file, then batch.
ds_train = ds_train.map(process_path).batch(batch_size)

# Small CNN classifier emitting 10 un-normalized class scores (logits).
model = keras.Sequential()
model.add(layers.Input((28, 28, 1)))
model.add(layers.Conv2D(16, 3, padding="same"))
model.add(layers.Conv2D(32, 3, padding="same"))
model.add(layers.MaxPooling2D())
model.add(layers.Flatten())
model.add(layers.Dense(10))

# Logits + integer labels pair with from_logits=True sparse CE.
model.compile(
    optimizer=keras.optimizers.Adam(),
    loss=[keras.losses.SparseCategoricalCrossentropy(from_logits=True)],
    metrics=["accuracy"],
)

model.fit(ds_train, epochs=10, verbose=2)
|
||||
|
After Width: | Height: | Size: 602 B |
|
After Width: | Height: | Size: 628 B |
|
After Width: | Height: | Size: 539 B |
|
After Width: | Height: | Size: 579 B |
|
After Width: | Height: | Size: 634 B |
|
After Width: | Height: | Size: 492 B |
|
After Width: | Height: | Size: 427 B |
|
After Width: | Height: | Size: 491 B |
|
After Width: | Height: | Size: 475 B |
|
After Width: | Height: | Size: 460 B |
|
After Width: | Height: | Size: 604 B |
|
After Width: | Height: | Size: 592 B |
|
After Width: | Height: | Size: 571 B |
|
After Width: | Height: | Size: 564 B |
|
After Width: | Height: | Size: 541 B |
|
After Width: | Height: | Size: 646 B |
|
After Width: | Height: | Size: 583 B |
|
After Width: | Height: | Size: 630 B |
|
After Width: | Height: | Size: 609 B |
|
After Width: | Height: | Size: 591 B |
|
After Width: | Height: | Size: 625 B |
|
After Width: | Height: | Size: 582 B |
|
After Width: | Height: | Size: 602 B |
|
After Width: | Height: | Size: 593 B |
|
After Width: | Height: | Size: 600 B |
|
After Width: | Height: | Size: 514 B |
|
After Width: | Height: | Size: 575 B |
|
After Width: | Height: | Size: 571 B |
|
After Width: | Height: | Size: 579 B |
|
After Width: | Height: | Size: 617 B |
|
After Width: | Height: | Size: 603 B |
|
After Width: | Height: | Size: 589 B |
|
After Width: | Height: | Size: 513 B |
|
After Width: | Height: | Size: 531 B |
|
After Width: | Height: | Size: 563 B |
|
After Width: | Height: | Size: 555 B |
|
After Width: | Height: | Size: 523 B |
|
After Width: | Height: | Size: 621 B |
|
After Width: | Height: | Size: 571 B |
|
After Width: | Height: | Size: 540 B |
|
After Width: | Height: | Size: 622 B |
|
After Width: | Height: | Size: 598 B |
|
After Width: | Height: | Size: 578 B |
|
After Width: | Height: | Size: 596 B |
|
After Width: | Height: | Size: 599 B |
|
After Width: | Height: | Size: 554 B |
|
After Width: | Height: | Size: 548 B |
|
After Width: | Height: | Size: 552 B |
|
After Width: | Height: | Size: 539 B |
|
After Width: | Height: | Size: 570 B |
@@ -0,0 +1,52 @@
|
||||
file_name,label
|
||||
0_1.jpg, 0
|
||||
0_2.jpg, 0
|
||||
0_3.jpg, 0
|
||||
0_4.jpg, 0
|
||||
0_5.jpg, 0
|
||||
1_1.jpg, 1
|
||||
1_2.jpg, 1
|
||||
1_3.jpg, 1
|
||||
1_4.jpg, 1
|
||||
1_5.jpg, 1
|
||||
2_1.jpg, 2
|
||||
2_2.jpg, 2
|
||||
2_3.jpg, 2
|
||||
2_4.jpg, 2
|
||||
2_5.jpg, 2
|
||||
3_1.jpg, 3
|
||||
3_2.jpg, 3
|
||||
3_3.jpg, 3
|
||||
3_4.jpg, 3
|
||||
3_5.jpg, 3
|
||||
4_1.jpg, 4
|
||||
4_2.jpg, 4
|
||||
4_3.jpg, 4
|
||||
4_4.jpg, 4
|
||||
4_5.jpg, 4
|
||||
5_1.jpg, 5
|
||||
5_2.jpg, 5
|
||||
5_3.jpg, 5
|
||||
5_4.jpg, 5
|
||||
5_5.jpg, 5
|
||||
6_1.jpg, 6
|
||||
6_2.jpg, 6
|
||||
6_3.jpg, 6
|
||||
6_4.jpg, 6
|
||||
6_5.jpg, 6
|
||||
7_1.jpg, 7
|
||||
7_2.jpg, 7
|
||||
7_3.jpg, 7
|
||||
7_4.jpg, 7
|
||||
7_5.jpg, 7
|
||||
8_1.jpg, 8
|
||||
8_2.jpg, 8
|
||||
8_3.jpg, 8
|
||||
8_4.jpg, 8
|
||||
8_5.jpg, 8
|
||||
9_1.jpg, 9
|
||||
9_2.jpg, 9
|
||||
9_3.jpg, 9
|
||||
9_4.jpg, 9
|
||||
9_5.jpg, 9
|
||||
|
||||
|
|
After Width: | Height: | Size: 602 B |
|
After Width: | Height: | Size: 628 B |
|
After Width: | Height: | Size: 539 B |
|
After Width: | Height: | Size: 579 B |
|
After Width: | Height: | Size: 634 B |
|
After Width: | Height: | Size: 492 B |
|
After Width: | Height: | Size: 427 B |
|
After Width: | Height: | Size: 491 B |
|
After Width: | Height: | Size: 475 B |
|
After Width: | Height: | Size: 460 B |
|
After Width: | Height: | Size: 604 B |
|
After Width: | Height: | Size: 592 B |
|
After Width: | Height: | Size: 571 B |
|
After Width: | Height: | Size: 564 B |
|
After Width: | Height: | Size: 541 B |
|
After Width: | Height: | Size: 646 B |
|
After Width: | Height: | Size: 583 B |
|
After Width: | Height: | Size: 630 B |
|
After Width: | Height: | Size: 609 B |
|
After Width: | Height: | Size: 591 B |
|
After Width: | Height: | Size: 625 B |
|
After Width: | Height: | Size: 582 B |
|
After Width: | Height: | Size: 602 B |
|
After Width: | Height: | Size: 593 B |
|
After Width: | Height: | Size: 600 B |
|
After Width: | Height: | Size: 514 B |
|
After Width: | Height: | Size: 575 B |
|
After Width: | Height: | Size: 571 B |
|
After Width: | Height: | Size: 579 B |
|
After Width: | Height: | Size: 617 B |
|
After Width: | Height: | Size: 603 B |
|
After Width: | Height: | Size: 589 B |
|
After Width: | Height: | Size: 513 B |
|
After Width: | Height: | Size: 531 B |
|
After Width: | Height: | Size: 563 B |
|
After Width: | Height: | Size: 555 B |
|
After Width: | Height: | Size: 523 B |
|
After Width: | Height: | Size: 621 B |
|
After Width: | Height: | Size: 571 B |
|
After Width: | Height: | Size: 540 B |
|
After Width: | Height: | Size: 622 B |
|
After Width: | Height: | Size: 598 B |
|
After Width: | Height: | Size: 578 B |
|
After Width: | Height: | Size: 596 B |
|
After Width: | Height: | Size: 599 B |
|
After Width: | Height: | Size: 554 B |