import tensorflow as tf
from tensorflow.keras.preprocessing.image import ImageDataGenerator
# Set the seed
tf.random.set_seed(42)
# Preprocess data (get all of the pixel values between 0 & 1, also called scaling/normalization)
train_datagen = ImageDataGenerator(rescale = 1./255)
valid_datagen = ImageDataGenerator(rescale = 1./255)
# Set up paths to our data directories
train_dir = '/content/pizza_steak/train'
test_dir = 'pizza_steak/test'
# Import data from directories and turn it into batches
train_data = train_datagen.flow_from_directory(directory = train_dir,
                                                batch_size = 32,
                                                target_size = (224, 224),
                                                class_mode = 'binary',
                                                seed = 42)
valid_data = valid_datagen.flow_from_directory(directory = test_dir,
                                                batch_size = 32,
                                                target_size = (224, 224),
                                                class_mode = 'binary',
                                                seed = 42)
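As a quick sanity check (not part of the original post), you can pull a single batch from the training generator and confirm the shapes produced by target_size and batch_size:

# Grab one batch from the generator; with batch_size = 32 and
# target_size = (224, 224), images.shape should be (32, 224, 224, 3)
# and labels.shape should be (32,).
images, labels = next(train_data)
print(images.shape, labels.shape)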
# Build a CNN model (same as the Tiny VGG from the CNN Explainer website)
model_1 = tf.keras.models.Sequential([
    tf.keras.layers.Conv2D(filters = 10,
                           kernel_size = 3,
                           activation = 'relu',
                           input_shape = (244, 244, 3)),
    tf.keras.layers.Conv2D(10, 3, activation = 'relu'),
    tf.keras.layers.MaxPool2D(pool_size = 2,
                              padding = 'valid'),
    tf.keras.layers.Conv2D(10, 3, activation = 'relu'),
    tf.keras.layers.Conv2D(10, 3, activation = 'relu'),
    tf.keras.layers.MaxPool2D(2),
    tf.keras.layers.Flatten(),
    tf.keras.layers.Dense(1, activation = 'sigmoid')
])
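To verify the architecture before compiling (an extra step, not in the original code), you can print the layer output shapes, which are all derived from the input_shape passed to the first Conv2D layer:

# Print the layer-by-layer output shapes; note they start from the
# input_shape = (244, 244, 3) given above, not from the generator's
# target_size of (224, 224).
model_1.summary()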
# Compile our CNN
model_1.compile(
    loss = 'binary_crossentropy',
    optimizer = tf.keras.optimizers.Adam(),
    metrics = ['accuracy']
)
# Fit the model
history_1 = model_1.fit(train_data,
                        epochs = 5,
                        steps_per_epoch = len(train_data),
                        validation_data = valid_data,
                        validation_steps = len(valid_data))
CodePudding user response:
You either need to change your input_shape to (224, 224, 3) or the target_size to (244, 244). The model's expected input shape and the shape of the images coming out of the generator have to match; it will not work with different shapes. Here is a working example:
import tensorflow as tf
import matplotlib.pyplot as plt

BATCH_SIZE = 32

flowers = tf.keras.utils.get_file(
    'flower_photos',
    'https://storage.googleapis.com/download.tensorflow.org/example_images/flower_photos.tgz',
    untar=True)

train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(rescale=1./255)
train_data = train_datagen.flow_from_directory(directory = flowers,
                                               batch_size = 32,
                                               target_size = (224, 224),
                                               class_mode = 'sparse',
                                               seed = 42)

model_1 = tf.keras.models.Sequential([
    tf.keras.layers.Conv2D(filters = 10,
                           kernel_size = 3,
                           activation = 'relu',
                           input_shape = (224, 224, 3)),
    tf.keras.layers.Conv2D(10, 3, activation = 'relu'),
    tf.keras.layers.MaxPool2D(pool_size = 2,
                              padding = 'valid'),
    tf.keras.layers.Conv2D(10, 3, activation = 'relu'),
    tf.keras.layers.Conv2D(10, 3, activation = 'relu'),
    tf.keras.layers.MaxPool2D(2),
    tf.keras.layers.Flatten(),
    tf.keras.layers.Dense(5, activation = 'softmax')
])

model_1.compile(
    loss = 'sparse_categorical_crossentropy',
    optimizer = tf.keras.optimizers.Adam(),
    metrics = ['accuracy']
)

history_1 = model_1.fit(train_data,
                        steps_per_epoch = len(train_data),
                        epochs = 5)
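The matplotlib import in the example is not used as written; as an optional follow-up (my addition, not part of the original answer), you could plot the accuracy curve recorded by fit():

# 'accuracy' is logged in history_1.history because of metrics = ['accuracy'].
plt.plot(history_1.history['accuracy'], label = 'train accuracy')
plt.xlabel('epoch')
plt.ylabel('accuracy')
plt.legend()
plt.show()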