import tensorflow as tf
from tensorflow.keras import layers, models
from tensorflow.keras.preprocessing.image import ImageDataGenerator
# --- Data pipeline ------------------------------------------------------
# Augmentation is applied to the *training* subset only; validation images
# are just rescaled so evaluation reflects real, unaugmented inputs.
# Shared settings live in one place so train/val cannot silently drift apart.
IMG_SIZE = (64, 64)       # spatial resolution fed to the network
BATCH_SIZE = 32
VALIDATION_SPLIT = 0.2    # fraction of files per class held out for validation

train_datagen = ImageDataGenerator(
    rescale=1./255,            # scale pixel values into [0, 1]
    rotation_range=20,
    width_shift_range=0.2,
    height_shift_range=0.2,
    horizontal_flip=True,
    validation_split=VALIDATION_SPLIT,
)
train_generator = train_datagen.flow_from_directory(
    'face_dataset/train',
    target_size=IMG_SIZE,
    batch_size=BATCH_SIZE,
    class_mode='categorical',
    subset='training',
)

# Separate, augmentation-free generator for validation. Both generators use
# the same validation_split, and flow_from_directory's split is deterministic
# (it partitions the sorted file list), so the two subsets do not overlap.
val_datagen = ImageDataGenerator(
    rescale=1./255,
    validation_split=VALIDATION_SPLIT,
)
validation_generator = val_datagen.flow_from_directory(
    'face_dataset/train',
    target_size=IMG_SIZE,
    batch_size=BATCH_SIZE,
    class_mode='categorical',
    subset='validation',
    shuffle=False,   # stable order: lets predictions align with .classes later
)
# CNN classifier: two conv/pool feature-extraction stages followed by a
# dense head. Dropout after the conv stack (0.3) and before the output
# layer (0.5) regularizes the network against overfitting.
model = models.Sequential()
model.add(layers.Conv2D(32, (3, 3), activation='relu', input_shape=(64, 64, 3)))
model.add(layers.MaxPooling2D(2, 2))
model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.MaxPooling2D(2, 2))
model.add(layers.Dropout(0.3))
model.add(layers.Flatten())
model.add(layers.Dense(128, activation='relu'))
model.add(layers.Dropout(0.5))
# Output width follows the dataset: one softmax unit per discovered class.
model.add(layers.Dense(train_generator.num_classes, activation='softmax'))
model.compile(
    # Reduced LR (vs. Adam's 1e-3 default) for steadier convergence on a
    # small dataset.
    optimizer=tf.keras.optimizers.Adam(learning_rate=0.0005),
    loss='categorical_crossentropy',   # matches class_mode='categorical' one-hot labels
    metrics=['accuracy'],
)

# Stop training once val_loss stops improving and roll back to the best
# epoch's weights — otherwise a fixed 30-epoch run keeps the *last* (possibly
# overfit) weights, defeating the script's anti-overfitting measures.
early_stopping = tf.keras.callbacks.EarlyStopping(
    monitor='val_loss',
    patience=5,                  # tolerate 5 stagnant epochs before stopping
    restore_best_weights=True,
)

history = model.fit(
    train_generator,
    epochs=30,                   # upper bound; EarlyStopping may end sooner
    validation_data=validation_generator,
    callbacks=[early_stopping],
)