import tensorflow as tf
from tensorflow.keras import layers, models
from tensorflow.keras.preprocessing.image import ImageDataGenerator
# Augmentation pipeline for training images. Pixel values are scaled to
# [0, 1]; random rotations, shifts, and horizontal flips add variety.
# `validation_split` reserves 20% of the files for the validation subset.
_train_aug = dict(
    rescale=1.0 / 255,
    rotation_range=20,
    width_shift_range=0.2,
    height_shift_range=0.2,
    horizontal_flip=True,
    validation_split=0.2,
)
train_datagen = ImageDataGenerator(**_train_aug)
# Validation images get only rescaling — no augmentation — so reported
# metrics reflect unmodified inputs. Uses the same 20% split fraction so
# the training/validation partition of the directory is consistent.
val_datagen = ImageDataGenerator(rescale=1.0 / 255, validation_split=0.2)
# Training subset: 80% of the images under data/train, resized to
# 128x128, batched, with one-hot (categorical) labels.
train_generator = train_datagen.flow_from_directory(
    directory='data/train',
    target_size=(128, 128),
    class_mode='categorical',
    batch_size=32,
    subset='training',
)
# Validation subset: the remaining 20% of the same directory.
# NOTE(review): this relies on Keras splitting files deterministically
# (by sorted order) so the two generators don't overlap — presumably
# true for ImageDataGenerator's validation_split; verify if shuffling
# of the underlying file list is ever introduced.
validation_generator = val_datagen.flow_from_directory(
    directory='data/train',
    target_size=(128, 128),
    class_mode='categorical',
    batch_size=32,
    subset='validation',
)
# Small CNN: two conv/pool stages, each followed by dropout, then a
# dense classifier head. Dropout rates rise toward the head (0.25 ->
# 0.5) to regularize the densest layers hardest.
model = models.Sequential()
model.add(layers.Conv2D(32, (3, 3), activation='relu', input_shape=(128, 128, 3)))
model.add(layers.MaxPooling2D(2, 2))
model.add(layers.Dropout(0.25))
model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.MaxPooling2D(2, 2))
model.add(layers.Dropout(0.25))
model.add(layers.Flatten())
model.add(layers.Dense(128, activation='relu'))
model.add(layers.Dropout(0.5))
# Softmax over 17 classes — original comment says "17 keypoints
# classes"; presumably one class per keypoint label. Verify this task
# really is single-label classification, not keypoint regression.
model.add(layers.Dense(17, activation='softmax'))
# Compile with Adam at a reduced learning rate (5e-4, half the Keras
# default) for more stable convergence; categorical cross-entropy
# matches the one-hot labels produced by class_mode='categorical'.
_optimizer = tf.keras.optimizers.Adam(learning_rate=5e-4)
model.compile(
    optimizer=_optimizer,
    loss='categorical_crossentropy',
    metrics=['accuracy'],
)
# Fit for 30 epochs, evaluating on the held-out split after each epoch;
# `history` keeps per-epoch loss/accuracy for later inspection.
history = model.fit(
    train_generator,
    validation_data=validation_generator,
    epochs=30,
)