import tensorflow as tf
from tensorflow.keras import layers, models
from tensorflow.keras.applications import MobileNetV2
from tensorflow.keras.preprocessing.image import ImageDataGenerator
# Transfer learning setup: reuse an ImageNet-pretrained MobileNetV2 as a
# frozen feature extractor and attach a small binary-classification head.
base_model = MobileNetV2(input_shape=(160, 160, 3), include_top=False, weights='imagenet')
base_model.trainable = False  # keep the pretrained convolutional weights fixed

# Stack the new classification head on top of the frozen backbone.
model = models.Sequential()
model.add(base_model)
model.add(layers.GlobalAveragePooling2D())        # collapse spatial dims to one feature vector
model.add(layers.Dropout(0.3))                    # regularize the small head
model.add(layers.Dense(1, activation='sigmoid'))  # single probability for the binary task

model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
# MobileNetV2's ImageNet weights were trained on inputs scaled to [-1, 1];
# a plain 1/255 rescale (-> [0, 1]) mismatches that, so use the model
# family's own preprocess_input instead.
preprocess = tf.keras.applications.mobilenet_v2.preprocess_input

# Augmentation (rotations, shifts, flips) applies to the TRAINING subset only.
train_datagen = ImageDataGenerator(preprocessing_function=preprocess,
                                   rotation_range=20, width_shift_range=0.2,
                                   height_shift_range=0.2, horizontal_flip=True,
                                   validation_split=0.2)
# Validation gets the same preprocessing but NO augmentation — evaluating on
# randomly distorted images makes val metrics noisy and pessimistic.
# With the same validation_split value, flow_from_directory selects the same
# deterministic tail of each class's file listing, so the train/validation
# subsets stay disjoint across the two generators.
val_datagen = ImageDataGenerator(preprocessing_function=preprocess,
                                 validation_split=0.2)

train_generator = train_datagen.flow_from_directory(
    'cats_and_dogs/train', target_size=(160, 160), batch_size=32,
    class_mode='binary', subset='training')
validation_generator = val_datagen.flow_from_directory(
    'cats_and_dogs/train', target_size=(160, 160), batch_size=32,
    class_mode='binary', subset='validation')

# Phase 1: train only the new head; the backbone stays frozen.
history = model.fit(train_generator, epochs=10, validation_data=validation_generator)
# Phase 2: fine-tune the top of the backbone at a much lower learning rate.
base_model.trainable = True
# Re-freeze everything except the last 20 layers so only the highest-level
# features adapt to the new dataset.
for layer in base_model.layers[:-20]:
    layer.trainable = False
# NOTE(review): any BatchNormalization layers among the unfrozen top layers
# will now update their moving statistics during fine-tuning; the TF
# transfer-learning guide recommends calling the base with training=False
# (functional API) to prevent this — confirm whether it matters here.

# Recompile so the trainable-flag changes take effect; a ~100x smaller
# learning rate avoids destroying the pretrained weights.
model.compile(optimizer=tf.keras.optimizers.Adam(1e-5),
              loss='binary_crossentropy', metrics=['accuracy'])

# Continue the epoch numbering from phase 1 (epochs is the final epoch
# index, not an additional count), so logs and LR schedules line up.
fine_tune_epochs = 10
total_epochs = len(history.epoch) + fine_tune_epochs
history_fine = model.fit(train_generator,
                         epochs=total_epochs,
                         initial_epoch=len(history.epoch),
                         validation_data=validation_generator)