import tensorflow as tf
from tensorflow.keras import layers, models
from tensorflow.keras.callbacks import EarlyStopping
# Load example dataset (CIFAR-10, restricted to its first 5 classes for a
# small demo). load_data() downloads and caches the dataset on first use.
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.cifar10.load_data()
# Keep only samples whose label is 0-4 (labels arrive with shape (n, 1),
# hence the flatten() before comparing).
train_filter = y_train.flatten() < 5
test_filter = y_test.flatten() < 5
x_train, y_train = x_train[train_filter], y_train[train_filter]
x_test, y_test = x_test[test_filter], y_test[test_filter]
# Scale pixel values from [0, 255] to [0, 1]. Cast to float32 first:
# dividing the uint8 arrays directly would produce float64, doubling
# memory use for no benefit (Keras computes in float32 by default).
x_train = x_train.astype("float32") / 255.0
x_test = x_test.astype("float32") / 255.0
# One-hot encode labels to match the categorical_crossentropy loss used
# when the model is compiled.
num_classes = 5
y_train_cat = tf.keras.utils.to_categorical(y_train, num_classes)
y_test_cat = tf.keras.utils.to_categorical(y_test, num_classes)
# Build a small fully-connected classifier. Dropout between the dense
# layers reduces overfitting on this small 5-class subset.
model = models.Sequential([
    # Explicit Input layer: passing input_shape= to the first layer is
    # deprecated in Keras 3; this declares the same (32, 32, 3) input.
    layers.Input(shape=(32, 32, 3)),
    layers.Flatten(),
    layers.Dense(128, activation='relu'),
    layers.Dropout(0.4),
    layers.Dense(64, activation='relu'),
    layers.Dropout(0.3),
    # Softmax output: one probability per class.
    layers.Dense(num_classes, activation='softmax')
])
# categorical_crossentropy matches the one-hot encoded labels prepared above.
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
# Stop training once validation loss has not improved for 5 consecutive
# epochs, and roll the weights back to the best epoch observed.
early_stop = EarlyStopping(monitor='val_loss', patience=5, restore_best_weights=True)
# Fit the model. The final 20% of the training data is held out for
# validation each epoch; verbose=0 suppresses per-epoch progress output.
fit_options = dict(
    epochs=50,
    batch_size=64,
    validation_split=0.2,
    callbacks=[early_stop],
    verbose=0,
)
history = model.fit(x_train, y_train_cat, **fit_options)
# Measure final performance on the held-out test set and report it.
evaluation = model.evaluate(x_test, y_test_cat, verbose=0)
test_loss, test_acc = evaluation
print(f'Test accuracy: {test_acc*100:.2f}%', f'Test loss: {test_loss:.4f}')