import tensorflow as tf
from tensorflow.keras import layers, models, callbacks
# Fetch the MNIST digit images and labels (train/test splits).
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
# Scale pixel values into [0, 1] and append a trailing channel axis,
# turning each image into shape (28, 28, 1) for the Conv2D layers below.
x_train = (x_train / 255.0)[..., tf.newaxis]
x_test = (x_test / 255.0)[..., tf.newaxis]
# Assemble a small Conv -> Pool -> Conv -> Pool -> Dense classifier
# for 28x28 single-channel inputs with 10 output classes.
model = models.Sequential()
model.add(layers.Conv2D(32, 3, activation='relu', input_shape=(28, 28, 1)))
model.add(layers.MaxPooling2D())
model.add(layers.Conv2D(64, 3, activation='relu'))
model.add(layers.MaxPooling2D())
model.add(layers.Flatten())
model.add(layers.Dense(64, activation='relu'))
model.add(layers.Dense(10, activation='softmax'))
# Starting learning rate handed to the Adam optimizer below;
# the LearningRateScheduler callback decays it during training.
initial_lr = 1e-2
def scheduler(epoch, lr):
    """Halve the learning rate once per epoch from epoch 5 onward.

    Args:
        epoch: 0-based index of the epoch about to start.
        lr: current learning rate (compounds across epochs, since Keras
            passes back the value returned for the previous epoch).

    Returns:
        The learning rate to use for this epoch.
    """
    return lr * 0.5 if epoch >= 5 else lr
# Wrap the schedule so Keras applies it at the start of each epoch.
lr_callback = callbacks.LearningRateScheduler(schedule=scheduler)
# Configure training: Adam starting at initial_lr; labels are integer
# class ids, hence sparse categorical cross-entropy over the softmax output.
adam = tf.keras.optimizers.Adam(learning_rate=initial_lr)
model.compile(
    optimizer=adam,
    loss='sparse_categorical_crossentropy',
    metrics=['accuracy'],
)
# Hold out 20% of the training data for validation; the callback
# adjusts the learning rate before every epoch.
history = model.fit(
    x_train,
    y_train,
    epochs=15,
    batch_size=64,
    validation_split=0.2,
    callbacks=[lr_callback],
)
# Final loss/accuracy on the untouched test split.
test_loss, test_acc = model.evaluate(x_test, y_test)