import numpy as np
import tensorflow as tf
from tensorflow.keras import layers, models
# Load example dataset (e.g., Oxford-IIIT Pet Dataset for segmentation)
# For simplicity, use dummy data here
input_shape = (128, 128, 3)  # height, width, RGB channels
num_classes = 3  # e.g., background, object1, object2

# Build a simple fully convolutional network: one downsampling stage,
# one upsampling stage, then a 1x1 conv producing per-pixel class
# probabilities (softmax over the channel axis).
inputs = layers.Input(shape=input_shape)
features = layers.Conv2D(32, 3, activation='relu', padding='same')(inputs)
features = layers.MaxPooling2D()(features)  # 128x128 -> 64x64
features = layers.Conv2D(64, 3, activation='relu', padding='same')(features)
features = layers.UpSampling2D()(features)  # 64x64 -> back to 128x128
x = layers.Conv2D(num_classes, 1, activation='softmax', padding='same')(features)
model = models.Model(inputs, x)
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
# Create dummy data: 10 random images plus per-pixel one-hot labels of
# shape (10, 128, 128, num_classes).
# NOTE: the original loop body here was unindented, which raises
# IndentationError; the one-hot encoding is now done in one vectorized
# step instead of a per-image / per-class loop.
num_samples = 10
X_train = np.random.rand(num_samples, *input_shape).astype('float32')
# Randomly assign each pixel a class id in [0, num_classes) ...
labels = np.random.randint(0, num_classes, (num_samples,) + input_shape[:2])
# ... then one-hot encode along the last axis via an identity-matrix lookup.
y_train = np.eye(num_classes, dtype='float32')[labels]
# Train the model on the dummy data, holding out 20% for validation.
history = model.fit(X_train, y_train, epochs=5, batch_size=2, validation_split=0.2)

# Report the final-epoch accuracies as percentages.
metrics = history.history
train_acc = metrics['accuracy'][-1] * 100
val_acc = metrics['val_accuracy'][-1] * 100
print(f'Training accuracy: {train_acc:.2f}%')
print(f'Validation accuracy: {val_acc:.2f}%')