This code builds a small neural network with a softmax output layer for 3 classes, trains it on some simple dummy data, and then predicts class probabilities for one new example. The output shows the probability assigned to each class and that, as expected for a softmax output, the probabilities sum to (approximately) 1.
import tensorflow as tf
import numpy as np
# Build a small feed-forward classifier: 4 input features -> 5 hidden
# ReLU units -> 3-way softmax giving a probability per class.
model = tf.keras.Sequential([
    # Explicit Input layer: passing `input_shape=` to the first Dense
    # layer is deprecated in Keras 3 and emits a warning.
    tf.keras.layers.Input(shape=(4,)),
    tf.keras.layers.Dense(5, activation='relu'),
    tf.keras.layers.Dense(3, activation='softmax')
])
# sparse_categorical_crossentropy accepts integer class labels (0..2)
# directly, so the targets never need one-hot encoding.
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
# Dummy training set: six samples with four features apiece.
x_train = np.array(
    [[1, 2, 3, 4],
     [4, 3, 2, 1],
     [1, 0, 1, 0],
     [0, 1, 0, 1],
     [2, 2, 2, 2],
     [3, 3, 3, 3]],
    dtype=np.float32,
)
# One integer class label (0, 1, or 2) per sample.
y_train = np.array([0, 1, 2, 1, 0, 2], dtype=np.int32)

# Fit quietly for five epochs; the History object is kept for inspection.
history = model.fit(x_train, y_train, epochs=5, verbose=0)

# Run inference on one unseen sample and report the class distribution.
sample = np.array([[1, 2, 3, 4]], dtype=np.float32)
prediction = model.predict(sample)
print('Predicted probabilities:', prediction)
print('Sum of probabilities:', prediction.sum())