"""Train a single-neuron network on a toy pattern via backpropagation.

Prints the learned weights, bias, and predictions after training.
"""
import numpy as np
# Simple neural network with one input, one neuron, one output
# Activation function: sigmoid and its derivative
def sigmoid(x):
    """Logistic activation: squash any real input into the open interval (0, 1)."""
    return 1.0 / (1.0 + np.exp(-x))
def sigmoid_derivative(x):
    """Sigmoid gradient expressed through its output.

    NOTE: x must already be a sigmoid *output* (i.e. x = sigmoid(z)),
    in which case d(sigmoid)/dz = x * (1 - x).
    """
    complement = 1 - x
    return x * complement
# Training set: four scalar samples (column vector) whose binary targets
# step from 0 to 1 at input value 2.
inputs = np.array([[0], [1], [2], [3]])
actual_output = np.array([[0], [0], [1], [1]])

# Random initial parameters for the single neuron (same draw order as before:
# first the 1x1 weight matrix, then the scalar bias), plus the step size.
weights = np.random.uniform(size=(1, 1))
bias = np.random.uniform()
learning_rate = 0.1
# Full-batch gradient descent for a fixed number of epochs.
for epoch in range(1000):
    # Forward pass: affine transform of the inputs, then sigmoid activation.
    activation = sigmoid(inputs @ weights + bias)
    # Error signal propagated back through the sigmoid
    # (sigmoid_derivative takes the activation itself, not the pre-activation).
    delta = (actual_output - activation) * sigmoid_derivative(activation)
    # Parameter update: accumulate the gradient over all four samples.
    weights += learning_rate * (inputs.T @ delta)
    bias += learning_rate * delta.sum()
# Re-run the forward pass with the trained parameters and report the results.
final_output = sigmoid(inputs @ weights + bias)
print("Weights after training:", weights.flatten())
print("Bias after training:", bias)
print("Predictions after training:", final_output.flatten())