import torch
from torch import nn, optim
from torchvision import datasets, transforms
from torch.utils.data import DataLoader

# Define transformations
transform = transforms.Compose([
    transforms.ToTensor(),
    transforms.Normalize((0.1307,), (0.3081,))  # MNIST mean and std
])
# Load datasets
train_dataset = datasets.MNIST(root='./data', train=True, download=True, transform=transform)
val_dataset = datasets.MNIST(root='./data', train=False, download=True, transform=transform)

# Data loaders: batch size 32, shuffle the training set only
train_loader = DataLoader(train_dataset, batch_size=32, shuffle=True)
val_loader = DataLoader(val_dataset, batch_size=32, shuffle=False)

# Simple neural network
class SimpleNN(nn.Module):
    def __init__(self):
        super().__init__()
        self.flatten = nn.Flatten()
        self.linear = nn.Sequential(
            nn.Linear(28 * 28, 128),
            nn.ReLU(),
            nn.Linear(128, 10)
        )

    def forward(self, x):
        x = self.flatten(x)
        return self.linear(x)

# Initialize model, loss, optimizer
model = SimpleNN()
criterion = nn.CrossEntropyLoss()
optimizer = optim.Adam(model.parameters(), lr=0.001)

# Training loop
for epoch in range(5):
    # Training phase
    model.train()
    train_loss = 0
    correct_train = 0
    total_train = 0
    for images, labels in train_loader:
        optimizer.zero_grad()
        outputs = model(images)
        loss = criterion(outputs, labels)
        loss.backward()
        optimizer.step()
        train_loss += loss.item() * images.size(0)  # accumulate per-sample loss
        _, predicted = outputs.max(1)
        correct_train += predicted.eq(labels).sum().item()
        total_train += labels.size(0)
    train_loss /= total_train
    train_acc = 100. * correct_train / total_train

    # Validation phase
    model.eval()
    val_loss = 0
    correct_val = 0
    total_val = 0
    with torch.no_grad():
        for images, labels in val_loader:
            outputs = model(images)
            loss = criterion(outputs, labels)
            val_loss += loss.item() * images.size(0)
            _, predicted = outputs.max(1)
            correct_val += predicted.eq(labels).sum().item()
            total_val += labels.size(0)
    val_loss /= total_val
    val_acc = 100. * correct_val / total_val

    print(f"Epoch {epoch+1}: Train Loss={train_loss:.4f}, Train Acc={train_acc:.2f}%, "
          f"Val Loss={val_loss:.4f}, Val Acc={val_acc:.2f}%")