import torch
import torch.nn as nn
import torch.optim as optim
# Minimal model used for the demo below.
class SimpleNN(nn.Module):
    """A single linear layer mapping 10 input features to 3 class logits."""

    def __init__(self):
        super().__init__()
        # No hidden layers or activations — just one fully connected layer.
        self.fc = nn.Linear(10, 3)  # 3 classes

    def forward(self, x):
        # Returns raw (unnormalized) logits, as expected by CrossEntropyLoss.
        logits = self.fc(x)
        return logits
# Dummy dataset: random Gaussian features with uniformly random labels.
# (Purely synthetic — there is no learnable signal relating X to y.)
X_train = torch.randn(100, 10)
y_train = torch.randint(0, 3, (100,)) # class indices 0,1,2
X_val = torch.randn(30, 10)
y_val = torch.randint(0, 3, (30,))
model = SimpleNN()
# Plain SGD, no momentum; lr=0.1 is a reasonable default for this toy setup.
optimizer = optim.SGD(model.parameters(), lr=0.1)
# CrossEntropyLoss expects raw logits and integer class targets (see y_train above).
criterion = nn.CrossEntropyLoss()
def train(model, X, y, opt=None, crit=None):
    """Run one full-batch optimization step and report batch metrics.

    Args:
        model: network to train; switched to train mode here.
        X: input features, shape (batch, features).
        y: integer class targets, shape (batch,).
        opt: optimizer to step; defaults to the module-level ``optimizer``.
        crit: loss function; defaults to the module-level ``criterion``.

    Returns:
        Tuple of (loss, accuracy): the batch loss as a Python float and the
        batch accuracy as a percentage in [0, 100].
    """
    # Fall back to the module-level globals so existing callers are unchanged.
    if opt is None:
        opt = optimizer
    if crit is None:
        crit = criterion
    model.train()
    opt.zero_grad()
    outputs = model(X)
    loss = crit(outputs, y)
    loss.backward()
    opt.step()
    # Predicted class = argmax over the class-logit dimension.
    _, predicted = torch.max(outputs, 1)
    accuracy = (predicted == y).float().mean().item() * 100
    return loss.item(), accuracy
def evaluate(model, X, y, crit=None):
    """Compute loss and accuracy on a batch without updating the model.

    Args:
        model: network to evaluate; switched to eval mode here.
        X: input features, shape (batch, features).
        y: integer class targets, shape (batch,).
        crit: loss function; defaults to the module-level ``criterion``.

    Returns:
        Tuple of (loss, accuracy): the batch loss as a Python float and the
        batch accuracy as a percentage in [0, 100].
    """
    # Fall back to the module-level global so existing callers are unchanged.
    if crit is None:
        crit = criterion
    model.eval()
    with torch.no_grad():
        outputs = model(X)
        loss = crit(outputs, y)
        # Predicted class = argmax over the class-logit dimension.
        _, predicted = torch.max(outputs, 1)
        accuracy = (predicted == y).float().mean().item() * 100
    return loss.item(), accuracy
# Full-batch training for a fixed number of epochs.
for _epoch in range(30):
    train_loss, train_acc = train(model, X_train, y_train)
    val_loss, val_acc = evaluate(model, X_val, y_val)

# Report metrics from the final epoch only.
print(f"Training loss: {train_loss:.4f}, Training accuracy: {train_acc:.2f}%")
print(f"Validation loss: {val_loss:.4f}, Validation accuracy: {val_acc:.2f}%")