import torch
import torch.nn as nn
class DynamicModel(nn.Module):
    """RNN over variable-length sequences with a data-dependent activation.

    Runs an ``nn.RNNCell`` one timestep at a time over the sequence
    dimension, applies ReLU or tanh to each hidden state depending on the
    sign of its mean, and projects the final timestep's activation to one
    scalar per batch element.
    """

    def __init__(self, input_size, hidden_size):
        super().__init__()
        self.rnn_cell = nn.RNNCell(input_size, hidden_size)
        self.fc = nn.Linear(hidden_size, 1)

    def forward(self, x):
        """Forward pass.

        Args:
            x: Tensor of shape (batch_size, seq_len, input_size).

        Returns:
            Tensor of shape (batch_size, 1).
        """
        batch_size, seq_len, _ = x.size()
        # Bug fix: allocate the initial hidden state on the same device and
        # with the same dtype as the input. The original bare torch.zeros(...)
        # always produced a CPU float32 tensor and crashed for CUDA/MPS or
        # non-default-dtype inputs.
        hidden = x.new_zeros(batch_size, self.rnn_cell.hidden_size)
        outputs = []
        for t in range(seq_len):
            input_t = x[:, t, :]
            hidden = self.rnn_cell(input_t, hidden)
            # Data-dependent branch: ReLU when the mean hidden value is
            # positive, tanh otherwise. NOTE(review): the condition mixes all
            # batch elements into one scalar, and the branch makes the model
            # hostile to torch.jit.trace/export — confirm this is intended.
            if hidden.mean() > 0:
                activated = torch.relu(hidden)
            else:
                activated = torch.tanh(hidden)
            outputs.append(activated)
        # torch.stack with dim=1 yields (batch_size, seq_len, hidden_size)
        # directly (the original comment claimed a (seq_len, batch, ...)
        # intermediate, which never exists).
        outputs = torch.stack(outputs, dim=1)
        # Only the last timestep feeds the prediction head.
        last_output = outputs[:, -1, :]
        out = self.fc(last_output)
        return out
# Demo: instantiate the model and run it on two batches with different
# sequence lengths to show the step-wise RNN handles variable-length input.
model = DynamicModel(input_size=3, hidden_size=5)

# Both batches: batch=2, input_size=3; sequence lengths 4 and 6.
short_batch = torch.randn(2, 4, 3)
long_batch = torch.randn(2, 6, 3)

output1 = model(short_batch)
print(f"Output with seq_len=4: {output1}")

output2 = model(long_batch)
print(f"Output with seq_len=6: {output2}")