import tensorflow as tf
from tensorflow.keras.layers import Input, Embedding, LSTM, Dense, Dropout
from tensorflow.keras.models import Model
from tensorflow.keras.callbacks import EarlyStopping
# --- Synthetic data placeholders (replace with real data loading) ---
# Hyperparameters are declared first so the fake data is generated from the
# same constants the model consumes, keeping vocab/class ranges in sync
# (the original duplicated 1000 and 5 as magic numbers in the generators).
vocab_size = 1000    # token-id range fed to the Embedding layer
embedding_dim = 64   # width of the learned token embeddings
num_classes = 5      # number of target classes (sparse integer labels)

# Integer token-id sequences of length 10 and integer class labels.
X_train = tf.random.uniform((1000, 10), maxval=vocab_size, dtype=tf.int32)
y_train = tf.random.uniform((1000,), maxval=num_classes, dtype=tf.int32)
X_val = tf.random.uniform((200, 10), maxval=vocab_size, dtype=tf.int32)
y_val = tf.random.uniform((200,), maxval=num_classes, dtype=tf.int32)
# --- Model: token ids -> Embedding -> LSTM -> Dropout -> softmax classifier ---
# dtype="int32" because the inputs are integer token ids for the Embedding
# layer; the default float32 Input would force an implicit cast at fit time.
inputs = Input(shape=(10,), dtype="int32")
embedding = Embedding(vocab_size, embedding_dim)(inputs)
lstm = LSTM(64)(embedding)  # final hidden state only (return_sequences=False)
drop = Dropout(0.5)(lstm)  # regularization between LSTM and classifier head
outputs = Dense(num_classes, activation='softmax')(drop)
model = Model(inputs, outputs)

# Sparse categorical loss matches the integer (non-one-hot) labels above.
model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])

# Stop once val_loss fails to improve for 3 consecutive epochs, and restore
# the best-performing weights instead of keeping the final epoch's.
early_stop = EarlyStopping(monitor='val_loss', patience=3, restore_best_weights=True)
history = model.fit(X_train, y_train, epochs=30, batch_size=32,
                    validation_data=(X_val, y_val), callbacks=[early_stop])