import tensorflow as tf
from tensorflow.keras.layers import Input, Embedding, LSTM, Dense, Dropout
from tensorflow.keras.models import Model
from tensorflow.keras.callbacks import EarlyStopping
# Hyperparameters: vocabulary size, embedding width, fixed sequence length.
vocab_size = 20000
embedding_dim = 64
max_len = 100

# Sample data placeholders (replace with actual data loading).
# Features are integer token ids in [0, vocab_size); labels are binary {0, 1}.
X_train = tf.random.uniform((1000, max_len), maxval=vocab_size, dtype=tf.int32)
y_train = tf.random.uniform((1000, 1), maxval=2, dtype=tf.int32)
X_val = tf.random.uniform((200, max_len), maxval=vocab_size, dtype=tf.int32)
y_val = tf.random.uniform((200, 1), maxval=2, dtype=tf.int32)
# Binary text classifier: token ids -> embedding -> LSTM -> dropout -> sigmoid.
token_ids = Input(shape=(max_len,))
features = Embedding(vocab_size, embedding_dim)(token_ids)
features = LSTM(64, return_sequences=False)(features)
features = Dropout(0.5)(features)
probability = Dense(1, activation='sigmoid')(features)
model = Model(token_ids, probability)

model.compile(
    optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
    loss='binary_crossentropy',
    metrics=['accuracy'],
)

# Halt training once validation loss stops improving for 3 consecutive
# epochs, and roll the model back to its best-performing weights.
early_stop = EarlyStopping(monitor='val_loss', patience=3, restore_best_weights=True)

history = model.fit(
    X_train,
    y_train,
    epochs=20,
    batch_size=32,
    validation_data=(X_val, y_val),
    callbacks=[early_stop],
)