from some_other_script import (num_encoder_tokens, num_decoder_tokens,
                               encoder_input_data, decoder_input_data,
                               decoder_target_data)
from tensorflow import keras
# Add Dense to the imported layers
from tensorflow.keras.layers import Input, LSTM, Dense
from tensorflow.keras.models import Model
# Encoder training setup
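# The encoder LSTM returns its final hidden and cell states
# (return_state=True); the per-timestep outputs are discarded and only
# the two states are kept to initialize the decoder.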
encoder_inputs = Input(shape=(None, num_encoder_tokens))
encoder_lstm = LSTM(256, return_state=True)
encoder_outputs, state_hidden, state_cell = encoder_lstm(encoder_inputs)
encoder_states = [state_hidden, state_cell]
# Decoder training setup:
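# The decoder LSTM returns the full output sequence (return_sequences=True)
# so a prediction can be made at every timestep, and it is primed with the
# encoder's final states via initial_state.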
decoder_inputs = Input(shape=(None, num_decoder_tokens))
decoder_lstm = LSTM(256, return_sequences=True, return_state=True)
decoder_outputs, decoder_state_hidden, decoder_state_cell = decoder_lstm(decoder_inputs, initial_state=encoder_states)
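# A Dense softmax layer maps each decoder timestep to a probability
# distribution over the num_decoder_tokens possible output tokens.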
decoder_dense = Dense(num_decoder_tokens, activation='softmax')
decoder_outputs = decoder_dense(decoder_outputs)
# Building the training model:
training_model = Model([encoder_inputs, decoder_inputs], decoder_outputs)
print("Model summary:\n")
training_model.summary()
print("\n\n")
# Compile the model:
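# categorical_crossentropy matches decoder_target_data, which is assumed
# here to be one-hot encoded; rmsprop is a common default for recurrent models.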
training_model.compile(optimizer='rmsprop',
                       loss='categorical_crossentropy',
                       metrics=['accuracy'])
# Choose the batch size
# and number of epochs:
batch_size = 10
epochs = 100
print("Training the model:\n")
# Train the model:
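# Teacher forcing: decoder_input_data is assumed to be the target sequence
# offset by one timestep, so the model learns to predict decoder_target_data
# one token ahead; 20% of the samples are held out for validation.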
training_model.fit([encoder_input_data, decoder_input_data],
                   decoder_target_data,
                   batch_size=batch_size,
                   epochs=epochs,
                   validation_split=0.2)