Keras Embedding + Backwards LSTM
from keras.models import Sequential       # sequential model container
from keras.layers import Embedding, LSTM  # layers used

batch_size = 2      # how many sequences to process in parallel
time_steps = 3      # sequence length (number of LSTM time steps)
vocab_size = 10     # vocabulary size; index 0 is reserved as the mask/padding token
embedding_size = 5  # size of each embedding vector

model = Sequential()
model.add(Embedding(
    input_dim=vocab_size,     # Embedding's input_dim is the vocabulary size
    output_dim=embedding_size,
    input_length=time_steps,
    batch_input_shape=(batch_size, time_steps),
    mask_zero=True            # treat input id 0 as padding and mask it downstream
))
# go_backwards=True makes the LSTM read each sequence from the last step to the first;
# its input shape (batch_size, time_steps, embedding_size) is inferred from the Embedding layer.
model.add(LSTM(10, return_sequences=False, go_backwards=True))
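
A quick way to check the shapes is to push a dummy batch through the model. This is a minimal sketch; the input values and compile settings below are assumptions, not part of the original gist.

import numpy as np

# Dummy batch of token ids in [0, vocab_size); 0 acts as padding because of mask_zero=True.
x = np.array([[1, 2, 0],
              [3, 4, 5]])  # shape: (batch_size, time_steps)

model.compile(optimizer="adam", loss="mse")
out = model.predict(x, batch_size=batch_size)
print(out.shape)  # (2, 10): one 10-dim LSTM output per sequence

Since return_sequences=False, the model emits a single 10-dimensional vector per sequence: the LSTM's final output after reading the (reversed) sequence.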