Not a member of Pastebin yet? Sign up — it unlocks many cool features!
# Build the LSTM model
def getModel():
    """Build and compile the LSTM text-classification model.

    Architecture: trainable Embedding -> single LSTM -> softmax Dense,
    compiled with Adam and categorical cross-entropy.

    NOTE(review): relies on module-level globals defined elsewhere in the
    file — nWords, kTokenizer, sequenceLength, labelToName. Confirm they
    are in scope before this is called.

    Returns:
        A compiled ``keras.models.Sequential`` model.
    """
    # Layer widths derived from the vocabulary-size knob; int() guards
    # against nWords being a float.
    units1, units2 = int(nWords / 4), int(nWords / 8)

    model = keras.models.Sequential()
    # Modern public path: keras.layers.Embedding (the old
    # keras.layers.embeddings submodule was removed in current Keras).
    # input_dim covers every tokenizer word index plus the reserved
    # index 0 (Keras tokenizers start word indices at 1).
    model.add(keras.layers.Embedding(
        input_dim=len(kTokenizer.word_index) + 1,
        output_dim=units1,
        input_length=sequenceLength,
        trainable=True))
    # Single LSTM; only the final hidden state feeds the classifier.
    model.add(keras.layers.LSTM(units=units2, return_sequences=False))
    # One unit per class label; softmax pairs with categorical_crossentropy.
    model.add(keras.layers.Dense(len(labelToName), activation='softmax'))
    model.compile(optimizer='adam',
                  loss='categorical_crossentropy',
                  metrics=['acc'])
    return model
Add Comment
Please sign in to add a comment.