Not a member of Pastebin yet? Sign up — it unlocks many cool features!
# Keras model-building imports. NOTE(review): several names
# (concatenate, LSTM, Bidirectional, Activation, l1_l2, plot_model)
# are imported but unused in the code below — presumably kept for
# experimentation; confirm before removing.
from keras.layers import (Dense, Dropout, Embedding, GlobalAveragePooling1D,
                          Input, Bidirectional, Activation, Reshape)
from keras.layers.merge import concatenate
from keras.layers.recurrent import LSTM
from keras.models import Model, Sequential
from keras.optimizers import SGD
from keras.regularizers import l1_l2
from keras.utils import plot_model
def build_dense_model(n_labels, max_features=20000, maxlen=30,
                      embedding_dims=50):
    """Build and compile a bag-of-embeddings dense classifier (functional API).

    The model embeds each of ``maxlen`` token indices, flattens the
    resulting ``(maxlen, embedding_dims)`` tensor into one vector per
    sample, projects it through a 50-unit dense "representation" layer,
    and ends in an ``n_labels``-way softmax.

    Args:
        n_labels: Number of output classes (was an undefined free
            variable in the original paste — now an explicit parameter).
        max_features: Vocabulary size; the Embedding table holds
            ``max_features + 1`` rows to leave room for index 0 / OOV.
        maxlen: Fixed input sequence length.
        embedding_dims: Dimensionality of each token embedding.

    Returns:
        A compiled ``keras.models.Model`` (categorical cross-entropy,
        SGD with momentum, accuracy metric).
    """
    # Input shape now derives from maxlen instead of a duplicated
    # hard-coded 30.
    visible = Input(shape=(maxlen,), name="Input")
    embedding = Embedding(max_features + 1,
                          embedding_dims,
                          input_length=maxlen,
                          name="Embedding")(visible)
    embedding = Dropout(rate=0.5)(embedding)
    # Flatten (maxlen, embedding_dims) into a single vector per sample.
    transformed = Reshape((-1,))(embedding)
    representation = Dense(50, name="Representation")(transformed)
    representation = Dropout(rate=0.5)(representation)
    output = Dense(n_labels, activation='softmax', name="Output")(representation)
    model = Model(inputs=visible, outputs=output)
    sgd = SGD(lr=0.01, momentum=0.8, decay=0.0, nesterov=False)
    model.compile(loss='categorical_crossentropy',
                  optimizer=sgd,
                  metrics=['accuracy'])
    return model
def build_fasttext_model(n_labels, max_features=20000, maxlen=30,
                         embedding_dims=50):
    """Build and compile a fastText-style averaged-embedding classifier.

    Sequential model: embed each token, average the embeddings over the
    sequence with GlobalAveragePooling1D, then project onto an
    ``n_labels``-way softmax. The bare ``return model`` in the original
    paste was illegal at module scope, so the snippet is wrapped in a
    function here.

    Args:
        n_labels: Number of output classes (was an undefined free
            variable in the original paste — now an explicit parameter).
        max_features: Vocabulary size; the Embedding table holds
            ``max_features + 1`` rows to leave room for index 0 / OOV.
        maxlen: Fixed input sequence length.
        embedding_dims: Dimensionality of each token embedding.

    Returns:
        A compiled ``keras.models.Sequential`` (categorical
        cross-entropy, SGD with momentum, accuracy metric).
    """
    model = Sequential()
    # We start off with an efficient embedding layer which maps our
    # vocab indices into embedding_dims dimensions.
    model.add(Embedding(max_features + 1,
                        embedding_dims,
                        input_length=maxlen))
    model.add(Dropout(rate=0.5))
    # GlobalAveragePooling1D averages the embeddings of all words in
    # the document into one vector per sample.
    model.add(GlobalAveragePooling1D())
    model.add(Dropout(rate=0.5))
    # Project onto an n_labels-unit softmax output layer. (The original
    # comment claimed a single sigmoid unit, which did not match the code.)
    model.add(Dense(n_labels, activation='softmax'))
    sgd = SGD(lr=0.01, momentum=0.8, decay=0.0, nesterov=False)
    model.compile(loss='categorical_crossentropy',
                  optimizer=sgd,
                  metrics=['accuracy'])
    return model
Add Comment
Please sign in to add a comment.