Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# Build a 3-layer stacked-LSTM sequence classifier over the prepared
# note windows. Assumes (from elsewhere in the file — TODO confirm):
#   network_input  : 3-D array, shape (samples, timesteps, features)
#   network_output : one-hot targets of width n_vocab
#   n_vocab        : number of distinct output classes
#   Sequential/LSTM/Dropout/Dense/Activation/ModelCheckpoint and plt
#   are imported at the top of the file.
model = Sequential()
model.add(LSTM(
    256,
    input_shape=(network_input.shape[1], network_input.shape[2]),
    return_sequences=True  # feed full sequence to the next LSTM layer
))
model.add(Dropout(0.3))
model.add(LSTM(512, return_sequences=True))
model.add(Dropout(0.3))
model.add(LSTM(512))  # last LSTM returns only the final timestep
model.add(Dense(256))
model.add(Dropout(0.3))
model.add(Dense(n_vocab))
model.add(Activation('softmax'))  # class probabilities over the vocabulary
model.summary()
model.compile(loss='categorical_crossentropy', optimizer='rmsprop', metrics=['accuracy'])

# Checkpoint the weights whenever training loss improves.
# NOTE(review): with validation_split in use, monitoring 'val_loss'
# would track generalization instead of training fit — confirm intent.
filepath = "weights-improvement-{epoch:02d}-{loss:.4f}-bigger.hdf5"
checkpoint = ModelCheckpoint(
    filepath,
    monitor='loss',
    verbose=0,
    save_best_only=True,
    mode='min'
)
callbacks_list = [checkpoint]

history = model.fit(network_input, network_output, validation_split=0.33,
                    epochs=600, batch_size=64, callbacks=callbacks_list)
print(history.history.keys())

# Keras >= 2.3 / tf.keras names the metric 'accuracy'; older Keras used
# 'acc'. Resolve the key at runtime so the plotting works on both.
acc_key = 'accuracy' if 'accuracy' in history.history else 'acc'
val_acc_key = 'val_' + acc_key

# Accuracy history plot (train vs. held-out validation split).
plt.plot(history.history[acc_key])
plt.plot(history.history[val_acc_key])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
# The second curve comes from validation_split, not a test set.
plt.legend(['train', 'validation'], loc='upper left')
plt.savefig("acc_history.png")
plt.close()

# Loss history plot.
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='upper left')
plt.savefig("history_loss.png")
plt.close()  # match the accuracy plot: release the figure after saving
Add Comment
Please, Sign In to add comment