Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
"""Composite LSTM autoencoder: one shared encoder, two decoders.

decoder1 reconstructs the (zero-padded) input sequence; decoder2 predicts
the per-sample scalar label. Variable-length sequences are padded to a
common length and masked so the encoder ignores the padding timesteps.
"""
import tensorflow as tf
from tensorflow.keras.layers import Dense, LSTM, Masking, TimeDistributed, RepeatVector
from tensorflow.keras.preprocessing.sequence import pad_sequences
import numpy as np

# (samples, timesteps, features): samples=4, features=3, variable timesteps.
# Kept as plain Python lists: np.array() over ragged nested lists raises
# ValueError on NumPy >= 1.24, and pad_sequences accepts lists directly.
train_X = [
    [[0, 1, 2], [9, 8, 7], [3, 6, 8]],
    [[3, 4, 5]],
    [[6, 7, 8], [6, 5, 4], [1, 7, 4]],
    [[9, 0, 1], [3, 7, 4]],
]
# One binary label per sample, reshaped to (samples, n_out, 1) so the MSE
# target matches decoder2's TimeDistributed(Dense(1)) output shape exactly.
train_Y = np.array([0, 1, 1, 0]).reshape((-1, 1, 1))

n_in = 3    # timesteps after padding
n_feat = 3  # features per timestep
n_out = 1   # output-sequence length for decoder2

# Zero-pad each sample at the end to n_in timesteps. The Masking layer
# below skips any timestep whose features are all equal to mask_value.
train_X = pad_sequences(train_X, padding='post')

inputs = tf.keras.Input(shape=(n_in, n_feat))
masked_input = Masking(mask_value=0)(inputs)

# Shared encoder: compress each sequence into a single 100-dim vector.
encoder = LSTM(100, activation='relu')(masked_input)

# Decoder 1: reconstruct the input sequence from the encoding.
decoder1 = RepeatVector(n_in)(encoder)
decoder1 = LSTM(100, activation='relu', return_sequences=True)(decoder1)
decoder1 = TimeDistributed(Dense(n_feat))(decoder1)

# Decoder 2: predict the output sequence (a single scalar per sample).
decoder2 = RepeatVector(n_out)(encoder)
decoder2 = LSTM(100, activation='relu', return_sequences=True)(decoder2)
decoder2 = TimeDistributed(Dense(1))(decoder2)

model = tf.keras.Model(inputs=inputs, outputs=[decoder1, decoder2])
model.compile(optimizer='rmsprop', loss='mse')
# print(model.summary())

model.fit(train_X, [train_X, train_Y], epochs=500, verbose=1)
yhat = model.predict(train_X, verbose=0)
print(yhat)
print('==========================================')
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement