Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
from time import time

import numpy as np
from tensorflow.keras.layers import Input, Dense, LSTM, Bidirectional, Conv1D
from tensorflow.keras.layers import Flatten, Dropout
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
def timeit(func, iterations, *args):
    """Benchmark `func(*args)` over `iterations` calls.

    Prints one '.' per iteration as a progress marker, then the total
    elapsed time.  Returns the elapsed seconds so callers can use the
    measurement programmatically (previously it was only printed).

    Args:
        func: callable to benchmark.
        iterations: number of times to invoke `func`.
        *args: positional arguments forwarded to each call.

    Returns:
        float: total elapsed time in seconds across all iterations.
    """
    # perf_counter is monotonic and high-resolution; time.time() can jump
    # if the system clock is adjusted mid-benchmark.
    from time import perf_counter

    t0 = perf_counter()
    for _ in range(iterations):
        print('.')
        func(*args)
    elapsed = perf_counter() - t0
    print("%.4f sec" % elapsed)
    return elapsed
# Fixed-size batches: (batch, timesteps, features).
batch_size = 32
batch_shape = (batch_size, 400, 16)

ipt = Input(batch_shape=batch_shape)
# NOTE(review): the Bidirectional layer's output was discarded — the next
# line re-reads `ipt`, so the bidirectional LSTM was never part of the
# traced Model yet still allocated weights.  Kept commented out as the
# benchmark's alternative branch; swap the two lines to time it instead.
# x = Bidirectional(LSTM(512, activation='relu', return_sequences=True))(ipt)
x = LSTM(512, activation='relu', return_sequences=True)(ipt)
# Kernel width 400 spans the full sequence; 'same' padding keeps length 400.
x = Conv1D(128, 400, strides=1, padding='same')(x)
x = Flatten()(x)
x = Dense(256, activation='relu')(x)
x = Dropout(0.5)(x)
x = Dense(128, activation='relu')(x)
x = Dense(64, activation='relu')(x)
out = Dense(1, activation='sigmoid')(x)
model = Model(ipt, out)

# Random inputs matching the fixed batch shape; binary labels for BCE loss.
X = np.random.randn(*batch_shape)
Y = np.random.randint(0, 2, (batch_size, 1))

# `lr` is a deprecated alias (removed in Keras 3); use `learning_rate`.
model.compile(Adam(learning_rate=1e-4), 'binary_crossentropy')

# Warm-up call: the first train_on_batch builds/compiles the graph, so it
# is excluded from the timed loop.
model.train_on_batch(X, Y)
timeit(model.train_on_batch, 4, X, Y)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement