Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import math
- import numpy as np
- import h5py
- import keras
- from keras.layers import LSTM, Activation, Dense, BatchNormalization
- from sklearn import datasets
- from sklearn.linear_model import Ridge
- from keras.wrappers.scikit_learn import KerasRegressor
- from keras.wrappers.scikit_learn import KerasClassifier
- from keras.optimizers import SGD
- from sklearn.model_selection import GridSearchCV
# Open the competition data file (a MATLAB v7.3 .mat file, which is HDF5
# under the hood, so h5py can read it).
# NOTE(review): the handle is never closed -- tolerable in a one-shot
# script, but a `with h5py.File(...)` block would be cleaner.
datafile = h5py.File('SCNeuronModelCompetition.mat')
movie = datafile.get('trainingmovie_mini') # movie for training
frhist = datafile.get('FRhist_tr') # firing rate histograms
# a little normalization for the movie (assuming that the movie is 3D array)
def normalize(inputmovie):
    """Return *inputmovie* z-scored against its global statistics.

    Mean and standard deviation are taken jointly over all three axes,
    i.e. a single scalar mean/std for the whole movie; broadcasting
    preserves the input shape.
    """
    all_axes = (0, 1, 2)
    mu = np.mean(inputmovie, axis=all_axes)
    sigma = np.std(inputmovie, axis=all_axes)
    return (inputmovie - mu) / sigma
# Normalize the movie, then materialize both h5py datasets as in-memory
# numpy arrays (h5py Dataset objects are lazy until converted).
movie_norm = normalize(movie)
movie_norm = np.asarray(movie_norm)
frhist = np.asarray(frhist)
# Assumed movie layout: (chunks, timesteps, pixels) -- TODO confirm against
# the HDF5 file; the LSTM input_shape below relies on axes 1 and 2.
movie_chunk_length = movie_norm.shape[1]  # timesteps per training chunk
movie_pix = movie_norm.shape[2]  # pixels per frame (LSTM feature dimension)
#nHidden = [50,100,150,200,250]
# Hyperparameter grids swept by GridSearchCV further down.
nLayer = [3,4,5,6]
neurons = [5,10,15,20,25,30]
nSCNeu = frhist.shape[2]  # number of SC neurons = width of the output layer
batch_size = [5, 10, 15, 20]  # NOTE(review): defined but not in param_grid
learn_rate = [0.001, 0.01, 0.1, 0.2, 0.3]
momentum = [0.2, 0.4, 0.6, 0.8, 0.9]
seed = 5  # RNG seed used below for reproducibility
# Conventional aliases; the grid.fit call below uses the originals directly.
X = movie_norm
Y = frhist
def create_model(neurons=1, nLayer=3, learn_rate=0.01, momentum=0.2):
    """Build and compile a stacked-LSTM firing-rate regressor.

    Produces `nLayer` LSTM layers of `neurons` units each; every layer
    after the first is preceded by batch normalization and a relu.  A
    final Dense(nSCNeu) with softplus keeps predicted rates positive,
    and the Poisson loss matches count-like firing-rate targets.

    Reads module globals: movie_chunk_length, movie_pix, nSCNeu.
    """
    net = keras.models.Sequential()
    net.add(LSTM(neurons,
                 input_shape=(movie_chunk_length, movie_pix),
                 return_sequences=True,
                 implementation=2))
    # Remaining nLayer - 1 recurrent layers, each with BN + relu in front.
    for _extra in range(1, nLayer):
        net.add(BatchNormalization(momentum=momentum))
        net.add(Activation('relu'))
        net.add(LSTM(neurons, return_sequences=True))
    net.add(BatchNormalization(momentum=momentum))
    net.add(Activation('linear'))
    net.add(Dense(nSCNeu))
    net.add(Activation('softplus'))
    ##adamopt = keras.optimizers.Adam(lr=0.001, decay=1e-7) Custom optimizer above
    # Please make sure to use Poisson likelihood function for the loss function
    net.compile(optimizer=SGD(lr=learn_rate, momentum=momentum), loss='poisson')
    return net
# model.summary()
np.random.seed(seed)  # seed numpy's RNG so runs are reproducible
#early_stopping = keras.callbacks.EarlyStopping(monitor='val_loss', patience=10)
# Wrap the model builder so scikit-learn can treat it as an estimator.
model = KerasRegressor(build_fn = create_model, epochs=100, batch_size = 10, verbose=0)
# NOTE(review): `init` is never referenced (not a key in param_grid) --
# probably leftover from an earlier weight-initializer sweep; verify before removing.
init = ['glorot_uniform', 'normal', 'uniform']
# Grid keys must match create_model's keyword-argument names exactly.
param_grid = dict(neurons = neurons, nLayer= nLayer, learn_rate = learn_rate, momentum = momentum)
grid = GridSearchCV(estimator=model, param_grid = param_grid)
# Fit: X = normalized movie chunks, y = firing-rate histograms.
grid_result = grid.fit(movie_norm, frhist)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement