Advertisement
Guest User

Untitled

a guest
Nov 25th, 2017
94
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
Python 2.56 KB | None | 0 0
  1.  
  2. import math
  3. import numpy as np
  4. import h5py
  5. import keras
  6. from keras.layers import LSTM, Activation, Dense, BatchNormalization
  7. from sklearn import datasets
  8. from sklearn.linear_model import Ridge
  9. from keras.wrappers.scikit_learn import KerasRegressor
  10. from keras.wrappers.scikit_learn import KerasClassifier
  11. from keras.optimizers import SGD
  12. from sklearn.model_selection import GridSearchCV
  13.  
  14. datafile = h5py.File('SCNeuronModelCompetition.mat')
  15. movie = datafile.get('trainingmovie_mini') # movie for training
  16. frhist = datafile.get('FRhist_tr') # firing rate histograms
  17.  
  18. # a little normalization for the movie (assuming that the movie is 3D array)
  19. def normalize(inputmovie):
  20.     movie_mean = np.mean(inputmovie, axis=(0, 1,2))
  21.     movie_std = np.std(inputmovie, axis=(0, 1 ,2))
  22.     return (inputmovie - movie_mean) / movie_std
  23.  
# Normalize the movie and materialize both HDF5 datasets as numpy arrays.
movie_norm = normalize(movie)
movie_norm = np.asarray(movie_norm)
frhist = np.asarray(frhist)
# Assumed movie layout: (n_chunks, chunk_length, n_pixels) — TODO confirm
# against the .mat file's actual dataset shapes.
movie_chunk_length = movie_norm.shape[1]
movie_pix = movie_norm.shape[2]
#nHidden = [50,100,150,200,250]
# Hyper-parameter candidate lists consumed by the grid search below.
nLayer = [3,4,5,6]
neurons = [5,10,15,20,25,30]
nSCNeu = frhist.shape[2]  # output dimension: number of SC neurons
batch_size = [5, 10, 15, 20]  # NOTE(review): unused — never added to param_grid
learn_rate = [0.001, 0.01, 0.1, 0.2, 0.3]
momentum = [0.2, 0.4, 0.6, 0.8, 0.9]
seed = 5
X = movie_norm  # NOTE(review): X/Y aliases are unused; grid.fit is called with movie_norm/frhist directly
Y = frhist
  39. def create_model(neurons=1, nLayer=3, learn_rate=0.01, momentum=0.2):
  40.     model = keras.models.Sequential()
  41.     model.add(LSTM(neurons, input_shape=(movie_chunk_length,movie_pix), return_sequences=True, implementation=2))
  42.  
  43.     for _ in range(nLayer - 1):
  44.         model.add(BatchNormalization(momentum=momentum))
  45.         model.add(Activation('relu'))
  46.         model.add(LSTM(neurons, return_sequences=True))
  47.  
  48.     model.add(BatchNormalization(momentum=momentum))
  49.     model.add(Activation('linear'))
  50.     model.add(Dense(nSCNeu))
  51.     model.add(Activation('softplus'))
  52.     optimizer = SGD(lr=learn_rate, momentum = momentum)
  53.     ##adamopt = keras.optimizers.Adam(lr=0.001, decay=1e-7)   Custom optimizer above
  54.  
  55.     # Please make sure to use Poisson likelihood function for the loss function
  56.     model.compile(optimizer=optimizer, loss='poisson')
  57.     return model
  58.     # model.summary()
  59. np.random.seed(seed)
  60. #early_stopping = keras.callbacks.EarlyStopping(monitor='val_loss', patience=10)
  61. model = KerasRegressor(build_fn = create_model, epochs=100, batch_size = 10, verbose=0)
  62. init = ['glorot_uniform', 'normal', 'uniform']
  63. param_grid = dict(neurons = neurons, nLayer= nLayer, learn_rate = learn_rate, momentum = momentum)
  64. grid = GridSearchCV(estimator=model, param_grid = param_grid)
  65. grid_result = grid.fit(movie_norm, frhist)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement