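# Note: the snippet below references several names it never defines
# (total_frames, total_frames_with_deltas, dim, window_height, batch_size,
# splits, weights, train_generator, test_generator). The block that follows is
# a minimal, purely illustrative stand-in (assumed values and random dummy
# data, not the author's actual feature pipeline) so that every name the
# snippet uses resolves to something.
import numpy as np

dim = 40                                           # per the comment inside model3
window_height = 8                                  # per the comment inside model3
stride = 1
splits = ((dim - window_height) + 1) // stride     # = 33, matching the comment inside model3
total_frames = 45                                  # assumed value
total_frames_with_deltas = 3 * total_frames        # assumed value
batch_size = 100                                   # assumed value


def train_generator(batch_size):
    # Dummy generator: one array per model input, each shaped
    # (batch, window_height, total_frames_with_deltas, 3), plus random targets
    # shaped (batch, 1, 145) to match the softmax output of model3.
    while True:
        x = [np.random.rand(batch_size, window_height, total_frames_with_deltas, 3)
             for _ in range(splits)]
        y = np.random.rand(batch_size, 1, 145)
        yield x, y


def test_generator():
    return train_generator(1)
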
# Imports assumed by the snippet (not included in the original paste).
import sys

import keras
from keras import metrics
from keras.layers import Input, Conv2D, MaxPooling2D, Dense, Reshape
from keras.models import Model
from keras.callbacks import ReduceLROnPlateau, EarlyStopping, CSVLogger, ModelCheckpoint
from keras.wrappers.scikit_learn import KerasClassifier
from sklearn.model_selection import GridSearchCV


def model3(kernel_number=200, kernel_shape=(window_height, 3)):
    # stride = 1
    # dim = 40
    # window_height = 8
    # splits = ((40-8)+1)/1 = 33
    # next(test_generator())
    # next(train_generator(batch_size))

    # kernel_number = 200

    # One input per split, each a (window_height, total_frames_with_deltas, 3) patch.
    list_of_input = [Input(shape=(window_height, total_frames_with_deltas, 3)) for i in range(splits)]
    list_of_conv_output = []
    list_of_max_out = []
    for i in range(splits):
        if splits == 1:
            list_of_conv_output.append(Conv2D(filters=kernel_number, kernel_size=kernel_shape, activation='relu')(list_of_input[i]))
            list_of_max_out.append(MaxPooling2D(pool_size=(1, 11))(list_of_conv_output[i]))
        else:
            list_of_conv_output.append(Conv2D(filters=200, kernel_size=(window_height, 3), activation='relu')(list_of_input[i]))
            list_of_max_out.append(MaxPooling2D(pool_size=(1, 11))(list_of_conv_output[i]))

    # Concatenate the pooled outputs of all splits and flatten them into a single timestep.
    merge = keras.layers.concatenate(list_of_max_out)
    print(merge.shape)
    reshape = Reshape((total_frames // total_frames, -1))(merge)

    dense1 = Dense(units=1000, activation='relu', name="dense_1")(reshape)
    dense2 = Dense(units=1000, activation='relu', name="dense_2")(dense1)
    dense3 = Dense(units=145, activation='softmax', name="dense_3")(dense2)

    model = Model(inputs=list_of_input, outputs=dense3)
    model.compile(loss="categorical_crossentropy", optimizer="SGD", metrics=[metrics.categorical_accuracy])

    # Callbacks (only log and checkpoint are passed to the commented-out fit_generator call).
    reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=3, verbose=1, mode='auto', epsilon=0.001, cooldown=0)
    stop = EarlyStopping(monitor='val_loss', min_delta=0, patience=5, verbose=1, mode='auto')
    log = CSVLogger('/home/keerthikan/kaldi-trunk/dnn/training_' + str(total_frames) + "_" + str(dim) + "_" + str(window_height) + "_" + str(batch_size) + ".csv")
    checkpoint = ModelCheckpoint(filepath="/media/keerthikan/E2302E68302E443F/Timit-dataset/timit/fbank/nn/" + str(total_frames) + "_" + str(dim) + "_" + str(window_height) + "_" + str(batch_size) + ".hdf5", save_best_only=True)

    # Optionally resume from existing weights; `weights` is expected to be defined
    # elsewhere (presumably taken from the command-line arguments).
    if len(sys.argv) == 7:
        model.load_weights(weights)

    print(model.summary())

    # raw_input("okay?")
    # hist_current = model.fit_generator(train_generator(batch_size),
    #                                    steps_per_epoch=10,
    #                                    epochs=100000,
    #                                    verbose=1,
    #                                    validation_data=test_generator(),
    #                                    validation_steps=1,
    #                                    pickle_safe=True,
    #                                    workers=4,
    #                                    callbacks=[log, checkpoint])
    return model


# model3()

# Grid-search the kernel count and kernel shape via the scikit-learn wrapper.
model = KerasClassifier(build_fn=model3, epochs=10, verbose=1)
kernel_number = [10, 50, 100, 150, 200, 250]
kernel_shape = [(window_height, 3), (window_height, 5), (window_height, 8)]
param_grid = dict(kernel_number=kernel_number, kernel_shape=kernel_shape)
grid = GridSearchCV(estimator=model, param_grid=param_grid)
train_input, train_output = next(train_generator(1))

grid_result = grid.fit(train_input, train_output)

print("Best: %f using %s" % (grid_result.best_score_, grid_result.best_params_))
means = grid_result.cv_results_['mean_test_score']
stds = grid_result.cv_results_['std_test_score']
params = grid_result.cv_results_['params']
for mean, stdev, param in zip(means, stds, params):
    print("%f (%f) with: %r" % (mean, stdev, param))