Untitled

a guest
May 24th, 2019
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from tensorflow.keras.callbacks import CSVLogger

# utils.load_data() and nn_model() are project-specific helpers defined elsewhere.
import utils

data, y = utils.load_data()

# One-hot encoding of the target vector
Y = pd.get_dummies(y).values
n_classes = Y.shape[1]

# Log per-epoch training metrics to a CSV file (log filename assumed)
csv_logger = CSVLogger('training.log')

for nodes in [300]:
    for key, X in data.items():
        print('Running:', key, nodes, X.shape)

        # Split into training and test sets, stratified on the original class labels
        X_train, X_test, Y_train, Y_test = train_test_split(
            X, Y, test_size=0.2, random_state=1, stratify=y)

        # Standardize features to mean 0 and std 1, fitting only on the training split
        scaler = StandardScaler()
        X_scaled = scaler.fit_transform(X_train)

        # Build and train the neural network model
        n_features = X.shape[1]
        model = nn_model(n_features, n_classes, nodes, 0.2)
        history = model.fit(X_scaled, Y_train,
                            epochs=100,
                            batch_size=5,
                            verbose=1,
                            callbacks=[csv_logger])
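
The snippet calls nn_model(n_features, n_classes, nodes, 0.2) without defining it. A minimal sketch of what such a builder could look like, assuming a single hidden layer of `nodes` units, the fourth argument used as a dropout rate, and a softmax output sized to the one-hot encoded targets built above:

# Hypothetical sketch of nn_model (not the original author's definition).
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout

def nn_model(n_features, n_classes, nodes, dropout_rate):
    model = Sequential([
        Dense(nodes, activation='relu', input_shape=(n_features,)),
        Dropout(dropout_rate),
        Dense(n_classes, activation='softmax'),
    ])
    # Categorical cross-entropy matches the one-hot target matrix Y
    model.compile(optimizer='adam',
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    return model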