Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# Load the feature matrices and target labels (utils is project-local;
# `data` is iterated as a mapping of dataset-name -> feature matrix below).
data, y = utils.load_data()

# One-hot encode the target vector; Y gets one column per class.
Y = pd.get_dummies(y).values
n_classes = Y.shape[1]

for nodes in [300]:
    for key, X in data.items():
        print('Running : ', key, nodes, X.shape)

        # Split into train/test (80/20), stratified on the ORIGINAL labels y
        # so class proportions are preserved in both splits; fixed
        # random_state for reproducibility.
        X_train, X_test, Y_train, Y_test = train_test_split(
            X, Y, test_size=0.2, random_state=1, stratify=y)

        # Scale each training sample to unit L2 norm.
        # NOTE(review): sklearn's normalize() does row-wise unit-norm
        # scaling, NOT zero-mean/unit-std standardization (that would be
        # StandardScaler) — the original comment claimed "mean 0 and std 1".
        # Also X_test never receives the same transform here; confirm it is
        # normalized identically before any evaluation elsewhere.
        X_scaled = normalize(X_train)

        # Build and train the network. nn_model and csv_logger are defined
        # elsewhere in the project; 0.2 is presumably a dropout or
        # regularization rate — TODO confirm against nn_model's signature.
        n_features = X.shape[1]
        model = nn_model(n_features, n_classes, nodes, 0.2)
        history = model.fit(X_scaled, Y_train,
                            epochs=100,
                            batch_size=5,
                            verbose=1,
                            callbacks=[csv_logger])
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement