Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
from keras.callbacks import ModelCheckpoint
from keras.models import Sequential
from keras.layers import Dense, Activation, Flatten
from keras import optimizers

# Feed-forward regression network: input -> three 128-unit ReLU hidden
# layers -> single linear output unit, trained with MSE.
NN_model = Sequential()

# Input layer.
# NOTE(review): assumes X_train (2-D, samples x features) is defined
# earlier in the file — input_dim is its feature count; confirm upstream.
NN_model.add(Dense(64, kernel_initializer='random_uniform',
                   input_dim=X_train.shape[1], activation='relu'))

# Hidden layers.
NN_model.add(Dense(128, kernel_initializer='random_uniform', activation='relu'))
NN_model.add(Dense(128, kernel_initializer='random_uniform', activation='relu'))
NN_model.add(Dense(128, kernel_initializer='random_uniform', activation='relu'))

# Output layer: one linear unit (regression target).
NN_model.add(Dense(1, kernel_initializer='random_uniform', activation='linear'))

# NOTE(review): lr=0.1 is unusually high for Adam (library default is
# 0.001) — confirm this is intentional.
adam = optimizers.Adam(lr=0.1, beta_1=0.9, beta_2=0.999, epsilon=None,
                       decay=0.0, amsgrad=False)

# Compile the network.
# BUG FIX: the original passed optimizer=sgd, an undefined name that
# raises NameError — the Adam instance built above is the intended optimizer.
NN_model.compile(loss='mean_squared_error', optimizer=adam,
                 metrics=['mean_squared_error'])

NN_model.summary()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement