Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
from keras.models import Sequential
from keras.layers import Dense, Dropout, LeakyReLU
# The optimizer class is `Adam` (capitalized); the original lowercase
# `adam` import fails on standard Keras releases.
from keras.optimizers import Adam

# Width multiplier for the hidden layers; the network tapers 4x -> 4x -> 4x -> 2x -> 1x.
size_factor = 128

# Binary classifier: 10 input features -> single sigmoid probability.
# LeakyReLU is added as its own layer (rather than an activation string)
# so its negative-slope behavior applies after each Dense layer.
predictor = Sequential()
predictor.add(Dense(4 * size_factor, input_dim=10))
predictor.add(LeakyReLU())
predictor.add(Dense(4 * size_factor))
predictor.add(LeakyReLU())
predictor.add(Dense(4 * size_factor))
predictor.add(LeakyReLU())
predictor.add(Dropout(0.3))  # regularize the deeper, wider layers
predictor.add(Dense(2 * size_factor))
predictor.add(LeakyReLU())
predictor.add(Dropout(0.3))
predictor.add(Dense(size_factor))
predictor.add(LeakyReLU())
predictor.add(Dropout(0.3))
predictor.add(Dense(1, activation='sigmoid'))

# Single optimizer instance passed directly to compile; the original
# created an extra, unused `adam()` instance (`ad`) that is dropped here.
predictor.compile(Adam(), loss='binary_crossentropy')
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement