Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- ####### What if ########
- from keras.models import Sequential
- from keras.layers import Dense
- from keras.wrappers.scikit_learn import KerasClassifier, KerasRegressor
- from sklearn.model_selection import StratifiedKFold, KFold
- from sklearn.model_selection import cross_val_score
- import numpy
- from keras import regularizers
- # KFold vs StratifiedKFold
# One can be a victim of skewed target values with random subsampling and K-fold, which can be
# fixed by stratification. Stratification makes sure that you get a similar target distribution
# in each of your folds (chunks) of your data.
# Cross validation allows us to compare different ML algorithms.
- # Function to create model, required for KerasClassifier
def create_model(input_dim=None):
    """Build and compile a small L2-regularized MLP with two probability outputs.

    Required by the scikit-learn Keras wrapper, which calls this factory
    with no arguments for every cross-validation fold.

    Parameters
    ----------
    input_dim : int, optional
        Number of input features. Defaults to ``len(counts)``, where
        ``counts`` is a module-level object defined elsewhere in the
        script (per the original comment, ~5191 inputs) — TODO confirm
        it is in scope when this runs.

    Returns
    -------
    keras.models.Sequential
        A compiled model ready for ``fit``.
    """
    if input_dim is None:
        input_dim = len(counts)

    model = Sequential()
    # Bug fix: the original called `layers.Dense` / `layers.Dropout`, but
    # `layers` was never imported (only `Dense` was) — that raised a
    # NameError. Use the directly imported classes instead.
    #
    # L2 weight decay plus heavy dropout: the input is wide relative to
    # only 10 hidden units, so overfitting is the main risk.
    model.add(Dense(10, activation='relu',
                    kernel_regularizer=regularizers.l2(0.001),
                    input_shape=(input_dim,)))
    model.add(Dropout(0.6))
    model.add(Dense(10, activation='relu',
                    kernel_regularizer=regularizers.l2(0.001)))
    model.add(Dropout(0.5))
    # Two per-class probability outputs (original note suggests trying 'selu').
    model.add(Dense(2, activation='sigmoid'))
    # NOTE(review): 'mse' on sigmoid class outputs does train, but
    # 'categorical_crossentropy' (ideally with softmax) is the
    # conventional loss for 2-way classification — consider switching.
    # Original note also suggests trying the 'adamax' optimizer.
    model.compile(optimizer='rmsprop', loss='mse', metrics=['accuracy'])
    return model
# Wrap the model factory so scikit-learn can treat the network as an estimator.
# Bug fix: create_model builds a 2-class classifier (probability outputs,
# accuracy metric, and the factory's own comment says "required for
# KerasClassifier"), so the wrapper must be KerasClassifier, not KerasRegressor.
model = KerasClassifier(build_fn=create_model, epochs=20, batch_size=10, verbose=0)

# Evaluate with 5-fold cross validation (the original comment said 10-fold,
# but n_splits=5 is what the code actually uses). shuffle + fixed seed makes
# the fold assignment reproducible.
kfold = KFold(n_splits=5, shuffle=True, random_state=1)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement