Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
def build_model(learning_rate, hidden_nodes=64, dropout_rate=0.2,
                activation_function=tf.nn.relu, input_dim=None):
    """Build and compile a two-hidden-layer regression MLP.

    Args:
        learning_rate: Learning rate for the RMSprop optimizer.
        hidden_nodes: Units in each of the two hidden Dense layers.
        dropout_rate: Dropout fraction applied after the first hidden layer.
        activation_function: Activation used by both hidden layers.
        input_dim: Number of input features. Defaults to the column count
            of the module-level ``train_dataset`` (kept for backward
            compatibility) — NOTE(review): confirm that global exists at
            call time.

    Returns:
        A compiled ``keras.Sequential`` model with one linear output unit,
        mean-squared-error loss, and MAE/MSE metrics.
    """
    if input_dim is None:
        # Original behavior: infer the feature count from the global dataset.
        input_dim = len(train_dataset.keys())

    model = keras.Sequential([
        layers.Dense(hidden_nodes, activation=activation_function,
                     input_shape=[input_dim]),
        layers.Dropout(dropout_rate),
        layers.Dense(hidden_nodes, activation=activation_function),
        layers.Dense(1),  # single linear unit: scalar regression output
    ])

    optimizer = tf.keras.optimizers.RMSprop(learning_rate)
    model.compile(loss='mean_squared_error',
                  optimizer=optimizer,
                  metrics=['mean_absolute_error', 'mean_squared_error'])
    return model
# NOTE(review): keras.wrappers.scikit_learn was removed from modern Keras;
# the maintained replacement is the third-party `scikeras` package
# (scikeras.wrappers.KerasRegressor). Left as-is to avoid a new dependency.
from keras.wrappers.scikit_learn import KerasRegressor
from sklearn.model_selection import RandomizedSearchCV

EPOCHS = 1000

# Candidate values defining the hyperparameter search space.
learning_rates = [0.001, 0.01, 0.1, 0.2, 0.3]
hidden_nodes = [16, 32, 40, 64, 80]
dropout_rates = [0.1, 0.2, 0.3, 0.4]
activation_functions = [tf.nn.relu, tf.nn.leaky_relu]

# Keys must match build_model's parameter names so RandomizedSearchCV
# can forward sampled values to the model builder.
hyperparameters = dict(
    learning_rate=learning_rates,
    hidden_nodes=hidden_nodes,
    dropout_rate=dropout_rates,
    activation_function=activation_functions,
)
# (Removed a bare `hyperparameters` expression statement — a leftover
# notebook echo with no effect when run as a script.)

# Wrap the Keras model builder so scikit-learn's search API can drive it.
model = KerasRegressor(build_fn=build_model, epochs=EPOCHS)
random_search = RandomizedSearchCV(estimator=model,
                                   param_distributions=hyperparameters)
random_search.fit(train_dataset, train_labels)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement