import time

import numpy as np
import pandas as pd
import tensorflow as tf

# fixed seed so the uniform sample on [0, 1) is reproducible
prng = np.random.RandomState(1234567891)
x = prng.rand(10000, 1)
# x = np.random.rand(10000, 1)
y = x
print(f"minimum = {x.min()}")
time.sleep(1)
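
# With y == x this is effectively an identity-regression sanity check:
# a well-trained network should drive the MSE toward zero and predict
# outputs almost equal to the inputs.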
def create_model():
    # dropout_nodes = 0.1
    dropout_nodes = 0.0
    # intermediary_activation = 'linear'
    intermediary_activation = 'relu'
    final_activation = 'linear'
    # initialize sequential model
    model = tf.keras.models.Sequential()
    layer_nodes = [16, 8, 4, 2]
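    # funnel architecture: hidden widths shrink 16 -> 8 -> 4 -> 2
    # before the single output unit added below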
    for i, layer_node in enumerate(layer_nodes):
        if i == 0:
            # first layer
            model.add(tf.keras.layers.Dense(layer_node, input_dim=1))
            model.add(tf.keras.layers.Activation(intermediary_activation))
            model.add(tf.keras.layers.Dropout(dropout_nodes))
            # model.add(tf.keras.layers.BatchNormalization())  # we can re-normalize the data after each layer
        else:
            # other layers
            model.add(tf.keras.layers.Dense(layer_node))
            model.add(tf.keras.layers.Activation(intermediary_activation))
            model.add(tf.keras.layers.Dropout(dropout_nodes))
            # model.add(tf.keras.layers.BatchNormalization())  # we can re-normalize the data after each layer
    # single linear output unit for regression
    model.add(tf.keras.layers.Dense(1))
    # model.add(tf.keras.layers.Activation('relu'))
    model.add(tf.keras.layers.Activation(final_activation))

    loss = 'mse'
    metrics = ["mae", "mape"]
    opt = tf.keras.optimizers.SGD(learning_rate=1e-2)
    # opt = tf.keras.optimizers.Adam(learning_rate=1e-3)
    model.compile(loss=loss, optimizer=opt, metrics=metrics)
    return model
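
# With validation_split=0.1, Keras holds out the *last* 10% of the samples
# (the split happens before any shuffling); shuffle=False additionally keeps
# the training batches in their original order.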
model = create_model()
history = model.fit(
    x=x,
    y=y,
    validation_split=0.1,
    shuffle=False,
    epochs=20,
    batch_size=32,
    verbose=1,
)
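
# history.history maps each loss/metric name (e.g. "loss", "val_mae") to its
# list of per-epoch values, handy for plotting learning curves.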
pred = model.predict(x)

# collect inputs, targets, and predictions side by side for inspection
df = pd.DataFrame(x, columns=["x"])
df['y'] = y
df['pred'] = pred

# the dict comprehension assumes model.metrics is ordered the same way as the
# values returned by evaluate() (loss first, then the compiled metrics)
model_evaluation = model.evaluate(x, y, verbose=2)
dict_model_evaluation = {k.name: model_evaluation[i] for i, k in enumerate(model.metrics)}
print(dict_model_evaluation)
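
# Quick illustrative follow-up: predict on a few evenly spaced points;
# for the identity target the output should be close to the inputs.
x_new = np.linspace(0.0, 1.0, 5).reshape(-1, 1)
print(model.predict(x_new, verbose=0))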