Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
def build_mlp(input_placeholder, output_size, scope, n_layers, size, activation=tf.tanh, output_activation=None):
    """
    Builds a feedforward neural network.

    arguments:
        input_placeholder: placeholder variable for the state (batch_size, input_size)
        output_size: size of the output layer
        scope: variable scope of the network
        n_layers: number of hidden layers
        size: dimension of the hidden layers
        activation: activation of the hidden layers
        output_activation: activation of the output layer

    returns:
        output tensor of the network (the result of a forward pass)

    Hint: use tf.layers.dense
    """
    # Build all layers under the given variable scope so the network's
    # weights are namespaced by `scope` (previously the parameter was
    # accepted but never used).
    with tf.variable_scope(scope):
        out = input_placeholder
        # NOTE(review): the original code creates n_layers - 1 hidden layers
        # plus one output layer; preserved as-is to keep behavior identical.
        for _ in range(n_layers - 1):
            out = tf.layers.dense(inputs=out, units=size, activation=activation)
        out = tf.layers.dense(inputs=out, units=output_size, activation=output_activation)
    return out
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement