import numpy as np
import tensorflow as tf

with tf.device("/gpu:0"):
    # Placeholders: inputs, targets, and a boolean `phase` flag that switches
    # batch norm between training (True) and inference (False) behaviour.
    x = tf.placeholder(tf.float32, [None, feature_dim], name="input")
    y_ = tf.placeholder(tf.float32, [None, output_dim], name="output")
    phase = tf.placeholder(tf.bool, name="phase")

    # Weights use stddev 1/sqrt(fan_in) so the pre-activation variance stays
    # roughly constant from layer to layer.
    weights = {
        'hidden1': tf.Variable(tf.random_normal(
            [feature_dim, hidden_num_units1], stddev=1 / np.sqrt(feature_dim)), name="w1"),
        'hidden2': tf.Variable(tf.random_normal(
            [hidden_num_units1, hidden_num_units2], stddev=1 / np.sqrt(hidden_num_units1)), name="w2"),
        'output': tf.Variable(tf.random_normal(
            [hidden_num_units2, output_num_units], stddev=1 / np.sqrt(hidden_num_units2)), name="w3"),
    }
    biases = {
        'hidden1': tf.Variable(tf.random_normal(
            [hidden_num_units1], stddev=1 / np.sqrt(hidden_num_units1)), name="b1"),
        'hidden2': tf.Variable(tf.random_normal(
            [hidden_num_units2], stddev=1 / np.sqrt(hidden_num_units2)), name="b2"),
        'output': tf.Variable(tf.random_normal(
            [output_num_units], stddev=1 / np.sqrt(output_num_units)), name="b3"),
    }
# Variant 1: two hidden layers, each batch-normalised before its ReLU,
# and a batch-normalised sigmoid output.
def network1(data):
    with tf.name_scope("layer_1"):
        h1 = tf.add(tf.matmul(data, weights['hidden1']), biases['hidden1'])
        h1_BN = tf.contrib.layers.batch_norm(h1, center=True, scale=True,
                                             is_training=phase,
                                             scope='hidden_layer1_bn')
        h1_relu = tf.nn.relu(h1_BN)
    with tf.name_scope("layer_2"):
        h2 = tf.add(tf.matmul(h1_relu, weights['hidden2']), biases['hidden2'])
        h2_BN = tf.contrib.layers.batch_norm(h2, center=True, scale=True,
                                             is_training=phase,
                                             scope='hidden_layer2_bn')
        h2_relu = tf.nn.relu(h2_BN)
    with tf.name_scope("output_layer"):
        output_layer = tf.add(tf.matmul(h2_relu, weights['output']), biases['output'])
        output_layer_BN = tf.contrib.layers.batch_norm(output_layer,
                                                       center=True, scale=True,
                                                       is_training=phase,
                                                       scope='output_bn')
        output = tf.sigmoid(output_layer_BN, name="f")
    return output
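
# Usage sketch for the batch-norm variant (an assumption, not part of the
# original paste): tf.contrib.layers.batch_norm registers its moving-average
# updates in tf.GraphKeys.UPDATE_OPS, so the train op must depend on them or
# the statistics used when phase=False never change. `train_x`, `train_y`,
# `test_x`, and `num_epochs` are hypothetical names.
pred = network1(x)
loss = tf.losses.log_loss(labels=y_, predictions=pred)
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
    train_op = tf.train.AdamOptimizer(1e-3).minimize(loss)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(num_epochs):
        sess.run(train_op, feed_dict={x: train_x, y_: train_y, phase: True})
    # Inference: phase=False makes batch norm use its moving averages.
    predictions = sess.run(pred, feed_dict={x: test_x, phase: False})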

# Variant 2: the same architecture with dropout instead of batch norm.
# Renamed from the paste's duplicate `network1` so it no longer shadows the
# function above; it also builds its own weights and biases.
keep_prob = tf.placeholder(tf.float32, name="keep_prob")  # dropout keep probability

def network2(data):
    weights = {
        'hidden1': tf.Variable(tf.random_normal(
            [feature_dim, hidden_num_units1], stddev=1 / np.sqrt(feature_dim)), name="w1"),
        'hidden2': tf.Variable(tf.random_normal(
            [hidden_num_units1, hidden_num_units2], stddev=1 / np.sqrt(hidden_num_units1)), name="w2"),
        'output': tf.Variable(tf.random_normal(
            [hidden_num_units2, output_num_units], stddev=1 / np.sqrt(hidden_num_units2)), name="w3"),
    }
    biases = {
        'hidden1': tf.Variable(tf.random_normal(
            [hidden_num_units1], stddev=1 / np.sqrt(hidden_num_units1)), name="b1"),
        'hidden2': tf.Variable(tf.random_normal(
            [hidden_num_units2], stddev=1 / np.sqrt(hidden_num_units2)), name="b2"),
        'output': tf.Variable(tf.random_normal(
            [output_num_units], stddev=1 / np.sqrt(output_num_units)), name="b3"),
    }
    with tf.name_scope("layer_1"):
        hidden_layer1 = tf.add(tf.matmul(data, weights['hidden1']), biases['hidden1'])
        hidden_layer1 = tf.nn.relu(hidden_layer1)
    with tf.name_scope("layer_1_dropout"):
        drop_out1 = tf.nn.dropout(hidden_layer1, keep_prob)  # DROP-OUT here
    with tf.name_scope("layer_2"):
        hidden_layer2 = tf.add(tf.matmul(drop_out1, weights['hidden2']), biases['hidden2'])
        hidden_layer2 = tf.nn.relu(hidden_layer2)
    with tf.name_scope("layer_2_dropout"):
        drop_out2 = tf.nn.dropout(hidden_layer2, keep_prob)  # DROP-OUT here
    with tf.name_scope("output_layer"):
        output = tf.add(tf.matmul(drop_out2, weights['output']), biases['output'])
        output = tf.nn.sigmoid(output, name="f")
    return output
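
# Usage sketch for the dropout variant (again an assumption): keep_prob is fed
# at run time, below 1.0 while training and exactly 1.0 at evaluation so the
# whole network is used. Same hypothetical data names as above.
pred2 = network2(x)
loss2 = tf.losses.log_loss(labels=y_, predictions=pred2)
train_op2 = tf.train.AdamOptimizer(1e-3).minimize(loss2)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(num_epochs):
        # Keep 80% of hidden units on each training step.
        sess.run(train_op2, feed_dict={x: train_x, y_: train_y, keep_prob: 0.8})
    # Evaluation: disable dropout by keeping every unit.
    predictions2 = sess.run(pred2, feed_dict={x: test_x, keep_prob: 1.0})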