Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# Build a small TF 1.x CNN graph (3 conv stages with dropout + max-pool,
# then two dense layers) and save an initialized checkpoint.
#
# Relies on helpers defined elsewhere in this file — signatures inferred
# from the call sites, TODO confirm:
#   convolution_layer(inputs, kernel_size, stride, filters, weight_decay, index)
#   fully_connected_layers(inputs, units, index, weight_decay)
# `training_images` (a NumPy-like array of grayscale images, NHWC with C=1)
# must also be in scope before this runs.

# Boolean switch fed at run time so dropout is active only during training.
isTraining = tf.placeholder(tf.bool, name='isTraining')

# Grayscale input batch: (batch, H, W, 1), H/W taken from the training data.
images_placeholder = tf.placeholder(
    tf.float32,
    [None, training_images.shape[1], training_images.shape[2], 1],
    name="input_node")

with tf.name_scope('Conv1'):
    conv1 = convolution_layer(images_placeholder, 5, 2, 6, 0.001, 1)
    dropout_layer_1 = tf.layers.dropout(conv1, rate=0.5,
                                        training=isTraining, name="drop1")
    pool1 = tf.nn.max_pool(dropout_layer_1, [1, 2, 2, 1], [1, 2, 2, 1],
                           'SAME', data_format='NHWC', name="pooling_1")

with tf.name_scope('Conv2'):
    conv2 = convolution_layer(pool1, 5, 2, 6, 0.001, 2)
    dropout_layer_2 = tf.layers.dropout(conv2, rate=0.5,
                                        training=isTraining, name="drop2")
    pool2 = tf.nn.max_pool(dropout_layer_2, [1, 2, 2, 1], [1, 2, 2, 1],
                           'SAME', data_format='NHWC', name="pooling_2")

with tf.name_scope('Conv3'):
    conv3 = convolution_layer(pool2, 5, 2, 6, 0.001, 3)

# Flatten the last conv output for the dense head. 216 is the hard-coded
# flattened size of conv3 — it is only valid for the input resolution this
# was written for; TODO confirm against training_images.shape.
flatten = tf.reshape(conv3, [-1, 216])

with tf.name_scope('Dense1'):
    dense1 = fully_connected_layers(flatten, 40, 5, 0.01)
with tf.name_scope('Dense2'):
    dense2 = fully_connected_layers(dense1, 4, 6, 0.001)

# Final non-linearity; second positional argument of tf.nn.relu is `name`.
features_output = tf.nn.relu(dense2, "output_node")

with tf.Session() as sess:
    saver = tf.train.Saver()
    # BUG FIX: variables must be initialized before Saver.save(); without
    # this, save() raises FailedPreconditionError on uninitialized variables.
    sess.run(tf.global_variables_initializer())
    # tf.train.write_graph(sess.graph.as_graph_def(), "", "input_graph.pb")
    saver.save(sess, './with_dropout.ckpt')
Add Comment
Please, Sign In to add comment