Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# TensorFlow 1.x MNIST example.
# NOTE(review): tensorflow.examples.tutorials was removed in TF 2.x — this
# file assumes a TF 1.x installation; confirm before upgrading.
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
# Downloads/caches MNIST under /tmp/data/; one_hot=True yields 10-vector labels.
mnist = input_data.read_data_sets("/tmp/data/", one_hot = True)
# Mini-batch size used by the training loop below.
batch_size = 100
#MODEL
def neural_network_model(data):
    """Build a small CNN for MNIST: conv(5x5, 32) -> maxpool(2x2) -> FC(100) -> 10 logits.

    Args:
        data: batch of flattened images, shape [None, 784].

    Returns:
        Linear logits tensor of shape [None, 10]. No activation is applied —
        the loss (softmax_cross_entropy_with_logits_v2) applies softmax itself.
    """
    weights = {
        'W_conv': tf.Variable(tf.random_normal([5, 5, 1, 32])),
        'W_fc': tf.Variable(tf.random_normal([14 * 14 * 32, 100])),
        'W_out': tf.Variable(tf.random_normal([100, 10])),
    }
    biases = {
        'b_conv': tf.Variable(tf.random_normal([32])),
        'b_fc': tf.Variable(tf.random_normal([100])),
        'b_out': tf.Variable(tf.random_normal([10])),
    }

    # Restore the 2-D image layout conv2d expects: [batch, 28, 28, 1].
    x = tf.reshape(data, shape=[-1, 28, 28, 1])

    c = tf.nn.conv2d(x, weights['W_conv'], strides=[1, 1, 1, 1], padding='SAME')
    c += biases['b_conv']
    c = tf.nn.relu(c)

    # 2x2 max pooling halves each spatial dimension: 28x28 -> 14x14.
    m = tf.nn.max_pool(c, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')

    fc = tf.reshape(m, [-1, 14 * 14 * 32])
    fc = tf.nn.relu(tf.add(tf.matmul(fc, weights['W_fc']), biases['b_fc']))

    # BUG FIX: the original wrapped this layer in tf.nn.relu. ReLU clamps all
    # negative logits to zero, which cripples softmax cross-entropy training —
    # the loss expects unbounded linear logits. Return the linear layer.
    output = tf.matmul(fc, weights['W_out']) + biases['b_out']
    return output
# GRAPH DEFINITION
# Placeholders: flattened 28x28 images and their one-hot labels.
x = tf.placeholder('float', [None, 784])
y = tf.placeholder('float')

# Forward pass producing per-class logits.
predictions = neural_network_model(x)

# Mean softmax cross-entropy over the batch, minimized with Adam defaults.
cross_entropy = tf.nn.softmax_cross_entropy_with_logits_v2(
    logits=predictions, labels=y)
cost = tf.reduce_mean(cross_entropy)
optimizer = tf.train.AdamOptimizer().minimize(cost)

# Saver for checkpointing the trained variables at the end of the session.
saver = tf.train.Saver()
# SESSION: train for a few epochs, then report held-out accuracy and save.
hm_epochs = 3
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())

    for epoch in range(hm_epochs):
        epoch_loss = 0
        # Floor division: any trailing partial batch is skipped, as before.
        for _ in range(mnist.train.num_examples // batch_size):
            epoch_x, epoch_y = mnist.train.next_batch(batch_size)
            _, batch_cost = sess.run([optimizer, cost],
                                     feed_dict={x: epoch_x, y: epoch_y})
            epoch_loss += batch_cost
        print("Epoch", epoch, 'completed out of', hm_epochs, 'loss:', epoch_loss)

    correct = tf.equal(tf.argmax(predictions, 1), tf.argmax(y, 1))
    accuracy = tf.reduce_mean(tf.cast(correct, 'float'))
    # BUG FIX: the original evaluated accuracy on the TRAINING images/labels,
    # which overstates generalization. Report accuracy on the held-out test
    # split instead.
    print("Accuracy:", accuracy.eval({x: mnist.test.images, y: mnist.test.labels}))

    save_path = saver.save(sess, "/tmp/model.ckpt")
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement