Advertisement
padznich

neural_net_test.py

Nov 20th, 2018
398
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
Python 2.70 KB | None | 0 0
  1. import tensorflow as tf
  2.  
  3. raw_data = open("./data_sets/test_data_udp.txt"). readlines()
  4. raw_labels = open("./data_sets/test_label_udp.txt"). readlines()
  5.  
  6. x = []
  7. y = []
  8.  
  9. for _ in raw_data:
  10.     _ = _.split()
  11.     x.append([int(_[0]), int(_[1]), int(_[2]), int(_[3])])
  12.  
  13.  
  14. for _ in raw_labels:
  15.     y.append([0, 1])
  16.  
  17.  
  18. nodesForLayerInput = 4
  19. nodesForLayer1 = 50
  20. nodesForLayer2 = 50
  21. nodesForLayer3 = 50
  22. nodesForLayerOut = 1
  23.  
  24. numberOfClassesOut = 2
  25.  
  26. data = tf.placeholder('float', shape=[None, 4])
  27. label = tf.placeholder('float')
  28.  
  29. layer1 = {
  30.     'w': tf.Variable(tf.zeros([4, nodesForLayer1])),
  31.     'b': tf.Variable(tf.zeros([nodesForLayer1]))
  32. }
  33.  
  34.  
  35. layer2 = {
  36.     'w': tf.Variable(tf.zeros([nodesForLayer1, nodesForLayer2])),
  37.     'b': tf.Variable(tf.zeros([nodesForLayer2]))
  38. }
  39.  
  40. layer3 = {
  41.     'w': tf.Variable(tf.zeros([nodesForLayer2, nodesForLayer3])),
  42.     'b': tf.Variable(tf.zeros([nodesForLayer3]))
  43. }
  44.  
  45. layerOut = {
  46.     'w': tf.Variable(tf.zeros([nodesForLayer3, numberOfClassesOut])),
  47.     'b': tf.Variable(tf.zeros([numberOfClassesOut]))
  48. }
  49.  
  50. saver = tf.train.Saver()
  51.  
  52.  
  53. def graph(_data):
  54.     ans_layer1 = tf.nn.relu(tf.add(tf.matmul(_data, layer1['w']), layer1['b']))
  55.     ans_layer2 = tf.nn.relu(tf.add(tf.matmul(ans_layer1, layer2['w']), layer2['b']))
  56.     ans_layer3 = tf.nn.relu(tf.add(tf.matmul(ans_layer2, layer3['w']), layer3['b']))
  57.    
  58.     ans_layer_out = tf.add(tf.matmul(ans_layer3, layerOut['w']), layerOut['b'])
  59.     return ans_layer_out
  60.  
  61.  
  62. def train(_x):
  63.    
  64.     prediction = graph(_x)
  65.    
  66.     cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(_sentinel=None,
  67.                                                                   logits=prediction,
  68.                                                                   labels=label,
  69.                                                                   dim=-1,
  70.                                                                   name=None))
  71.     optimiser = tf.train.AdamOptimizer().minimize(cost)
  72.    
  73.     n_epochs = 1
  74.    
  75.     with tf.Session() as sess:
  76.        
  77.         sess.run(tf.global_variables_initializer())
  78.        
  79.         saver.restore(sess, "./model_test/model_train.ckpt")
  80.        
  81.         for epoch in range(n_epochs):
  82.            
  83.             epoch_loss = 0
  84.            
  85.             for i in range(100):
  86.                
  87.                 i, c = sess.run([optimiser, cost], feed_dict={data: x, label: y})
  88.                
  89.                 epoch_loss += c
  90.                 print(c)
  91.        
  92.         correct = tf.equal(tf.argmax(prediction, 1), tf.argmax(label, 1))
  93.         accuracy = tf.reduce_mean(tf.cast(correct, 'float'))
  94.         print("Accuracy ", accuracy.eval({data: x, label: y}))
  95.  
  96.  
# Entry point: restore the checkpoint, optimise on the test set, and
# print accuracy, feeding the `data` placeholder as the graph input.
train(data)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement