Advertisement
Schlez

Shouldbe

Nov 15th, 2016
184
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
Python 3.87 KB | None | 0 0
import tensorflow as tf

# Start from a clean graph so re-running this script (e.g. in a notebook)
# does not accumulate duplicate ops and variables.
tf.reset_default_graph()

# Input properties: dimensions of one input sample (a 13 x 18 grid).
xSize = 13 #xData[0].shape[1] #13
ySize = 18 #xData[0].shape[0] #18

# Network Parameter
n_input = xSize * ySize    # flattened input size (13 * 18 = 234)
n_hidden_1 = 600           # units in first hidden layer
n_hidden_2 = 500           # units in second hidden layer

learning_rate = 0.1
power_t=0.25               # NOTE(review): unused in the visible code — learning-rate decay exponent? confirm
#epoch = 100000
epoch = 100000             # number of training iterations
dropout = 1.0              # keep-probability; 1.0 effectively disables dropout
batch_size = 100
bias_start = 0.0           # initial value for every bias vector
  21.  
  22. def reg_perceptron(t, weights, biases):
  23.     t = tf.nn.relu(tf.add(tf.matmul(t, weights['h1']), biases['b1']), name = "layer_1")
  24.     t = tf.nn.sigmoid(tf.add(tf.matmul(t, weights['h2']), biases['b2']), name = "layer_2")
  25.     t = tf.add(tf.matmul(t, weights['hOut'], name="LOut_MatMul"), biases['bOut'], name="LOut_Add")
  26.  
  27.     return tf.reshape(t, [-1], name="Y_GroundTruth")
  28.  
  29. def sum_of_squares(predictions, targets):
  30.     predictions.get_shape().assert_is_compatible_with(targets.get_shape()) # Just a check
  31.     predictions = tf.to_float(predictions) * const200
  32.     targets = tf.to_float(targets) * const200
  33.    
  34.     losses = tf.square(tf.sub(predictions, targets))
  35.     RMSE = tf.sqrt(tf.reduce_mean(losses))    
  36.     return RMSE
  37.  
  38.  
# Tensor placeholders and variables
_x = tf.placeholder(tf.float32, [None, n_input], name="X_Input")    # flattened input batch
_y = tf.placeholder(tf.float32, [None], name="Y_GroundTruth")       # one scalar target per sample
# Learning rate is fed per step so it can be decayed from the training loop.
_adaptive_learning_rate = tf.placeholder(tf.float32, shape=[], name="adaptive_learning_rate")
_epoch_count = tf.Variable(0, dtype=tf.float32, name="epoch_count")

# Bookkeeping variable so the latest cost shows up in TensorBoard summaries.
_cost = tf.Variable(1, dtype=tf.float32, name="cost")
# NOTE(review): variable_summaries is not defined in the visible code —
# presumably a TensorBoard summary helper defined elsewhere; confirm it exists.
variable_summaries(_cost, "cost")

# NOTE(review): test_y (test-set targets) must already be defined at this
# point — it is not visible in this chunk.
_error = tf.Variable(tf.zeros([len(test_y)]), dtype=tf.float32, name="error")
variable_summaries(_error, "error")

# Scaling constant applied to predictions/targets inside sum_of_squares.
const200 = tf.constant(200.0, dtype=tf.float32)
  53.  
  54.  
# Network weights and biases
# Why xavier initialization should be good: https://www.quora.com/What-is-an-intuitive-explanation-of-the-Xavier-Initialization-for-Deep-Neural-Networks
# Paper: http://jmlr.org/proceedings/papers/v9/glorot10a/glorot10a.pdf
# NOTE(review): `vs` and `init_ops` are not imported in the visible code —
# presumably tensorflow.python.ops.variable_scope / init_ops; confirm the
# imports exist elsewhere in the file.
rg_weights = {
    'h1': vs.get_variable("weights0", [n_input, n_hidden_1], initializer=tf.contrib.layers.xavier_initializer()),
    'h2': vs.get_variable("weights1", [n_hidden_1, n_hidden_2], initializer=tf.contrib.layers.xavier_initializer()),
    'hOut': vs.get_variable("weightsOut", [n_hidden_2, 1], initializer=tf.contrib.layers.xavier_initializer())
}

# Attach TensorBoard summaries to every weight matrix.
# (.iteritems() is Python 2 only — consistent with the Python 2 print
# statement used later in this file; change to .items() when porting to 3.)
for key, value in rg_weights.iteritems():
    variable_summaries(value, 'weights/' + key)

# All biases start at the constant `bias_start` (0.0).
rg_biases = {
    'b1': vs.get_variable("bias0", [n_hidden_1], initializer=init_ops.constant_initializer(bias_start)),
    'b2': vs.get_variable("bias1", [n_hidden_2], initializer=init_ops.constant_initializer(bias_start)),
    'bOut': vs.get_variable("biasOut", [1], initializer=init_ops.constant_initializer(bias_start))
}
# Attach TensorBoard summaries to every bias vector.
for key, value in rg_biases.iteritems():
    variable_summaries(value, 'biases/' + key)
  74.  
# Network layer definitions
pred = reg_perceptron(_x, rg_weights, rg_biases)
print(str(pred))

# Definition of cost function: RMSE of the 200-scaled predictions vs targets.
cost = sum_of_squares(pred, _y)

# Create optimizer; the learning rate is a placeholder (_adaptive_learning_rate)
# so the training loop can feed a decayed value each step.
optimizer =  tf.train.AdagradOptimizer(learning_rate=_adaptive_learning_rate).minimize(cost)

# Create summary for TensorBoard
merged_summary = tf.merge_all_summaries()
# NOTE(review): requires `import time` and the variables `angle` / `file_count`,
# none of which are visible in this chunk — confirm they are defined earlier.
timestamp = int(time.time())
print "Starting session, TS =", timestamp
train_writer = tf.train.SummaryWriter('/data/tensorboard/_' + angle + str(file_count) + '_' + str(timestamp)+ '_Leslie' + '/', graph=tf.get_default_graph())
  90.  
  91.  
  92. # Session operations
  93. init_op = tf.initialize_all_variables()
  94. inc_epoch_op = _epoch_count.assign_add(1.0) # increase epoch counter by 1
  95. saver = tf.train.Saver()
  96. sess = tf.Session()
  97.  
  98. #saver.restore(sess, model_save_path)
  99. saver.restore(sess, "/data/tensorboard/models/Y6_1476978999")
  100. sess.run(tf.initialize_all_variables())
  101.  
  102. with tf.Session() as sess:
  103.     saver.restore(sess, "/data/tensorboard/models/Y6_1476978999")
  104.     feed_dict={_x: test_x, _y: test_y}
  105.     pred_y, cost = sess.run([pred, cost], feed_dict=feed_dict)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement