import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
from tensorflow.python.framework import ops


def forward_propagate(X, parameters):
    w1 = parameters["W1"]
    w2 = parameters["W2"]
    # Block 1: convolution, ReLU, 4x4 max pooling
    layer1 = tf.nn.conv2d(X, w1, strides=[1, 1, 1, 1], padding="SAME")
    layer1_activation = tf.nn.relu(layer1)
    layer1_output = tf.nn.max_pool(layer1_activation, ksize=[1, 4, 4, 1], strides=[1, 4, 4, 1], padding="SAME")
    # Block 2: convolution, ReLU, 4x4 max pooling
    layer2 = tf.nn.conv2d(layer1_output, w2, strides=[1, 1, 1, 1], padding="SAME")
    layer2_activation = tf.nn.relu(layer2)
    layer2_output = tf.nn.max_pool(layer2_activation, ksize=[1, 4, 4, 1], strides=[1, 4, 4, 1], padding="SAME")
    # Flatten and project to 10 class logits (softmax is applied in the cost)
    fltn = tf.contrib.layers.flatten(layer2_output)
    z3 = tf.contrib.layers.fully_connected(fltn, 10, activation_fn=None)
    return z3
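

# createPlaceholders and initialize_parameters are used below but are not part of this
# paste. The following are minimal sketches of what they are assumed to look like; the
# filter shapes (4x4x3x8 and 2x2x8x16) are illustrative assumptions, not taken from the
# original code.
def createPlaceholders(n_H0, n_W0, n_C0, n_y):
    # Placeholders with an unspecified batch dimension so any mini-batch size works.
    X = tf.placeholder(tf.float32, shape=[None, n_H0, n_W0, n_C0])
    Y = tf.placeholder(tf.float32, shape=[None, n_y])
    return X, Y


def initialize_parameters():
    # Two Xavier-initialized convolution filter banks; the shapes are assumptions
    # (3-channel input, 8 then 16 feature maps).
    W1 = tf.get_variable("W1", [4, 4, 3, 8], initializer=tf.contrib.layers.xavier_initializer(seed=0))
    W2 = tf.get_variable("W2", [2, 2, 8, 16], initializer=tf.contrib.layers.xavier_initializer(seed=0))
    return {"W1": W1, "W2": W2}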
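

# compute_cost and random_mini_batches are also assumed helpers, sketched here so the
# paste runs on its own: compute_cost is the usual mean softmax cross-entropy on the
# Z3 logits, and random_mini_batches shuffles the data and cuts it into batches.
def compute_cost(Z3, Y):
    # Mean softmax cross-entropy over the mini-batch (labels are one-hot rows of Y).
    return tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=Z3, labels=Y))


def random_mini_batches(X, Y, mini_batch_size, seed):
    # Shuffle the examples, then split them into consecutive batches; the last
    # batch may be smaller than mini_batch_size.
    np.random.seed(seed)
    permutation = np.random.permutation(X.shape[0])
    shuffled_X, shuffled_Y = X[permutation], Y[permutation]
    return [(shuffled_X[k:k + mini_batch_size], shuffled_Y[k:k + mini_batch_size])
            for k in range(0, X.shape[0], mini_batch_size)]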


def model_evaluate(trainX, trainY, testX, testY, predictX, alpha=0.01, mini_batch=64, num_epochs=50, print_cost=True):
    ops.reset_default_graph()
    tf.set_random_seed(1)
    seed = 3
    (m, n_H0, n_W0, n_C0) = trainX.shape
    n_y = trainY.shape[1]
    costs = []

    # Build the graph: placeholders, parameters, forward pass, cost, and optimizer.
    X, Y = createPlaceholders(n_H0, n_W0, n_C0, n_y)
    parameters = initialize_parameters()
    Z3 = forward_propagate(X, parameters)
    # Z3 is reused for prediction further down; calling forward_propagate a second
    # time would create a second, untrained fully-connected layer.
    cost = compute_cost(Z3, Y)

    optimizer = tf.train.AdamOptimizer(learning_rate=alpha).minimize(cost)

    init = tf.global_variables_initializer()

    with tf.Session() as sess:
        sess.run(init)
        for epoch in range(num_epochs):
            minibatch_cost = 0.
            num_minibatches = int(m / mini_batch)
            seed = seed + 1
            minibatches = random_mini_batches(trainX, trainY, mini_batch, seed)
            for minibatch in minibatches:
                (minibatch_X, minibatch_Y) = minibatch
                # One optimizer step per mini-batch; accumulate the average epoch cost.
                _, temp_cost = sess.run([optimizer, cost], feed_dict={X: minibatch_X, Y: minibatch_Y})
                minibatch_cost += temp_cost / num_minibatches
            if print_cost and epoch % 5 == 0:
                print("Cost after epoch %i: %f" % (epoch, minibatch_cost))
            if print_cost:
                costs.append(minibatch_cost)
        # Plot the training cost per epoch.
        plt.plot(np.squeeze(costs))
        plt.ylabel('cost')
        plt.xlabel('epochs')
        plt.title("Learning rate = " + str(alpha))
        plt.show()
        # Accuracy of the trained network on the training and test sets.
        predict_op = tf.argmax(Z3, 1)
        correct_prediction = tf.equal(predict_op, tf.argmax(Y, 1))
        accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
        train_accuracy = accuracy.eval({X: trainX, Y: trainY})
        test_accuracy = accuracy.eval({X: testX, Y: testY})
        print("Train Accuracy:", train_accuracy)
        print("Test Accuracy:", test_accuracy)

        # Prediction on the unlabeled predictX examples, using the same trained logits.
        test_prediction = predict_op.eval(feed_dict={X: predictX})

        return train_accuracy, test_accuracy, parameters, test_prediction


_, _, parameters, predictions = model_evaluate(trainx, trainy, testx, testy, test, alpha=0.001, mini_batch=64, num_epochs=40, print_cost=True)
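
# Example of inspecting the returned predictions (assuming `test` holds unlabeled
# images with the same height, width, and channels as the training data):
print(predictions[:10])                               # first ten predicted class indices
np.savetxt("predictions.csv", predictions, fmt="%d")  # save all class indices to a CSV file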