Advertisement
Guest User

Untitled

a guest
Jul 23rd, 2017
66
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
Python 0.95 KB | None | 0 0
    # NOTE(review): fragment of a LeNet-style classifier built with the
    # TensorFlow 1.x API. `fc0` (the flattened conv output, presumably
    # shape (batch, 400)), `mu`, `sigma`, and `keep_prob` are defined
    # earlier in the enclosing function, outside this excerpt — confirm.

    # SOLUTION: Layer 3: Fully Connected. Input = 400. Output = 120.
    fc1_W = tf.Variable(tf.truncated_normal(shape=(400, 120), mean = mu, stddev = sigma))
    fc1_b = tf.Variable(tf.zeros(120))
    fc1   = tf.matmul(fc0, fc1_W) + fc1_b

    # SOLUTION: Activation.
    fc1    = tf.nn.relu(fc1)
    # Dropout for regularization; keep_prob is presumably a placeholder fed
    # differently at train vs. eval time — verify against the caller.
    fc1    = tf.nn.dropout(fc1, keep_prob)

    # SOLUTION: Layer 4: Fully Connected. Input = 120. Output = 84.
    fc2_W  = tf.Variable(tf.truncated_normal(shape=(120, 84), mean = mu, stddev = sigma))
    fc2_b  = tf.Variable(tf.zeros(84))
    fc2    = tf.matmul(fc1, fc2_W) + fc2_b

    # SOLUTION: Activation.
    fc2    = tf.nn.relu(fc2)
    fc2    = tf.nn.dropout(fc2, keep_prob)

    # SOLUTION: Layer 5: Fully Connected. Input = 84. Output = 43.
    # (Original comment said "Output = 10", but the weights are (84, 43) and
    # the bias has 43 elements — the network emits 43 logits, presumably the
    # 43 classes of the German Traffic Sign benchmark.)
    fc3_W  = tf.Variable(tf.truncated_normal(shape=(84, 43), mean = mu, stddev = sigma))
    fc3_b  = tf.Variable(tf.zeros(43))
    # Unnormalized class scores; no softmax is applied here.
    logits = tf.matmul(fc2, fc3_W) + fc3_b

    return logits
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement