import tensorflow as tf
from tensorflow.contrib import rnn

LSTM_cells = 200
num_classes = 2
learning_rate = 0.001  # assumed value; the paste uses learning_rate without defining it

# Output projection variables (assumed definitions; the paste references
# weights['out'] and biases['out'] but never declares them)
weights = {'out': tf.Variable(tf.random_normal([LSTM_cells, num_classes]))}
biases = {'out': tf.Variable(tf.random_normal([num_classes]))}

lstm_cell = rnn.BasicLSTMCell(LSTM_cells, forget_bias=1.0)
state_in = lstm_cell.zero_state(1, tf.float32)

X = tf.placeholder(tf.float32, [None, 3])
Y = tf.placeholder(tf.float32, [None, num_classes])

# Treat the whole batch as one sequence of length batch_size for the RNN.
# Reshape into a new tensor instead of rebinding X, so the [None, 3]
# placeholder can still be fed directly.
X_seq = tf.reshape(X, [1, -1, 3])
rnnex_t, rnn_state = tf.nn.dynamic_rnn(
    cell=lstm_cell, inputs=X_seq, dtype=tf.float32, initial_state=state_in)
rnnex = tf.reshape(rnnex_t, [-1, LSTM_cells])
out = tf.add(tf.matmul(rnnex, weights['out']), biases['out'])
logits = tf.reshape(out, [-1, num_classes])
prediction = tf.nn.softmax(logits)

# Define loss and optimizer
loss_op = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(logits=logits, labels=Y))
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)
train_op = optimizer.minimize(loss_op)
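A minimal sketch of how this graph could be driven, assuming a NumPy feature array of shape [batch, 3] and one-hot labels; the arrays `features` and `labels` below are hypothetical placeholders, not part of the original paste.

# Hypothetical training-loop sketch (TF 1.x); `features` and `labels` are
# stand-in random data, assumed only for illustration.
import numpy as np

features = np.random.rand(10, 3).astype(np.float32)
labels = np.eye(num_classes)[np.random.randint(0, num_classes, 10)].astype(np.float32)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for step in range(100):
        # One optimizer step over the whole (single-sequence) batch
        _, loss_val = sess.run([train_op, loss_op],
                               feed_dict={X: features, Y: labels})
    # Class probabilities for the same inputs
    probs = sess.run(prediction, feed_dict={X: features})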