Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# Character-sequence classifier (TensorFlow 1.x graph mode).
# An LSTM reads a window of STRING_LENGTH one-hot encoded characters and
# emits one logit per character in CHARS.  Assumes STRING_LENGTH, CHARS,
# and `tf` are defined earlier in the file — TODO confirm.
HIDDEN_LAYERS = 64

# Batch placeholders: one-hot input sequences and one-hot target characters.
X = tf.placeholder("float", [None, STRING_LENGTH, len(CHARS)])
y = tf.placeholder("float", [None, len(CHARS)])

# static_rnn wants a Python list of per-timestep tensors, so split the time
# axis (axis 1) into STRING_LENGTH tensors of shape [None, len(CHARS)].
X_seq = tf.unstack(X, STRING_LENGTH, 1)

# Original author's note: sequence of 12 chars to output of 7 — presumably
# STRING_LENGTH == 12 and len(CHARS) == 7; verify against the definitions.
lstm_cell = tf.contrib.rnn.BasicLSTMCell(HIDDEN_LAYERS)
outputs, states = tf.contrib.rnn.static_rnn(lstm_cell, X_seq, dtype=tf.float32)

# Only the last timestep's output feeds the classification layer.
final_output = outputs[-1]

# Dense projection from the LSTM state down to one logit per character.
init = tf.random_normal_initializer()
weights = tf.get_variable("weights", [HIDDEN_LAYERS, len(CHARS)], initializer=init)
biases = tf.get_variable("biases", [len(CHARS)], initializer=init)
prediction = tf.add(tf.matmul(final_output, weights), biases)

# Mean softmax cross-entropy against the one-hot targets, minimized by Adam.
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=prediction, labels=y))
optimizer = tf.train.AdamOptimizer(1e-2)
train_op = optimizer.minimize(cost)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement