Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import numpy as np
- from random import shuffle
- import tensorflow as tf
def generate_sequence_data(batch_size, sequence_length):
    """Build a constant dummy dataset of one-hot input vectors.

    Returns a nested list of shape (batch_size, sequence_length, 4)
    where every input vector is the fixed one-hot [1, 0, 0, 0].

    Args:
        batch_size: number of sequences to generate.
        sequence_length: number of time steps per sequence.

    Returns:
        list[list[list[int]]] of the shape described above.
    """
    # NOTE(review): the original comments claimed fixed sizes ("60 inputs",
    # "batch is length 10"); the actual sizes come from the parameters.
    # A fresh inner list is built per step, matching the original loop.
    return [[[1, 0, 0, 0] for _ in range(sequence_length)]
            for _ in range(batch_size)]
def run():
    """Build a minimal LSTM graph and evaluate it on dummy one-hot inputs.

    Creates a placeholder of shape (batch_size, sequence_length,
    input_length), feeds it through a single BasicLSTMCell via
    tf.nn.dynamic_rnn, then prints the per-step outputs and the final
    cell state. TF1-style API (placeholder / Session / contrib).
    """
    batch_size = 1
    num_hidden = 1
    sequence_length = 10
    input_length = 4
    train_input = generate_sequence_data(batch_size, sequence_length)

    # PHASE 1 - build the computation graph.
    input_placeholder = tf.placeholder(
        tf.float32, shape=(batch_size, sequence_length, input_length))
    cell = tf.contrib.rnn.BasicLSTMCell(num_hidden)
    # output: hidden values for each step — nested arrays of shape
    # (batch_size, sequence_length, num_hidden).
    # state: final LSTMStateTuple (c = cell state, h = output value).
    output, state = tf.nn.dynamic_rnn(cell, input_placeholder, dtype=tf.float32)

    # PHASE 2 - run the graph on the generated inputs.
    init_op = tf.global_variables_initializer()
    # Fix: context manager guarantees the session is closed
    # (the original created a Session and never closed it).
    with tf.Session() as sess:
        sess.run(init_op)
        inp = train_input[0:batch_size]
        # Fix: fetch outputs and final state in ONE run() call instead of
        # two separate executions of the same graph over the same feed.
        outputs, states = sess.run([output, state], {input_placeholder: inp})
        print("output values: ")
        print(outputs)
        print("internal states: ")
        print(states)
if __name__ == "__main__":
    # run() takes no arguments; the original comment listing
    # "num_hidden, normalization method, max or sum" was stale.
    run()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement