Not a member of Pastebin yet? Sign up — it unlocks many cool features!
# ---------------------------------------------------------------------------
# Roll per-sentence RNN outputs up into one padded 3-D tensor per document:
# result shape is [num_documents(+1 dummy), max_sent_seq_len, output_size * 2].
# NOTE(review): the zeros seed below means row 0 of tf_padded_final_2 is an
# all-zeros dummy document — presumably sliced off downstream ([1:]); confirm.
# ---------------------------------------------------------------------------

# Initial tensor to concatenate onto after each while-loop iteration.
tf_padded_final = tf.zeros(shape=[1, max_sent_seq_len, output_size * 2])

# 1-D vector (length = document batch size) giving the number of sentences in
# each document (used as the sequence-lengths argument to
# tf.bidirectional_dynamic_rnn).
sentence_batch_len = tf.placeholder(
    shape=[None], dtype=tf.int32, name="sentence_batch_len")

# 2-D array: column 0 is the first sentence index of each document within
# `outputs`; column 1 is the end sentence index (exclusive, per tf.range).
sentence_index_offsets = tf.placeholder(
    shape=[None, 2], dtype=tf.int32, name="sentence_index_offsets")

i = tf.constant(0)


def while_cond(i, tf_padded_final):
    """Continue while fewer than `mini_batch_size` documents are processed."""
    return tf.less(i, mini_batch_size)


# Loop through the mini-batch of documents (not sentences) one at a time and
# roll each document's sentence vectors up into a single row (1 per document).
# A while-loop is used because sentence counts vary across documents.
def body(i, tf_padded_final):
    """Gather document i's sentences, pad to max_sent_seq_len, append."""
    st_idx = sentence_index_offsets[i, 0]
    end_idx = sentence_index_offsets[i, 1]
    tf_range = tf.range(start=st_idx, limit=end_idx)
    # Pad the sentence axis up to the global maximum sentence count.
    pad_len = max_sent_seq_len - sentence_batch_len[i]
    tf_slice = tf.gather(outputs, tf_range)
    tf_slice_padding = [[0, pad_len], [0, 0]]
    tf_slice_padded = tf.pad(tf_slice, tf_slice_padding, 'CONSTANT')
    tf_slice_padded_3D = tf.expand_dims(tf_slice_padded, axis=0)
    tf_padded_final = tf.concat([tf_padded_final, tf_slice_padded_3D], axis=0)
    return tf.add(i, 1), tf_padded_final


# BUG FIX: shape_invariants previously hard-coded tf.TensorShape([None,12,20]);
# the invariant must match the actual accumulated shape
# [None, max_sent_seq_len, output_size * 2] (only the document axis grows),
# otherwise any other max_sent_seq_len / output_size breaks the loop.
_, tf_padded_final_2 = tf.while_loop(
    while_cond, body, [i, tf_padded_final],
    shape_invariants=[
        i.get_shape(),
        tf.TensorShape([None, max_sent_seq_len, output_size * 2]),
    ])
Add Comment
Please sign in to add a comment.