Not a member of Pastebin yet?
Sign up — it unlocks many cool features!
- #! /usr/bin/env python3
- import os
- import tensorflow as tf
- import numpy as np
- import tempfile
- from tensorflow.python import debug as tfdbg
def watch_session(dump_root_dir, train_op, times):
    """Run `train_op` `times` times under a dumping debug session.

    Every intermediate tensor is dumped (via the DebugIdentity debug op)
    into `dump_root_dir` for later offline inspection.
    """
    # Watch everything: attach DebugIdentity to every node name.
    options = tfdbg.WatchOptions(
        debug_ops="DebugIdentity",
        node_name_regex_whitelist=r".*",
    )

    def select_watches(fetches, feeds):
        # Same watch options regardless of what is fetched/fed.
        return options

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        # Wrap the raw session so each run() dumps tensors under dump_root_dir.
        sess = tfdbg.DumpingDebugWrapperSession(
            sess, watch_fn=select_watches, session_root=dump_root_dir)
        for _ in range(times):
            sess.run(train_op)
def next_nan_or_inf(dump_root_dir):
    """Yield dumped tensor data entries that contain NaN or Inf values.

    Iterates every run directory under `dump_root_dir` (one per session
    run), scanning its tensors in timestamp order.

    Args:
        dump_root_dir: Root directory previously populated by a
            DumpingDebugWrapperSession.

    Yields:
        Debug tensor data objects whose float tensor holds a NaN or Inf.
    """
    for folder in sorted(os.listdir(dump_root_dir)):
        dump_dir = tfdbg.DebugDumpDir(os.path.join(dump_root_dir, folder))
        for data in sorted(dump_dir.dumped_tensor_data,
                           key=lambda d: d.timestamp):
            # Keep the try body minimal: only the two accesses that can
            # raise AttributeError for uninitialized/inconvertible tensors.
            # (The original wrapped the NaN scan and the yield too, which
            # could silently swallow unrelated AttributeErrors.)
            try:
                tensor = data.get_tensor()
                dtype_name = tensor.dtype.name
            except AttributeError:
                # Some tensors may be uninitialized; skip those.
                continue
            if 'float' in dtype_name:
                if np.isnan(tensor).any() or np.isinf(tensor).any():
                    yield data
if __name__ == '__main__':  # BUG FIX: was `__main__` unquoted -> NameError
    NUM_RUNS = 3
    NUM_PRINT_TENSORS = 10
    # A constant op that always produces NaN, to exercise the detector.
    train_op = tf.constant([float('NaN')])
    dump_root_dir = tempfile.mkdtemp()
    print('dump_root_dir: %s' % dump_root_dir)
    watch_session(dump_root_dir, train_op, NUM_RUNS)
    count = 0
    for data in next_nan_or_inf(dump_root_dir):
        print('-' * 40)
        # NOTE(review): split('/')[3] assumes a fixed tmp-path depth and
        # that the run folder's last character is the run number — fragile;
        # confirm against the dump directory layout on the target system.
        print('[*] Round %s: %s' % (
            data.file_path.split('/')[3][-1], data.tensor_name))
        count += 1
        # BUG FIX: was `count > NUM_PRINT_TENSORS`, printing 11 tensors.
        if count >= NUM_PRINT_TENSORS:
            break
Add Comment
Please sign in to add a comment.