Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# TensorFlow 1.x XOR demo: train an Estimator-API DNNClassifier on
# randomly generated XOR examples.
import tensorflow as tf
import random

# Show INFO-level Estimator logs (loss, global step) during training.
tf.logging.set_verbosity(tf.logging.INFO)
def input_fn():
    """Build the XOR input pipeline and return its next-batch tensors.

    Generates 1,000,000 random samples where the features are two
    independent bits ``a`` and ``b`` and the label is their XOR
    (1 exactly when the bits differ), batches them 32 at a time, and
    returns the ``(features, labels)`` tensors of a one-shot iterator.
    """
    def xor_samples():
        # Each sample: ({"a": bit, "b": bit}, [a XOR b]).
        for _ in range(1000000):
            left = random.randint(0, 1)
            right = random.randint(0, 1)
            yield {"a": left, "b": right}, [int(left != right)]

    dataset = tf.data.Dataset.from_generator(
        xor_samples, ({"a": tf.int32, "b": tf.int32}, tf.int32))
    batched = dataset.batch(32)
    return batched.make_one_shot_iterator().get_next()
# One numeric feature column per raw input key ("a" and "b"), matching
# the dict keys yielded by input_fn's generator.
feature_columns = [tf.feature_column.numeric_column(k) for k in ["a", "b"]]

# Debug sanity check: pull one batch through the pipeline and print it.
# NOTE(review): this builds its own graph/iterator, separate from the one
# the Estimator constructs below, so it does not consume training data.
with tf.Session() as sess:
    print(sess.run(input_fn()))
# Feed-forward binary classifier for XOR.
classifier = tf.estimator.DNNClassifier(
    feature_columns=feature_columns,  # The input features to our model
    hidden_units=[5, 5],  # Two hidden layers, each with 5 neurons
    n_classes=2,  # Binary labels: 0 or 1
    # Checkpoint/summary directory — hard-coded to a local user path;
    # TODO(review): make configurable before reuse.
    model_dir='/home/zond/tmp/xor/snap')

# No `steps` argument: train runs until the generator's 1,000,000
# samples are exhausted, then evaluate does a fresh pass over the same
# synthetic distribution (evaluation metrics are printed as a dict).
classifier.train(input_fn = input_fn)
print(classifier.evaluate(input_fn = input_fn))
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement