Not a member of Pastebin yet?
Sign up —
it unlocks many cool features!
- import tensorflow as tf
- import numpy as np
- import csv
def gen_dataset():
    """Write ./est_dataset.csv with 2000 labeled rows for training.

    Rows alternate between two classes:
      - label 1 followed by the ascending sequence 0..9
      - label 0 followed by the descending sequence 9..0
    """
    up = list(range(10))
    down = [9 - i for i in range(10)]
    # newline='' is required by the csv module; without it Windows
    # gets an extra blank line after every row.
    with open('./est_dataset.csv', 'w', newline='') as f:
        writer = csv.writer(f, delimiter=',')
        for _ in range(1000):
            writer.writerow([1] + up)
            writer.writerow([0] + down)
def input_fn():
    """Estimator input pipeline reading ./est_dataset.csv.

    Each CSV line is 11 integer columns: a 0/1 label followed by 10
    feature values. Lines are shuffled and batched in groups of 10.

    Returns:
        train: float32 tensor of shape (batch, 10) — the features.
        label: float32 tensor of shape (batch,) — the 0/1 labels.
    """
    # tf.contrib.data is the deprecated location; TextLineDataset moved
    # to tf.data in TF 1.4.
    dataset = tf.data.TextLineDataset('./est_dataset.csv')
    dataset = dataset.shuffle(7777).batch(10)
    itr = dataset.make_one_shot_iterator()
    batch = itr.get_next()
    # Decode each line into 11 int columns (default 0 for missing values).
    batch = tf.decode_csv(batch, [[0]] * 11)
    train = tf.cast(tf.stack(batch[1:], axis=1), dtype=tf.float32)
    label = tf.cast(batch[0], dtype=tf.float32)
    return train, label
- # train, label = input_fn()
- # with tf.Session() as sess:
- # _train, _label = sess.run([train, label])
- # for t, l in zip(_train, _label):
- # print(t, l)
def model_fn(features, labels, mode):
    """Model function: a small dense network for binary classification.

    Args:
        features: float32 tensor of shape (batch, 10).
        labels: float32 tensor of shape (batch,) with 0/1 values
            (None in PREDICT mode).
        mode: a tf.estimator.ModeKeys value.

    Returns:
        tf.estimator.EstimatorSpec appropriate for `mode`.
    """
    layer1 = tf.layers.dense(features, 10)
    layer2 = tf.layers.dense(layer1, 10)
    out = tf.layers.dense(layer2, 1)
    out = tf.reshape(out, [-1])  # logits, shape (batch,)

    # The original ignored `mode`, so est.predict / est.evaluate would
    # fail (no predictions, and labels is None in PREDICT mode).
    if mode == tf.estimator.ModeKeys.PREDICT:
        predictions = {'probability': tf.sigmoid(out)}
        return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions)

    loss = tf.losses.sigmoid_cross_entropy(labels, out)
    if mode == tf.estimator.ModeKeys.EVAL:
        return tf.estimator.EstimatorSpec(mode=mode, loss=loss)

    # TRAIN: plain SGD; passing global_step lets minimize() increment it.
    global_step = tf.train.get_global_step()
    train_op = tf.train.GradientDescentOptimizer(1e-2).minimize(loss, global_step)
    return tf.estimator.EstimatorSpec(mode=mode, train_op=train_op, loss=loss)
# Bug fix: gen_dataset() was never called, so ./est_dataset.csv did not
# exist on a fresh run and the input pipeline failed immediately.
gen_dataset()

est = tf.estimator.Estimator(model_fn, model_dir='./est_logs/')
# The one-shot dataset is finite, so each est.train() call consumes one
# full pass over the data; this loop trains for 100 epochs.
for _ in range(100):
    est.train(input_fn)
Add Comment
Please, Sign In to add comment