import tensorflow as tf
#####################
# preparation stuff #
#####################

# define input and output data
input_data = [[0., 0.], [0., 1.], [1., 0.], [1., 1.]]  # XOR input
output_data = [[0.], [1.], [1.], [0.]]                 # XOR output, one target per sample

# create placeholders for the input and the desired output
# None indicates a variable batch size
# one input has the dimension [1, 2], one target the dimension [1, 1]
n_input = tf.placeholder(tf.float32, shape=[None, 2])
n_output = tf.placeholder(tf.float32, shape=[None, 1])

# number of neurons in the hidden layer
hidden_nodes = 5
################
# hidden layer #
################

b_hidden = tf.Variable(0.1)  # hidden layer's bias neuron
W_hidden = tf.Variable(tf.random_uniform([2, hidden_nodes], -1.0, 1.0))  # hidden layer's weight matrix,
                                                                         # initialized with a uniform distribution
# calc hidden layer's activation: the [None, 2] input batch is multiplied by the
# [2, hidden_nodes] weight matrix, so the input comes first in matmul
hidden = tf.sigmoid(tf.matmul(n_input, W_hidden) + b_hidden)
################
# output layer #
################

W_output = tf.Variable(tf.random_uniform([hidden_nodes, 1], -1.0, 1.0))  # output layer's weight matrix
logits = tf.matmul(hidden, W_output)  # output layer's pre-activation (needed for the loss below)
output = tf.sigmoid(logits)           # calc output layer's activation
############
# learning #
############

# calc cross entropy between the current output and the desired output;
# the op expects the raw logits and the targets (not the input), and in
# TensorFlow >= 1.0 it must be called with keyword arguments
cross_entropy = tf.nn.sigmoid_cross_entropy_with_logits(labels=n_output, logits=logits)

loss = tf.reduce_mean(cross_entropy)                # mean the cross_entropy
optimizer = tf.train.GradientDescentOptimizer(0.1)  # gradient descent for optimizing with a "stepsize" of 0.1
train = optimizer.minimize(loss)                    # let the optimizer train
####################
# initialize graph #
####################

init = tf.initialize_all_variables()  # deprecated alias of tf.global_variables_initializer() in newer TensorFlow

sess = tf.Session()  # create the session and therefore the graph
sess.run(init)       # initialize all variables
# train the network
for epoch in range(0, 201):
    # run the training operation, feeding the XOR samples and their targets
    sess.run(train, feed_dict={n_input: input_data, n_output: output_data})
    if epoch % 20 == 0:
        print("step: {:>3} | W: {} | b: {}".format(epoch, sess.run(W_hidden), sess.run(b_hidden)))
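# Evaluation sketch (not part of the original paste): feed the four XOR samples
# once more and print the trained network's sigmoid outputs; values close to
# 0 or 1 indicate the XOR mapping has been learned.
for sample, target in zip(input_data, output_data):
    prediction = sess.run(output, feed_dict={n_input: [sample]})
    print("input: {} | expected: {} | predicted: {:.4f}".format(sample, target[0], prediction[0][0]))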