Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import tensorflow as tf
- import numpy as np
- import matplotlib.pyplot as plt
# Hyper-parameters for the gradient-descent learning algorithm.
learning_rate = 0.01
training_epochs = 40

# 101 evenly spaced sample points on [-1, 1].
trX = np.linspace(-1, 1, 101)

# Ground-truth degree-5 polynomial: 1 + 2x + 3x^2 + 4x^3 + 5x^4 + 6x^5.
num_coeffs = 6
trY_coeffs = [1, 2, 3, 4, 5, 6]

# Evaluate the polynomial at every sample point.
trY = sum(coef * np.power(trX, power)
          for power, coef in enumerate(trY_coeffs))
trY += np.random.randn(*trX.shape) * 1.5  # add Gaussian noise
# TF1-style graph inputs; one scalar (x, y) sample is fed per training step.
X = tf.placeholder(tf.float32)  # input feature x
Y = tf.placeholder(tf.float32)  # observed target y
def model(X, w):
    """Evaluate the polynomial sum_i w[i] * X**i (degree num_coeffs - 1)."""
    powers = [tf.multiply(w[i], tf.pow(X, i)) for i in range(num_coeffs)]
    return tf.add_n(powers)
# Trainable coefficient vector; all six weights start at zero.
w = tf.Variable([0.0] * num_coeffs, name = "parameters")
y_model = model(X, w)
# cost function: squared error for a single fed (x, y) sample
cost = tf.pow(Y - y_model, 2)
# operation that will be called on each iteration of the learning algorithm
train_op = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)
# TF1-style session; variables must be initialized before any run() call
sess = tf.Session()
init = tf.global_variables_initializer()
sess.run(init)
# Stochastic gradient descent: one update per (x, y) sample, repeated
# for training_epochs passes over the data.
# Fix: close the session in a `finally` so it is released even if a
# run() call raises mid-training (the original leaked it on error).
try:
    for epoch in range(training_epochs):
        for x, y in zip(trX, trY):
            sess.run(train_op, feed_dict={X: x, Y: y})
    w_val = sess.run(w)  # obtain the final parameter values
    print(w_val)
finally:
    sess.close()
# Overlay the learned polynomial (red curve) on the noisy training data.
plt.scatter(trX, trY)
fitted = sum(w_val[i] * np.power(trX, i) for i in range(num_coeffs))
plt.plot(trX, fitted, 'r')
plt.show()
Add Comment
Please, Sign In to add comment