Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import numpy as np
- import matplotlib.pyplot as plt
- import random
# --- Gradient descent: fit a degree-`pol_deg` polynomial to (X, Y) ---

# Degree of the fitted model h(x) = a0 + a1*x + ... + am*x**m.
pol_deg = 2

# Coefficient vector A = [a0, ..., a_pol_deg], randomly initialised in [0, 1).
# (The original first built np.zeros(pol_deg + 1) and immediately overwrote
# it -- that dead assignment is removed.)
A = np.array([random.random() for _ in range(pol_deg + 1)])

# Learning rate. Choosing it matters:
#   too large  -> the iteration diverges,
#   too small  -> convergence takes very many steps.
alfa = 0.1

# Training data: Y[i] is the target value at X[i].
X = np.array([0, -1, 1, 2])
Y = np.array([0, 0, 2, 1])
def h(X_arr=X):
    """Evaluate the model polynomial at the points in ``X_arr``.

    h(x) = a0 + a1*x + ... + am*x**m, with coefficients taken from the
    global array ``A``. Returns an array the same length as ``X_arr``.
    """
    total = 0
    for power, coeff in enumerate(A):
        total = total + coeff * X_arr ** power
    return np.array(total)
def derivative():
    """Gradient of the cost J with respect to each coefficient A[i].

    For the squared-error cost J = sum((h(x) - y)**2) / (2 * len(X)),
    the partial derivative w.r.t. a_i is

        dJ/da_i = sum((h(x) - y) * x**i) / len(X)

    The original code additionally multiplied the i-th component by
    ``alfa**i``, which (as its own comment admitted) is not part of the
    derivative -- it silently applied a smaller step to higher-order
    coefficients. That bogus factor is removed so this returns the true
    gradient; with alfa = 0.1 on this data the descent still converges.
    """
    residual = h() - Y
    return np.array([np.sum(residual * X ** i) for i in range(len(A))]) / len(X)
def J():
    """Cost of the current model: half the mean squared error over the data.

    Standard least-squares objective -- sum of squared residuals divided
    by twice the number of samples.
    """
    residual = h() - Y
    return (residual ** 2).sum() / (2 * len(residual))
# --- Training loop: plain (batch) gradient descent -----------------------
for i in range(10000):
    # The heart of gradient descent: move every coefficient a step
    # against its partial derivative of the cost.
    A -= derivative() * alfa
    print("--\nITERATION ", i)
    print("A = ", A)
    print("ERROR = ", J())
    # Redraw the fit every 500 iterations so progress is visible.
    if i % 500 == 0:
        plt.clf()
        plt.suptitle("iter = " + str(i) + str(A), fontsize=14, fontweight='bold')
        plt.scatter(X, Y)
        # Draw the fitted curve over the range the data actually covers.
        # (The original hard-coded linspace(0, 10), which extrapolated far
        # beyond the samples and left the x = -1 point with no curve.)
        x_ap = np.linspace(X.min() - 1, X.max() + 1, 50)
        y_ap = h(x_ap)
        plt.plot(x_ap, y_ap)
        plt.xlabel(i)
        # Anchor the iteration label in axes coordinates so it stays
        # visible regardless of the data limits (the original pinned it
        # at data point (5, 15), outside any sensible window here).
        plt.text(0.05, 0.9, 'iter: ' + str(i),
                 verticalalignment='bottom', horizontalalignment='left',
                 color='green', fontsize=10,
                 transform=plt.gca().transAxes)
        plt.pause(0.01)
plt.show()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement