Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- from statistics import mean
- import numpy as np
- import matplotlib.pyplot as plt
- from matplotlib import style as stl
# Original toy data set, kept for reference:
#xf=np.array([1,2,3,4,5,6],dtype=np.float64)
#yf=np.array([5,4,6,5,6,7],dtype=np.float64)
# Use the FiveThirtyEight look for all plots produced below.
stl.use("fivethirtyeight")
# Two paired samples of (feature, target) data.  xf/yf feed the
# gradient-descent fit at the bottom of the file; xe/ye are defined but
# not used anywhere in this chunk.
xf=np.array([36961,43621,15694,36231,29945,40588,75255,37709,30899,25486,37497,40398,74105,76725,18317],dtype=np.float64)
yf=np.array([2503,2992,1042,2487,2014,2805,5062,2643,2126,1784,2641,2766,5047,5312,1215],dtype=np.float64)
xe= np.array([35680,42514,15162,35298,29800,40255,74532,37464,31030,24843,36172,39552,72545,75352,18031],dtype=np.float64)
ye=np.array([2217,2761,990,2274,1865,2606,4805,2396,1993,1627,2375,2560,4597,4871,1119],dtype=np.float64)
def standard_error(x, y, m, b):
    """Return the mean squared error of the line y = m*x + b over the data.

    Note: despite the name, this computes the MSE, not the standard error
    of the estimate; the name is kept for compatibility with callers.

    x, y -- array-likes of equal length (features and targets)
    m, b -- slope and intercept of the candidate line
    """
    x = np.asarray(x, dtype=np.float64)
    y = np.asarray(y, dtype=np.float64)
    residuals = y - (m * x + b)
    # Vectorized replacement for the original per-element Python loop.
    return np.mean(residuals ** 2)
def gradient(x, y, m, b):
    """Return the gradient (dm, db) of the MSE loss at (m, b).

    For loss = (1/N) * sum((y_i - (m*x_i + b))**2):
        d(loss)/dm = -(2/N) * sum(x_i * (y_i - (m*x_i + b)))
        d(loss)/db = -(2/N) * sum(y_i - (m*x_i + b))
    """
    x = np.asarray(x, dtype=np.float64)
    y = np.asarray(y, dtype=np.float64)
    n = float(len(x))
    residuals = y - (m * x + b)
    dm = -(2.0 / n) * np.sum(x * residuals)
    # Sign bug fixed: the original accumulated (y - m*x + b) for db,
    # adding b instead of subtracting it (the correct form was even
    # present in a commented-out line).  Per-iteration debug print removed.
    db = -(2.0 / n) * np.sum(residuals)
    return dm, db
def gradient_decent(x, y, error_threshold, learning_rate=0.2):
    """Fit y ~= m*x + b by batch gradient descent; return (m, b).

    Iterates from (m, b) = (0, 0) until the mean squared error drops
    below error_threshold.

    x, y            -- array-likes of equal length
    error_threshold -- stop once MSE < this value
    learning_rate   -- step size (new keyword; default matches the
                       original hard-coded 0.2)

    NOTE(review): with a large learning rate relative to the data scale
    (e.g. the xf/yf arrays in this file) the iteration diverges; the
    caller is expected to normalize inputs or lower the rate.  The
    gradient is computed inline with the sign-corrected db formula, and
    the dead `last_error` variable, the redundant second error
    computation, and the per-iteration debug prints are removed.
    """
    x = np.asarray(x, dtype=np.float64)
    y = np.asarray(y, dtype=np.float64)
    n = float(len(x))
    m = 0.0
    b = 0.0
    # Sentinel start value kept from the original: if error_threshold
    # >= 100 the loop is skipped entirely and (0, 0) is returned.
    error = 100.0
    while error > error_threshold:
        residuals = y - (m * x + b)
        dm = -(2.0 / n) * np.sum(x * residuals)
        db = -(2.0 / n) * np.sum(residuals)
        m -= learning_rate * dm
        b -= learning_rate * db
        # Re-evaluate the loss once per step, after the update.
        error = np.mean((y - (m * x + b)) ** 2)
    return m, b
def line_fit(x, y):
    """Closed-form ordinary least-squares fit; return (slope, intercept).

    Same formula as the original:
        m = (mean(x)*mean(y) - mean(x*y)) / (mean(x)^2 - mean(x*x))
        b = mean(y) - m*mean(x)
    but using np.mean instead of the pure-Python statistics.mean, which
    is both idiomatic and much faster on numpy arrays.
    """
    x = np.asarray(x, dtype=np.float64)
    y = np.asarray(y, dtype=np.float64)
    mx = x.mean()
    my = y.mean()
    m = (mx * my - (x * y).mean()) / (mx * mx - (x * x).mean())
    b = my - m * mx
    return m, b
def square_error(y_orig, y_line):
    """Sum of squared differences between the fitted and original values."""
    residuals = y_line - y_orig
    return sum(residuals ** 2)
def R2(y_orig, y_line):
    """Coefficient of determination: 1 - SS_res / SS_tot.

    y_orig -- observed target values
    y_line -- values predicted by the fitted line

    The sums of squares are computed inline with numpy; this replaces
    the original's list comprehension with an unused loop variable
    (`[mean(y_orig) for y in y_orig]`) and its pure-Python mean, while
    producing the identical value.
    """
    y_orig = np.asarray(y_orig, dtype=np.float64)
    y_line = np.asarray(y_line, dtype=np.float64)
    ss_res = np.sum((y_line - y_orig) ** 2)
    ss_tot = np.sum((y_orig - y_orig.mean()) ** 2)
    return 1.0 - ss_res / ss_tot
def printus_maximus(x, y):
    """Fit (x, y) with closed-form OLS, print R^2 and the coefficients,
    and draw the scatter plus regression line on the current figure.

    Does not call plt.show(); the caller is responsible for displaying.
    """
    m, b = line_fit(x, y)
    # Renamed the comprehension variable: the original shadowed x.
    regression_line = [m * xi + b for xi in x]
    r_squared = R2(y, regression_line)
    # Bug fix: the original printed the entire x array in the R^2 line
    # (print("R^2 = ", x, r_squared)) instead of just the statistic.
    print("R^2 = ", r_squared)
    print("Result = ", m, b)
    plt.scatter(x, y)
    plt.plot(x, regression_line)
def printus_maximus2dot0(x, y):
    """Fit (x, y) by gradient descent (MSE threshold 0.0005) and draw the
    scatter plus fitted line on the current figure."""
    m, b = gradient_decent(x, y, 0.0005)
    fitted = [m * xi + b for xi in x]
    plt.scatter(x, y)
    plt.plot(x, fitted)
- printus_maximus2dot0(xf,yf)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement