Untitled | a guest | Feb 20th, 2018
from statistics import mean
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import style as stl

# small toy dataset, commented out
#xf = np.array([1, 2, 3, 4, 5, 6], dtype=np.float64)
#yf = np.array([5, 4, 6, 5, 6, 7], dtype=np.float64)

stl.use("fivethirtyeight")

# data used for the fit at the bottom of the script
xf = np.array([36961, 43621, 15694, 36231, 29945, 40588, 75255, 37709, 30899, 25486, 37497, 40398, 74105, 76725, 18317], dtype=np.float64)
yf = np.array([2503, 2992, 1042, 2487, 2014, 2805, 5062, 2643, 2126, 1784, 2641, 2766, 5047, 5312, 1215], dtype=np.float64)

# second dataset, defined but never used below
xe = np.array([35680, 42514, 15162, 35298, 29800, 40255, 74532, 37464, 31030, 24843, 36172, 39552, 72545, 75352, 18031], dtype=np.float64)
ye = np.array([2217, 2761, 990, 2274, 1865, 2606, 4805, 2396, 1993, 1627, 2375, 2560, 4597, 4871, 1119], dtype=np.float64)


def standard_error(x, y, m, b):
    # mean squared error of the line y = m*x + b over the data
    toterror = 0
    for i in range(len(x)):
        toterror += (y[i] - (x[i] * m + b)) ** 2
    toterror = toterror / len(x)
    return toterror


def gradient(x, y, m, b):
    # gradient of the mean squared error with respect to m and b
    dm = 0
    db = 0
    N = float(len(x))
    for i in range(len(x)):
        print("gradient.... dm= ", dm, "db==", db)
        dm += -(2 / N) * (x[i] * (y[i] - (x[i] * m + b)))
        # fixed: the original had y[i] - m*x[i] + b, i.e. b outside the
        # parentheses, which contradicts the reference formulas kept below
        db += -(2 / N) * (y[i] - (m * x[i] + b))

    # db = -(2/N) * (y[i] - ((m*x[i]) + b))
    # dm = -(2/N) * x[i] * (y[i] - ((m * x[i]) + b))
    return dm, db
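
# For reference (not part of the original paste), the updates above follow the
# standard derivation: with the mean squared error
#   J(m, b) = (1/N) * sum_i (y_i - (m*x_i + b))**2
# the partial derivatives are
#   dJ/dm = -(2/N) * sum_i x_i * (y_i - (m*x_i + b))
#   dJ/db = -(2/N) * sum_i (y_i - (m*x_i + b))
# which is exactly what the loop in gradient() accumulates term by term.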


def gradient_decent(x, y, error_threshold):
    m = 0
    b = 0
    # With x values in the tens of thousands, the original fixed step of 0.2
    # makes the updates blow up to inf/nan; a far smaller rate (or normalised
    # inputs) is needed for this data.
    learning_rate = 1e-10
    # The original condition `while error > error_threshold` can never be met
    # here, since the MSE of this data stays in the thousands; stopping once
    # the error stops improving (apparently what last_error was meant for)
    # terminates instead.
    error = standard_error(x, y, m, b)
    last_error = error + 1
    while abs(last_error - error) > error_threshold:
        print("Error: " + str(error))
        dm, db = gradient(x, y, m, b)
        print("dm= ", dm, "db==", db)
        m = m - dm * learning_rate
        b = b - db * learning_rate
        last_error = error
        error = standard_error(x, y, m, b)
    return m, b


def line_fit(x, y):
    # closed-form least-squares fit: slope and intercept from the data means
    m = ((mean(x) * mean(y) - mean(x * y)) /
         (mean(x) * mean(x) - mean(x * x)))
    b = mean(y) - m * mean(x)
    return m, b
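
# For reference (not in the original paste): this is the usual ordinary
# least-squares slope, since
#   m = (mean(x)*mean(y) - mean(x*y)) / (mean(x)^2 - mean(x^2))
#     = (mean(x*y) - mean(x)*mean(y)) / (mean(x^2) - mean(x)^2)
#     = cov(x, y) / var(x)
# and b is then chosen so the line passes through (mean(x), mean(y)).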


def square_error(y_orig, y_line):
    # sum of squared differences between the fitted line and the data
    return sum((y_line - y_orig) ** 2)


def R2(y_orig, y_line):
    # coefficient of determination: 1 - SS_residual / SS_total
    y_mean_line = [mean(y_orig) for y in y_orig]
    SE_reg = square_error(y_orig, y_line)
    SE_mean = square_error(y_orig, y_mean_line)
    return 1 - (SE_reg / SE_mean)


def printus_maximus(x, y):
    # fit with the closed-form solution, report R^2, and plot
    m, b = line_fit(x, y)
    regression_line = [m * xi + b for xi in x]
    r_squared = R2(y, regression_line)
    # fixed: the original also printed the whole x array on this line
    print("R^2 = ", r_squared)
    print("Result = ", m, b)
    plt.scatter(x, y)
    plt.plot(x, regression_line)


def printus_maximus2dot0(x, y):
    # fit with gradient descent instead of the closed-form solution, then plot
    m, b = gradient_decent(x, y, 0.0005)
    regression_line = [m * xi + b for xi in x]
    plt.scatter(x, y)
    plt.plot(x, regression_line)


printus_maximus2dot0(xf, yf)
plt.show()  # the original never called plt.show(), so no figure appeared
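
# Optional sanity check (not part of the original paste): numpy's built-in
# least-squares fit, np.polyfit, should give roughly the same slope and
# intercept as line_fit, and gradient_decent should land near both once it
# converges.
def sanity_check(x, y):
    m_cf, b_cf = line_fit(x, y)        # closed-form fit defined above
    m_np, b_np = np.polyfit(x, y, 1)   # numpy reference fit (slope, intercept)
    print("closed form:", m_cf, b_cf)
    print("np.polyfit :", m_np, b_np)

#sanity_check(xf, yf)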