Anupznk

Untitled

Jul 2nd, 2021
28
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
  1. import numpy as np
  2. from matplotlib import pyplot as plt
  3.  
  4.  
  5. def showGraphScatter(x, y):
  6. plt.scatter(x, y, color='red')
  7. plt.show()
  8.  
  9.  
  10. def polynomialFunc(x, constants):
  11. funcVal = constants[0]
  12. constants = np.array(constants)
  13. for i in range(1, constants.size):
  14. funcVal += constants[i] * x
  15. x *= x
  16.  
  17. return funcVal
  18.  
  19.  
  20. def showGraph(lowerLim, upperLim, constants, xarr, yarr):
  21. x = np.arange(lowerLim, upperLim, .1)
  22. y = []
  23. for i in range(x.size):
  24. y.append(polynomialFunc(x[i], constants))
  25. plt.plot(x, y, color='blue')
  26. # plt.ylim(0, 0.4)
  27. plt.xlabel('x')
  28. plt.ylabel('y')
  29. plt.scatter(xarr, yarr, color='red')
  30. plt.grid(True, which='both')
  31. plt.axhline(y=0, color='green')
  32. plt.axvline(x=0, color='green')
  33. plt.show()
  34.  
  35.  
  36. def gaussianElimination(A, B, d=0):
  37. # FORWARD ELIMINATION
  38. i = 0
  39. n = B.size
  40. while i < n:
  41. j = i + 1
  42. while j < n:
  43. # RATIO = R2/R1
  44. r = A[j][i] / A[i][i]
  45.  
  46. k = 0
  47. while k < n:
  48. # (R2 = R2 - R1 * r)
  49. A[j][k] -= r * A[i][k]
  50.  
  51. k += 1
  52.  
  53. B[j] -= r * B[i]
  54. if d:
  55. print('Coefficient Matrix:')
  56. print(A)
  57. print('Constant Matrix:')
  58. print(B)
  59.  
  60. j += 1
  61.  
  62. i += 1
  63.  
  64. # BACK SUBSTITUTION
  65. solution = np.zeros(n)
  66. # CALCULATING THE VALUE OF THE LAST VARIABLE
  67. solution[n - 1] = B[n - 1] / A[n - 1][n - 1]
  68.  
  69. # FINDING THE REST OF THE SOLUTIONS USING THE PREVIOUS SOLUTIONS
  70. i = n - 2
  71. while i >= 0:
  72. solution[i] = B[i]
  73. j = i + 1
  74. while j < n:
  75. solution[i] = solution[i] - A[i][j] * solution[j]
  76. j += 1
  77. solution[i] /= A[i][i]
  78. i -= 1
  79.  
  80. return solution
  81.  
  82.  
  83. def polynomialRegressionSp(x, y, numOfDataPoints):
  84. # y = a_0 + a_1x + a_2x^2 (2nd degree polynomial)
  85. sum_x = 0
  86. sum_y = 0
  87. sum_x2 = 0
  88. sum_x3 = 0
  89. sum_x4 = 0
  90. sum_x_y = 0
  91. sum_x2_y = 0
  92.  
  93. for i in range(numOfDataPoints):
  94. sum_x += x[i]
  95. sum_y += y[i]
  96. sum_x2 += x[i] ** 2
  97. sum_x3 += x[i] ** 3
  98. sum_x4 += x[i] ** 4
  99. sum_x_y += x[i] * y[i]
  100. sum_x2_y += x[i] ** 2 * y[i]
  101.  
  102. matA = [[numOfDataPoints, sum_x, sum_x2], [sum_x, sum_x2, sum_x3], [sum_x2, sum_x3, sum_x4]]
  103. matB = [sum_y, sum_x_y, sum_x2_y]
  104. constants = gaussianElimination(matA, matB)
  105. return constants
  106.  
  107.  
  108. def polynomialRegression(x, y, numOfDataPoints, degree):
  109. x = np.array(x)
  110. y = np.array(y)
  111. x_exp_sums = np.zeros(2 * (degree + 1))
  112. x_exp_y_sums = np.zeros(2 * (degree + 1))
  113. x_exp_sums[0] = numOfDataPoints
  114. x_exp_y_sums[0] = sum(y)
  115. #
  116. # for i in range(1, 2 * (degree + 1)):
  117. # x_exp_sums[i] = sum(x_exp_sums[i] * x)
  118. # x_exp_y_sums[i] = sum(y * x_exp_sums[i])
  119.  
  120. for i in range(1, 2 * (degree) + 1):
  121. for j in range(numOfDataPoints):
  122. x_exp_sums[i] += x[j] ** i
  123. x_exp_y_sums[i] += y[j] * (x[j] ** i)
  124.  
  125. matA = np.zeros((degree + 1, degree + 1))
  126. matB = np.zeros(degree + 1)
  127.  
  128. for i in range(degree + 1):
  129. matB[i] = x_exp_y_sums[i]
  130. for j in range(degree + 1):
  131. matA[i][j] = x_exp_sums[i + j]
  132.  
  133. print(matA)
  134. print(matB)
  135.  
  136. return gaussianElimination(matA, matB)
  137.  
  138.  
# Sample data set: 7 (x, y) measurements to be fitted.
# NOTE(review): units/origin of this data are not stated here — presumably
# some sensor calibration curve; confirm with the data source.
x = [80, 40, -40, -120, -200, -280, -340]
y = [6.47e-6, 6.24e-6, 5.72e-6, 5.09e-6, 4.30e-6, 3.33e-6, 2.45e-6]
# showGraphScatter(x, y)
# Fit a 2nd-degree polynomial through all 7 points, plot the fitted
# curve against the raw data, then print the coefficients (lowest
# degree first).
constants = polynomialRegression(x, y, 7, 2)
showGraph(-400, 200, constants, x, y)
print('result', constants)
  145.  
RAW Paste Data