from __future__ import division
import matplotlib.pyplot as plt
import numpy
import random
from scipy.optimize import leastsq


def errorfunc(params, x_data, y_data):
    """
    Return the error (residual) at each x point, to a straight line of
    gradient m.  This 1-parameter error function has a clearly defined
    minimum.  Note that leastsq() squares and sums these residuals
    itself, so return the raw residuals here, not their squares.
    """
    residuals = []

    for i, lm in enumerate(x_data):
        predicted_um = lm * params[0]
        residuals.append(y_data[i] - predicted_um)

    return residuals

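# Illustrative vectorized equivalent of errorfunc() using numpy arrays.
# errorfunc_vec is an added sketch, not part of the original script, and
# is not used below; it assumes x_data and y_data have equal length.
def errorfunc_vec(params, x_data, y_data):
    return numpy.asarray(y_data) - params[0] * numpy.asarray(x_data)
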
plt.figure()

###################################################################
# STEP 1: make a scatter plot of the data
plot_real_data = True
###################################################################

if plot_real_data:
    # 100 points of real data
    x = [0.85772, 0.17135, 0.03401, 0.17227, 0.17595, 0.1742, 0.22454, 0.32792, 0.19036, 0.17109, 0.16936, 0.17357, 0.6841, 0.24588, 0.22913, 0.28291, 0.19845, 0.3324, 0.66254, 0.1766, 0.47927, 0.47999, 0.50301, 0.16035, 0.65964, 0.0, 0.14308, 0.11648, 0.10936, 0.1983, 0.13352, 0.12471, 0.29475, 0.25212, 0.08334, 0.07697, 0.82263, 0.28078, 0.24192, 0.25383, 0.26707, 0.26457, 0.0, 0.24843, 0.26504, 0.24486, 0.0, 0.23914, 0.76646, 0.66567, 0.62966, 0.61771, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.79157, 0.06889, 0.07669, 0.1372, 0.11681, 0.11103, 0.13577, 0.07543, 0.10636, 0.09176, 0.10941, 0.08327, 1.19903, 0.20987, 0.21103, 0.21354, 0.26011, 0.28862, 0.28441, 0.2424, 0.29196, 0.20248, 0.1887, 0.20045, 1.2041, 0.20687, 0.22448, 0.23296, 0.25434, 0.25832, 0.25722, 0.24378, 0.24035, 0.17912, 0.18058, 0.13556, 0.97535, 0.25504, 0.20418, 0.22241]
    y = [1.13085, 0.19213, 0.01827, 0.20984, 0.21898, 0.12174, 0.38204, 0.31002, 0.26701, 0.2759, 0.26018, 0.24712, 1.18352, 0.29847, 0.30622, 0.5195, 0.30406, 0.30653, 1.13126, 0.24761, 0.81852, 0.79863, 0.89171, 0.19251, 1.33257, 0.0, 0.19127, 0.13966, 0.15877, 0.19266, 0.12997, 0.13133, 0.25609, 0.43468, 0.09598, 0.08923, 1.49033, 0.27278, 0.3515, 0.38368, 0.35134, 0.37048, 0.0, 0.3566, 0.36296, 0.35054, 0.0, 0.32712, 1.23759, 1.02589, 1.02413, 0.9863, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.19224, 0.12192, 0.12815, 0.2672, 0.21856, 0.14736, 0.20143, 0.1452, 0.15965, 0.14342, 0.15828, 0.12247, 0.5728, 0.10603, 0.08939, 0.09194, 0.1145, 0.10313, 0.13377, 0.09734, 0.12124, 0.11429, 0.09536, 0.11457, 0.76803, 0.10173, 0.10005, 0.10541, 0.13734, 0.12192, 0.12619, 0.11325, 0.1092, 0.11844, 0.11373, 0.07865, 1.28568, 0.25871, 0.22843, 0.26608]
else:
    # 100 points of test data with noise added
    x_clean = numpy.linspace(0, 1.2, 100)
    y_clean = [i * 1.38 for i in x_clean]
    x = [i + random.uniform(-1 * random.uniform(0, 0.1), random.uniform(0, 0.1)) for i in x_clean]
    y = [i + random.uniform(-1 * random.uniform(0, 0.5), random.uniform(0, 0.5)) for i in y_clean]

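# Added illustrative alternative, not in the original script: the
# synthetic noise above could also be generated with numpy's vectorized
# RNG, e.g. Gaussian rather than uniform noise.  Left commented out so
# it does not overwrite the real data when plot_real_data is True:
# x = list(x_clean + numpy.random.normal(0, 0.05, len(x_clean)))
# y = list(numpy.array(y_clean) + numpy.random.normal(0, 0.25, len(y_clean)))
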
plt.subplot(2, 1, 1)
plt.scatter(x, y); plt.xlabel('x'); plt.ylabel('y')


# STEP 2: vary gradient m of a y = mx fitting line and plot the
# sum-squared error with respect to gradient m; here you can see the
# optimal gradient of the fitting line by eye
plt.subplot(2, 1, 2)
try_m = numpy.linspace(0.1, 4, 200)
sse = [sum(e ** 2 for e in errorfunc([m], x, y)) for m in try_m]
plt.plot(try_m, sse); plt.xlabel('line gradient, m'); plt.ylabel('sum-squared error')

# sum-squared error at a few candidate gradients, for comparison
print(sum(e ** 2 for e in errorfunc([1.0], x, y)))
print(sum(e ** 2 for e in errorfunc([1.08], x, y)))
print(sum(e ** 2 for e in errorfunc([1.25], x, y)))
print(sum(e ** 2 for e in errorfunc([1.4], x, y)))

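# Added illustrative check, not in the original script: the coarse grid
# search also locates the minimum numerically, not just by eye.
print(try_m[numpy.argmin(sse)])
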
# STEP 3: use leastsq() to find the optimal gradient m
params = [2]  # start with an initial guess of 2 for the gradient
params_fitted, cov, infodict, mesg, ier = leastsq(errorfunc, params[:], args=(x, y), full_output=1)
optimal_m = params_fitted[0]
print(optimal_m)

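# Added cross-check, not in the original script: for this one-parameter
# model the least-squares optimum also has a closed form, obtained by
# setting d/dm sum((y_i - m*x_i)**2) = 0, giving m* = sum(x*y) / sum(x*x).
m_closed_form = sum(xi * yi for xi, yi in zip(x, y)) / sum(xi * xi for xi in x)
print(m_closed_form)  # should agree with optimal_m from leastsq()
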
# the optimal gradient m should sit at the minimum of the error function
plt.subplot(2, 1, 2)
plt.plot([optimal_m, optimal_m], [0, 100], 'r')
# the optimal gradient m should give the best-fit straight line
plt.subplot(2, 1, 1)
plt.plot([0, 1.2], [0, 1.2 * optimal_m], 'r')

plt.show()