Advertisement
Guest User

Untitled

a guest
Jun 24th, 2019
72
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 4.81 KB | None | 0 0
  1. import math
  2. import numpy as np
  3. import matplotlib.pyplot as plt
  4.  
  5. # --------------------------------------------------------------------------------
  6. # Gaussian basis function
  7. # --------------------------------------------------------------------------------
  8. def gaussian(mean, sigma):
  9. """
  10. Args:
  11. mean:
  12. sigma:
  13. """
  14. def _gaussian(x):
  15. return np.exp(-0.5 * ((x - mean) / sigma) ** 2)
  16. return _gaussian
  17.  
  18. # --------------------------------------------------------------------------------
  19. # Design matrix
  20. # --------------------------------------------------------------------------------
  21. def phi(f, x):
  22. bias = np.array([1]) # bias parameter(i = 0)
  23. # bias+basis
  24. return np.append(bias, f(x))
  25.  
  26. # --------------------------------------------------------------------------------
  27. # Data generation utility
  28. # --------------------------------------------------------------------------------
  29. from numpy.random import rand
  30. def uniform_variable_generator(samples):
  31. _random = rand(samples)
  32. return _random
  33.  
  34. def noise_generator(samples, mu=0.0, beta=0.1):
  35. noise = np.random.normal(mu, beta, samples)
  36. return noise
  37.  
  38. def sigmoid(x):
  39. return 1 / (1 + np.exp(-x))
  40.  
  41. def generator_t_function(x):
  42. #return np.sin(x)
  43. return sigmoid(x)
  44.  
  45. def generator_X_function(x):
  46. return 2 * np.pi * x
  47. #return 2 * np.pi * x
  48.  
# --------------------------------------------------------------------------------
# Observations
# --------------------------------------------------------------------------------
#X = np.array([0.02, 0.12, 0.19, 0.27, 0.42, 0.51, 0.64, 0.84, 0.88, 0.99])
#t = np.array([0.05, 0.87, 0.94, 0.92, 0.54, -0.11, -0.78, -0.79, -0.89, -0.04])
# Number of (x, t) training pairs to generate.
samples = 20

#X = np.array([0.02, 0.12, 0.19, 0.27, 0.42, 0.51, 0.64, 0.84, 0.88, 0.99])
#t = np.array([0.05, 0.87, 0.94, 0.92, 0.54, -0.11, -0.78, -0.79, -0.89, -0.04])
# Inputs: uniform draws scaled onto [0, 2*pi).
X = generator_X_function(uniform_variable_generator(samples))
# Targets: sigmoid(X) plus zero-mean Gaussian noise with std 0.1.
t = generator_t_function(X) + noise_generator(samples, beta=0.1)

# Range/size summaries reused below for basis placement and plot axes.
MAX_X = max(X)
NUM_X = len(X)
MAX_T = max(t)
NUM_T = len(t)
# --------------------------------------------------------------------------------
# Gaussian basis function parameters
# --------------------------------------------------------------------------------
# Basis width: 10% of the largest observed input.
sigma = 0.1 * MAX_X

# mean of gaussian basis function (11 dimension w1, w2, ... w11)
# Centers spaced one sigma apart, from 0 up to (and just past) MAX_X.
mean = np.arange(0, MAX_X + sigma, sigma)

# Basis function
# f(x) is vector-valued: one Gaussian response per center in `mean`.
f = gaussian(mean, sigma)

# --------------------------------------------------------------------------------
# Design matrix
# --------------------------------------------------------------------------------
# One row per observation: [1, phi_1(x_n), ..., phi_M(x_n)].
PHI = np.array([phi(f, x) for x in X])

#alpha = 0.1
#beta = 9.0
alpha = 0.5 # larger alpha gives smaller w preventing overfitting (0 -> same with linear regression)
beta = 5 # Small beta allows more variance (deviation)

# Posterior covariance of the weights: Sigma_N = (alpha*I + beta*PHI^T PHI)^-1.
Sigma_N = np.linalg.inv(alpha * np.identity(PHI.shape[1]) + beta * np.dot(PHI.T, PHI))

# Posterior mean of the weights: mean_N = beta * Sigma_N PHI^T t.
mean_N = beta * np.dot(Sigma_N, np.dot(PHI.T, t))
  90.  
# --------------------------------------------------------------------------------
# Bayesian regression
# --------------------------------------------------------------------------------
# Plot the posterior-mean prediction mean_N . phi(x) over a dense x grid (blue),
# with the noisy observations overlaid as red dots.
xlist = np.arange(0, MAX_X, 0.01)
plt.title("Bayesian regression")
plt.plot(xlist, [np.dot(mean_N, phi(f, x)) for x in xlist], 'b')
plt.plot(X, t, 'o', color='r')
plt.show()
  99.  
  100. # --------------------------------------------------------------------------------
  101. # Linear regression
  102. # --------------------------------------------------------------------------------
  103. # w for linear regression parameter
  104. #w = np.linalg.solve(np.dot(PHI.T, PHI), np.dot(PHI.T, t))
  105. # --------------------------------------------------------------------------------
  106. l = 0.05
  107. regularization = np.identity(PHI.shape[1])
  108. w = np.linalg.solve(
  109. np.dot(PHI.T, PHI) + (l * regularization),
  110. np.dot(PHI.T, t)
  111. )
  112.  
  113. xlist = np.arange(0, MAX_X, 0.01)
  114. plt.title("Linear regression")
  115. plt.plot(xlist, [np.dot(w, phi(f, x)) for x in xlist], 'g')
  116. plt.plot(X, t, 'o', color='r')
  117. plt.show()
  118.  
  119. # --------------------------------------------------------------------------------
  120. # Predictive Distribution
  121. # --------------------------------------------------------------------------------
  122. def normal_dist_pdf(x, mean, var):
  123. return np.exp(-(x-mean) ** 2 / (2 * var)) / np.sqrt(2 * np.pi * var)
  124.  
  125. def quad_form(A, x):
  126. return np.dot(x, np.dot(A, x))
  127.  
# Grids of input locations and target values for the density plot.
xlist = np.arange(0, MAX_X, 0.01)
#tlist = np.arange(-1.5 * MAX_T, 1.5 * MAX_T, 0.01)
# Target axis: centered on mean(t), spanning +/- one observed range of t.
tlist = np.arange(
    np.mean(t) - (np.max(t)-np.min(t)),
    np.mean(t) + (np.max(t)-np.min(t)),
    0.01
)
# Predictive density p(t | x, data) evaluated on the grid:
# mean = mean_N . phi(x), variance = 1/beta + phi(x)^T Sigma_N phi(x).
# Transposed so rows index t and columns index x, as contourf expects.
z = np.array([
    normal_dist_pdf(tlist, np.dot(mean_N, phi(f, x)),
                    1 / beta + quad_form(Sigma_N, phi(f, x))) for x in xlist
]).T

# Filled contours of the predictive density, the predictive mean curve (red),
# and the observations (green dots).
plt.contourf(xlist, tlist, z, 5, cmap=plt.cm.coolwarm)
plt.title("Predictive distribution")
plt.plot(xlist, [np.dot(mean_N, phi(f, x)) for x in xlist], 'r')
plt.plot(X, t, 'go')
plt.show()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement