Advertisement
cyphric

Untitled

Feb 24th, 2022
55
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 4.39 KB | None | 0 0
  1. #!/usr/bin/env python
  2. # coding: utf-8
  3.  
  4. # ## Support Vector Machine
  5.  
  6. # In[790]:
  7.  
  8.  
  9. import numpy as np
  10. import math, random
  11. import matplotlib.pyplot as plt
  12. from scipy.optimize import minimize
  13. global C, t, x, b, nonzero
  14.  
  15.  
  16. # In[1000]:
  17.  
  18.  
  19. # Generating the data sets
  20. np.random.seed(100) # Will generate the same random data everytime you run the program
  21.  
  22. classA = np.concatenate(
  23. (np.random.randn(10 , 2) * 0.5 + [ 1.5 , 0.5] ,
  24. np.random.randn(10 , 2) * 0.5 + [ -1.5 , 0.5]))
  25.  
  26. classB = np.random.randn(20 , 2) * 0.5 + [0.0 , - 0.5]
  27.  
  28.  
  29. inputs = np.concatenate((classA , classB))
  30. targets = np.concatenate(
  31. (np.ones( classA.shape[0]) ,
  32. - np.ones( classB.shape[0])))
  33.  
  34. N = inputs.shape[0] #Number of rows (samples), .shape[checks number of rows at place 0]
  35.  
  36.  
  37. permute=list(range (N))
  38. random.shuffle(permute)
  39. inputs = inputs[permute , : ] # List of 40 containing 2 elemens in each
  40. targets = targets[ permute ] # 1 or -1 Boolean value
  41.  
  42.  
  43. # In[1001]:
  44.  
  45.  
  46. def kernel(s,u):
  47. return np.dot(s,u) # Linear works
  48.  
  49. #return np.power(np.dot(s, u) + 1, 5) # Polynomial
  50. #Radial Basis Function
  51. #e_d = - np.power(np.linalg.norm(s - u), 2)
  52. #return math.exp(e_d/(2*5**2))
  53.  
  54.  
  55.  
  56. # In[1002]:
  57.  
  58.  
  59. def create_matrix(x,t):
  60. #x = np.array(N)
  61. #t = np.array(N)
  62. matrix = np.zeros((N,N))
  63.  
  64. for n in range(N):
  65. for m in range(N):
  66. matrix[n][m] = targets[n] * targets[m] * kernel(inputs[n],inputs[m])
  67.  
  68. return matrix
  69.  
# Cache the problem data under the global names that the optimizer
# callbacks (`objective`, `zerofun`) read, then precompute the P matrix.
x = inputs
t = targets
matrix = create_matrix(x, t)
  73.  
  74.  
  75. # In[ ]:
  76.  
  77.  
  78.  
  79.  
  80.  
  81. # In[ ]:
  82.  
  83.  
  84.  
  85.  
  86.  
  87. # In[ ]:
  88.  
  89.  
  90.  
  91.  
  92.  
  93. # In[1003]:
  94.  
  95.  
  96. def objective(alpha):
  97. return 0.5*np.dot(alpha, np.dot(alpha, matrix)) - np.sum(alpha)
  98.  
  99.  
  100. def zerofun(alpha):
  101. return np.dot(alpha, t)
  102.  
  103.  
  104. def indicator(a, c):
  105. sum = 0
  106. for value in nonzero:
  107. sum += value[0] * value[2] * kernel([a,c],value[1])
  108. return sum - b
  109.  
  110.  
  111. def main():
  112. # Initial guess
  113. alpha_zero = np.zeros(N)
  114. B =[(0, C) for b in range(N)] #Upper bound boundary
  115. XC = {'type':'eq', 'fun':zerofun}
  116.  
  117. # Get solution
  118. sol = minimize( objective , alpha_zero , bounds = B, constraints = XC )
  119. return sol
  120.  
  121.  
  122. b = 0
  123. C = 1
  124. sol = main()
  125.  
  126. alpha = sol['x']
  127. not_zero = [(alpha[i], inputs[i], targets[i]) for i in range(N) if abs(alpha[i]) > 10e-5]
  128.  
  129.  
  130. # In[1004]:
  131.  
  132.  
  133. def b():
  134. sum = 0
  135. for j in nonzero:
  136. sum += j[0] * j[2] * kernel(j[1], not_zero[0][1])
  137. return sum - nonzero[0][2]
  138.  
  139. b = b()
  140.  
  141.  
  142. # In[979]:
  143.  
  144.  
  145. def plot():
  146. plt.plot([p [0] for p in classA ] ,
  147. [p [1] for p in classA ] ,
  148. 'b.')
  149. plt.plot([p [0] for p in classB ] ,
  150. [p [1] for p in classB ] ,
  151. 'r.' )
  152.  
  153. plt.axis('equal') # Force same scale on both axes
  154. #plt.savefig('svmplot .pdf')
  155.  
  156. # Decision boundaries
  157. xgrid=np.linspace( - 5, 5)
  158. ygrid=np.linspace ( - 4, 4)
  159.  
  160. grid=np.array( [ [ indicator (h , g) for h in xgrid ] for g in ygrid ])
  161.  
  162. plt.contour( xgrid , ygrid , grid , ( - 1.0, 0.0 , 1.0) , colors = ( 'r' , 'k' , 'b' ) , linewidths =(1 , 3 , 1))
  163. plt.show()
  164.  
  165.  
  166.  
  167. plot()
  168.  
  169.  
# The cells below re-run plot() after manually changing the kernel / C in
# the cells above; the comments record which configuration produced each run.

# In[809]:

plot()
# RBF kernel run

#

# In[821]:

plot()
# Linear kernel run

# In[826]:

plot()
# Polynomial kernel, p = 2

# In[832]:

# New generated data set - Polynomial
plot()

# ## Theory

# C \
# More slack allowed - lower C - noisy data\
# Less slack allowed - higher C - less noisy data
#

# In[932]:

plot()

# Linear - no solution obtained: high C, so little slack allowed

# Non-linear kernels\
# - Sigma\
# Higher sigma, higher smoothness, less variance
#
# - p\
# Higher p, higher variance, less smoothness

# In[942]:

plot()

# Polynomial, p = 2

# In[944]:

plot()

# Polynomial, p = 5

# In[954]:

plot()

# RBF, sigma = 2

# In[961]:

plot()

# RBF, sigma = 5

# # Question

# Imagine that you are given data that is not easily separable. When
# should you opt for more slack rather than going for a more complex
# model (kernel) and vice versa?

# If the data are noisy (classes bleeding into each other) it is usually
# better to increase slack, but if the issue is the shape/spread of the
# data, then changing the kernel is the better move.
#

# In[1005]:

plot()

# In[991]:

plot()

# In[985]:

plot()

# In[ ]:
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement