import numpy as np
import random

from scipy.optimize import minimize
import matplotlib.pyplot as plt

def generate_data():
    # Two clusters per class, drawn from scaled normal distributions.
    classA = np.concatenate((
        np.random.randn(10, 2) * 0.2 + [2.0, 1.0],
        np.random.randn(10, 2) * 0.2 + [0.0, 1.0],
        # np.random.randn(10, 2) * 0.2 + [2.0, 2.0]
    ))

    classB = np.concatenate((
        np.random.randn(10, 2) * 0.2 + [1.0, 1.0],
        np.random.randn(10, 2) * 0.2 + [3.0, 1.0],
        # np.random.randn(10, 2) * 0.2 + [2.0, 3.5]
    ))

    inputs = np.concatenate((classA, classB))
    targets = np.concatenate((np.ones(classA.shape[0]),
                              -np.ones(classB.shape[0])))
    N = inputs.shape[0]  # number of rows (samples)

    # Shuffle samples and targets with the same permutation.
    permute = list(range(N))
    random.shuffle(permute)
    inputs = inputs[permute, :]
    targets = targets[permute]
    return (inputs, targets, classA, classB)

def basic_data():
    # Tiny hand-made dataset, useful for debugging.
    return ([[1, 1], [2, 2]], [1, -1], [[1, 1]], [[2, 2]])

(training_data, target_class, classA, classB) = generate_data()
# p[i][j] holds t_i * t_j * K(x_i, x_j), filled in by precalc_kernel().
p = [[0] * len(training_data) for i in range(len(training_data))]
alpha = []
positive_alphas = []

def linear_kernel(x, y):
    return np.dot(np.transpose(x), y)

def polynomial_kernel(x, y, degree):
    return np.power(linear_kernel(x, y) + 1, degree)

def rbf_kernel(x, y, sigma):
    # Radial basis function kernel: exp(-||x - y||^2 / (2 * sigma^2)).
    exponent = (-np.linalg.norm(np.subtract(x, y)) ** 2) / (2 * (sigma ** 2))
    return np.exp(exponent)

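# Kernel sanity checks (a minimal sketch, not part of the original paste):
# the RBF kernel is 1.0 for identical points and decays towards 0 with
# distance, while the linear kernel is simply the dot product.
#
#   rbf_kernel([1.0, 1.0], [1.0, 1.0], 1)   # -> 1.0
#   rbf_kernel([0.0, 0.0], [3.0, 0.0], 1)   # -> exp(-4.5) ~ 0.011
#   linear_kernel([1.0, 2.0], [3.0, 4.0])   # -> 1*3 + 2*4 = 11
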
def zerofun(alpha):
    # Equality constraint for the dual problem: sum_i alpha_i * t_i = 0.
    return np.dot(alpha, target_class)

def precalc_kernel():
    # Precompute the matrix p[i][j] = t_i * t_j * K(x_i, x_j).
    for i in range(len(training_data)):
        for j in range(len(training_data)):
            p[i][j] = (target_class[i] * target_class[j]
                       * rbf_kernel(training_data[i], training_data[j], 1))

def objective(alpha):
    # Dual objective: (1/2) * sum_ij alpha_i alpha_j p[i][j] - sum_i alpha_i.
    alpha_sum = np.sum(alpha)
    total = 0
    for i in range(len(alpha)):
        for j in range(len(alpha)):
            total += alpha[i] * alpha[j] * p[i][j]
    return (total / 2) - alpha_sum

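# Equivalent vectorized form of the dual objective (a sketch added for
# illustration, not part of the original paste). It computes the same value
# as objective() but lets numpy evaluate the double sum, which is much
# faster for larger N.
def objective_vectorized(alpha):
    P = np.array(p)
    # (1/2) * alpha^T P alpha - sum(alpha)
    return 0.5 * np.dot(alpha, np.dot(P, alpha)) - np.sum(alpha)
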
def plot_data():
    plt.plot([pt[0] for pt in classA], [pt[1] for pt in classA], 'b.')
    plt.plot([pt[0] for pt in classB], [pt[1] for pt in classB], 'r.')
    plt.axis('equal')

    # Evaluate the indicator on a grid and draw the decision boundary (0)
    # together with the margins (-1 and 1).
    xgrid = np.linspace(-2, 5)
    ygrid = np.linspace(-2, 3)
    grid = np.array([[indicator(x, y) for x in xgrid] for y in ygrid])

    plt.contour(xgrid, ygrid, grid, (-1.0, 0.0, 1.0),
                colors=('red', 'black', 'blue'), linewidths=(1, 3, 1))
    plt.savefig("rbf_sig1_highvar.pdf")
    plt.show()

def extract_alphas(alpha):
    # Keep the support vectors: samples whose alpha is non-zero
    # (above a small numerical threshold).
    result = []
    for i in range(len(alpha)):
        if alpha[i] > 1e-5:
            result.append((training_data[i], target_class[i], alpha[i]))
    return result

def calculate_bias():
    bias = 0
    (s, ts, _) = positive_alphas[0]

    # With a soft margin, the bias must be computed from a support vector
    # on the margin, i.e. one with alpha < C.
    if C is not None:
        for (tmp_s, tmp_ts, a) in positive_alphas:
            if a < C:
                s = tmp_s
                ts = tmp_ts

    # b = sum_i alpha_i * t_i * K(s, x_i) - t_s
    for i in range(len(alpha)):
        bias += (alpha[i] * target_class[i] * rbf_kernel(s, training_data[i], 1))
    return bias - ts

def indicator(x, y):
    # Decision function: sum over the support vectors of
    # alpha_i * t_i * K((x, y), x_i), minus the bias. The loop variables are
    # named t and a to avoid shadowing the globals target_class and alpha.
    ans = 0
    bias = calculate_bias()

    for (data_point, t, a) in positive_alphas:
        ans += (a * t * rbf_kernel([x, y], data_point, 1))

    return ans - bias

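# indicator() recomputes the bias for every grid point evaluated by
# plot_data(). A variant that takes a precomputed bias (a sketch, not part
# of the original paste) avoids the repeated work; the caller would invoke
# calculate_bias() once and pass the result in.
def indicator_with_bias(x, y, bias):
    ans = 0
    for (data_point, t, a) in positive_alphas:
        ans += a * t * rbf_kernel([x, y], data_point, 1)
    return ans - bias
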
def main():
    global alpha
    global positive_alphas
    global C

    precalc_kernel()

    # C is the slack upper bound; None means a hard margin (no upper bound).
    C = None
    start = np.zeros(len(training_data))
    B = [(0, C) for b in range(len(training_data))]
    XC = {'type': 'eq', 'fun': zerofun}
    ret = minimize(objective, start, bounds=B, constraints=XC)

    alpha = ret['x']
    positive_alphas = extract_alphas(alpha)

    print("Able to minimize: " + str(ret['success']))
    plot_data()

if __name__ == "__main__":
    main()
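# Optional usage sketch (hypothetical, not part of the original paste): once
# main() has run, new points could be classified by the sign of the
# indicator, e.g.
#
#   predictions = [1 if indicator(x, y) > 0 else -1 for (x, y) in new_points]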