import numpy as np
import random
from scipy.optimize import minimize
import matplotlib.pyplot as plt

def generate_data():
    classA = np.concatenate((
        np.random.randn(10, 2) * 0.2 + [2.0, 1.0],
        np.random.randn(10, 2) * 0.2 + [0.0, 1.0],
        # np.random.randn(10, 2) * 0.2 + [2.0, 2.0]
    ))
    classB = np.concatenate((
        np.random.randn(10, 2) * 0.2 + [1.0, 1.0],
        np.random.randn(10, 2) * 0.2 + [3.0, 1.0],
        # np.random.randn(10, 2) * 0.2 + [2.0, 3.5]
    ))
    inputs = np.concatenate((classA, classB))
    targets = np.concatenate((
        np.ones(classA.shape[0]),
        -np.ones(classB.shape[0])
    ))
    N = inputs.shape[0]  # number of rows (samples)
    permute = list(range(N))
    random.shuffle(permute)
    inputs = inputs[permute, :]
    targets = targets[permute]
    return inputs, targets, classA, classB

def basic_data():
    # Tiny fixed data set for debugging: two points, one per class.
    return ([[1, 1], [2, 2]], [1, -1], [[1, 1]], [[2, 2]])

(training_data, target_class, classA, classB) = generate_data()

# p[i][j] will hold t_i * t_j * K(x_i, x_j); filled in by precalc_kernel().
p = [[0] * len(training_data) for i in range(len(training_data))]
alpha = []
positive_alphas = []

def linear_kernel(x, y):
    return np.dot(np.transpose(x), y)

def polynomial_kernel(x, y, degree):
    return np.power(linear_kernel(x, y) + 1, degree)

def rbf_kernel(x, y, sigma):
    # Radial basis function kernel: exp(-||x - y||^2 / (2 * sigma^2)).
    exponent = (-np.linalg.norm(np.subtract(x, y)) ** 2) / (2 * (sigma ** 2))
    return np.exp(exponent)
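
# Added sketch, not part of the original paste: a quick sanity check that the
# Gram matrix produced by rbf_kernel is symmetric and positive semi-definite,
# which is what makes the dual problem below convex. check_kernel_psd is a
# hypothetical helper; call it manually if you want the check.
def check_kernel_psd():
    pts = np.random.randn(5, 2)
    K = np.array([[rbf_kernel(a, b, 1) for b in pts] for a in pts])
    assert np.allclose(K, K.T), "kernel matrix should be symmetric"
    assert np.all(np.linalg.eigvalsh(K) > -1e-10), "eigenvalues should be non-negative"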

def zerofun(alpha):
    # Equality constraint of the dual problem: sum_i alpha_i * t_i == 0.
    return np.dot(alpha, target_class)

def precalc_kernel():
    # Precompute p[i][j] = t_i * t_j * K(x_i, x_j) once, so objective() does
    # not recompute kernel values on every call from the optimizer.
    for i in range(len(training_data)):
        for j in range(len(training_data)):
            p[i][j] = (target_class[i] * target_class[j]
                       * rbf_kernel(training_data[i], training_data[j], 1))

def objective(alpha):
    # Dual objective: (1/2) * sum_ij alpha_i * alpha_j * p[i][j] - sum_i alpha_i.
    alpha_sum = np.sum(alpha)
    total = 0
    for i in range(len(alpha)):
        for j in range(len(alpha)):
            total += alpha[i] * alpha[j] * p[i][j]
    return (total / 2) - alpha_sum
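
# Added sketch (assumes p has been filled by precalc_kernel): an equivalent
# vectorized form of the dual objective, 0.5 * alpha^T P alpha - sum(alpha).
# Not wired in, but passing it to minimize() instead of objective() gives the
# same result much faster.
def objective_vectorized(alpha):
    P = np.array(p)
    return 0.5 * np.dot(alpha, np.dot(P, alpha)) - np.sum(alpha)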

def plot_data():
    plt.plot([point[0] for point in classA], [point[1] for point in classA], 'b.')
    plt.plot([point[0] for point in classB], [point[1] for point in classB], 'r.')
    plt.axis('equal')
    # Evaluate the indicator on a grid and draw the decision boundary (level 0)
    # together with the two margins (levels -1 and +1).
    xgrid = np.linspace(-2, 5)
    ygrid = np.linspace(-2, 3)
    grid = np.array([[indicator(x, y) for x in xgrid] for y in ygrid])
    plt.contour(xgrid, ygrid, grid, (-1.0, 0.0, 1.0),
                colors=('red', 'black', 'blue'), linewidths=(1, 3, 1))
    plt.savefig("rbf_sig1_highvar.pdf")
    plt.show()

def extract_alphas(alpha):
    # Keep only the support vectors: samples whose alpha is effectively non-zero.
    result = []
    for i in range(len(alpha)):
        if alpha[i] > 1e-5:
            result.append((training_data[i], target_class[i], alpha[i]))
    return result

def calculate_bias():
    # b = sum_i alpha_i * t_i * K(s, x_i) - t_s for a support vector s.
    # With slack enabled (C is not None), s must lie on the margin, i.e. have
    # alpha < C, so prefer such a point if one exists.
    bias = 0
    (s, ts, _) = positive_alphas[0]
    if C is not None:
        for (tmp_s, tmp_ts, a) in positive_alphas:
            if a < C:
                s = tmp_s
                ts = tmp_ts
    for i in range(len(alpha)):
        bias += alpha[i] * target_class[i] * rbf_kernel(s, training_data[i], 1)
    return bias - ts

def indicator(x, y):
    # Decision function: sum over support vectors of a_i * t_i * K([x, y], x_i),
    # minus the bias. Loop variables renamed so they no longer shadow the
    # globals target_class and alpha.
    ans = 0
    bias = calculate_bias()
    for (data_point, target, a) in positive_alphas:
        ans += a * target * rbf_kernel([x, y], data_point, 1)
    return ans - bias
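
# Added sketch: classifying a new point with the trained machine. The sign of
# the indicator gives the predicted class (+1 for classA, -1 for classB).
# classify is a hypothetical helper, not part of the original paste; it only
# works after main() has populated positive_alphas.
def classify(x, y):
    return 1 if indicator(x, y) > 0 else -1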

def main():
    global alpha
    global positive_alphas
    global C
    precalc_kernel()
    C = None  # None = hard margin; a finite value such as 10 allows slack.
    start = np.zeros(len(training_data))
    B = [(0, C) for b in range(len(training_data))]  # bounds 0 <= alpha_i <= C
    XC = {'type': 'eq', 'fun': zerofun}
    ret = minimize(objective, start, bounds=B, constraints=XC)
    alpha = ret['x']
    positive_alphas = extract_alphas(alpha)
    print("Able to minimize: " + str(ret['success']))
    plot_data()

if __name__ == "__main__":
    main()
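
# Usage notes (added, hedged): with C = None the bounds are (0, None), i.e. a
# hard-margin SVM; a finite C such as 10 adds slack, which tames overfitting
# on noisy data. Swapping the kernel means changing it consistently in
# precalc_kernel, calculate_bias and indicator, e.g. polynomial_kernel(x, y, 3)
# in all three places.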