Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
class MySupportVectorMachine:
    """Linear soft-margin SVM trained by per-sample sub-gradient descent on the hinge loss.

    Labels are expected in {0, 1}; internally class 0 is mapped to -1 for the
    hinge-loss arithmetic, and predictions are mapped back to {0, 1}.
    """

    def __init__(self):
        # Learned weight vector (one entry per feature), set by fit().
        self.w_vector = []
        # Learned intercept, set by fit().
        self.bias = 0
        # Indices of training samples on or inside the margin, set by fit().
        self.support_ = []

    # ---------------------------------------------------
    def get_support_vector_indices(self, Xdata, Ydata):
        """Store in self.support_ (and return) the indices of support vectors.

        BUG FIX: the original body was only a commented-out `#pass`, i.e. an
        empty function body — a syntax error that prevented the class from
        being defined at all.

        A sample is treated as a support vector when its functional margin
        y * (w . x + b) is <= 1, i.e. it lies on or inside the margin.
        Ydata may use labels {0, 1}; 0 is mapped to -1 here.
        """
        indices = []
        for i, x in enumerate(Xdata):
            y = -1 if Ydata[i] == 0 else Ydata[i]
            if y * (np.dot(x, self.w_vector) + self.bias) <= 1:
                indices.append(i)
        self.support_ = indices
        return indices

    # ---------------------------------------------------
    def fit(self, Xdata, Ydata):
        """Train on Xdata (n_samples x n_features) with labels Ydata in {0, 1}.

        Sets self.w_vector, self.bias and self.support_. Does not mutate the
        caller's label array (the original flipped 0 <-> -1 in place on every
        iteration, which would also have permanently rewritten a genuine -1
        label to 0).
        """
        # Initialize the weight vector with zeros.
        w = np.zeros(len(Xdata[0]))
        eta = 0.0001  # learning rate
        epochs = 1000  # number of passes over the training data
        bias = 0
        C = 0.001  # hinge-loss weight in the per-sample update
        # Map class 0 -> -1 once, on a copy, instead of toggling in place.
        labels = np.asarray(Ydata)
        y_signed = np.where(labels == 0, -1, labels)
        # BUG FIX: escape sequences were mangled ("nn" was "\n\n").
        print("\n\nTraining... ", end="")
        # Training loop: sub-gradient descent with a 1/epoch-decaying L2 term.
        for epoch in range(1, epochs):
            # BUG FIX: the original tested `epoch % 1000 == 0`, which never
            # fires for epoch in 1..999, so no progress dots were printed.
            if epoch % 100 == 0:
                print(".", end="")
            for i, x in enumerate(Xdata):
                if (y_signed[i] * np.dot(x, w) + bias) < 1:
                    # On/inside the margin: hinge sub-gradient + L2 shrinkage.
                    w = w + eta * (C * (x * y_signed[i]) + (-2 * (1 / epoch) * w))
                    bias = bias + eta * (y_signed[i] * C)
                else:
                    # Correctly classified with margin: only the L2 shrinkage.
                    w = w + eta * (-2 * (1 / epoch) * w)
        self.w_vector = w
        self.bias = bias
        # BUG FIX: escape sequences were mangled ("ntttSVM..." was "\n\t\t\t...").
        print("\n\t\t\tSVM is Successfully Trained...\n")
        self.get_support_vector_indices(Xdata, Ydata)

    # ---------------------------------------------------
    def predict(self, XtestData):
        """Return predicted labels in {0, 1} for each row of XtestData.

        Uses sign(w . x + b); a sign of -1 is mapped back to class 0.
        NOTE: np.sign returns 0 for an input of exactly 0, which here falls
        through as class 0 (same as the original behavior).
        """
        predictedDataList = []
        for xi in XtestData:
            predictedData = np.sign(np.dot(self.w_vector, xi) + self.bias).astype(int)
            if predictedData == -1:
                predictedData = 0
            predictedDataList.append(predictedData)
        return np.array(predictedDataList)

    # ---------------------------------------------------
    # End of the Class
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement