from scipy.spatial import distance
import numpy as np
from sklearn.base import BaseEstimator


class LinearReg(BaseEstimator):
    # eta == gradient step size
    def __init__(self, gd_type, loss_type="MSE",
                 tolerance=1, max_iter=15, w0=None, alpha=0.001, eta=0.01):
        self.loss_type = loss_type  # 'MSE' or 'R' (coefficient of determination)
        self.gd_type = gd_type      # 'full', 'stochastic', or anything else for momentum
        self.tolerance = tolerance  # stop when the weights move less than this between iterations
        self.max_iter = max_iter
        self.w0 = w0                # holds the previous weight vector during fit
        self.alpha = alpha          # momentum coefficient
        self.w = None               # current weight vector
        self.eta = eta
        self.iterations = 0
        self.loss_history = None    # list of loss function values at each training iteration

    def fit(self, _X, _y):
        if self.gd_type == 'stochastic':
            # draw a single random subsample of 300 rows (with replacement) once, before the loop
            indexes = np.random.randint(_X.shape[0], size=300)
            y = _y[indexes]
            X = _X[indexes, :]
        else:
            X = _X
            y = _y

        self.loss_history = []
        # random initialization; self.w0 keeps the previous iterate for the stopping criterion
        self.w = np.random.uniform(0.8, 0.9, X.shape[1])
        self.w0 = np.random.uniform(1.5, 1.6, X.shape[1])
        grad_prev = np.zeros(X.shape[1])  # previous update step (momentum velocity)
        c = 0  # iterations actually performed
        for i in range(self.max_iter):
            grad = self.calc_gradient(X, y)
            if self.gd_type in ('full', 'stochastic'):
                # plain gradient descent step
                new_w = self.w - self.eta * grad
                self.w0 = self.w.copy()
                self.w = new_w
            else:
                # momentum: keep a fraction of the previous step
                grad_cur = self.alpha * grad_prev + self.eta * grad
                self.w0 = self.w.copy()
                self.w = self.w - grad_cur
                grad_prev = grad_cur

            c = c + 1
            self.loss_history.append(self.calc_loss(X, y))
            # stop once the weights have barely moved
            dist = distance.euclidean(self.w0, self.w)
            if dist <= self.tolerance:
                break

        self.iterations = c
        return self

    def predict(self, X):
        if self.w is None:
            raise Exception('Not trained yet')
        return np.dot(X, self.w)

    def calc_gradient(self, X, y):
        if self.loss_type == 'R':
            # gradient of 1 - R^2, i.e. SSE normalized by the total sum of squares
            return 2 * np.dot(X.T, self.predict(X) - y) / np.sum((y - np.mean(y)) ** 2, axis=0)
        elif self.loss_type == 'MSE':
            # gradient of the mean squared error
            return (2 / X.shape[0]) * np.dot(X.T, self.predict(X) - y)

    def calc_loss(self, X, y):
        if self.loss_type == 'MSE':
            return np.mean((np.dot(X, self.w) - y) ** 2, axis=0)
        else:
            # R^2 score; the gradient above descends on 1 - R^2, which raises R^2
            return 1 - np.sum((self.predict(X) - y) ** 2, axis=0) / np.sum((y - np.mean(y)) ** 2, axis=0)