import math

import numpy as np


class FastGradientOptimizer:
    """Gradient descent with a golden-section line search for the step size."""

    def __init__(self, oracle, start):
        self.oracle = oracle
        self.start = start
        self.grad = 0

    def get_val(self, alpha):
        # Loss after a step of size alpha along the negative gradient direction.
        return self.oracle.get_func(self.start - self.grad * alpha)

    # Golden-section search for the step size over the interval [0, b],
    # using n interval reductions.
    def golden_section(self, b, n):
        a = 0.0
        gs = 1.618  # approximation of the golden ratio
        for i in range(n):
            w = (b - a) / gs
            x1 = b - w
            x2 = a + w
            if self.get_val(x1) >= self.get_val(x2):
                a = x1
            else:
                b = x2
        return (a + b) / 2.0

    def optimize(self, n, e):
        # Take at most n gradient steps, stopping early once the gradient
        # norm falls below the tolerance e.
        i = 0
        self.grad = self.oracle.get_grad(self.start)
        while i < n and np.linalg.norm(self.grad) > e:
            self.start = self.start - self.grad * self.golden_section(50, 10)
            self.grad = self.oracle.get_grad(self.start)
            i += 1
        return self.start

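
# Hypothetical sanity check (not part of the original paste): the optimizer
# above should drive a simple quadratic f(w) = ||w - c||^2 to its minimum
# at c. QuadraticOracle and _check_on_quadratic are illustrative names;
# any object exposing get_func/get_grad works as an oracle.
class QuadraticOracle:
    def __init__(self, c):
        self.c = c

    def get_func(self, w):
        return float(np.sum((w - self.c) ** 2))

    def get_grad(self, w):
        return 2.0 * (w - self.c)


def _check_on_quadratic():
    c = np.array([1.0, -2.0])
    w = FastGradientOptimizer(QuadraticOracle(c), np.zeros(2)).optimize(100, 1e-8)
    return w  # should be close to [1.0, -2.0]
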
# *************************************

def base_function(x, weights):
    # Linear score: <x, weights>.
    return x @ weights


def logistic_func(weights, x):
    # Sigmoid of the linear score, i.e. the predicted P(y = +1 | x).
    return 1.0 / (1.0 + math.exp(-base_function(x, weights)))


def logistic_func_all(weights, X):
    return np.array([logistic_func(weights, x) for x in X])

# Cross-entropy loss from the course handout, for labels y_i in {-1, +1}:
#   L(w) = mean_i log(1 + exp(-y_i * <x_i, w>))
def cross_entropy_loss(weights, X, y):
    temp = X.dot(weights)
    temp = np.multiply(temp, -y)
    temp = np.exp(temp)
    temp = np.log1p(temp)
    return np.mean(temp)
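
# Hypothetical spot check (not in the original paste): at w = 0 every margin
# y_i * <x_i, w> is zero, so each term equals log(1 + exp(0)) = log 2,
# regardless of the data.
def _loss_at_zero_is_log2(X, y):
    return np.isclose(cross_entropy_loss(np.zeros(X.shape[1]), X, y),
                      math.log(2.0))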

# Gradient of the cross-entropy loss from the handout:
#   grad L(w) = -mean_i [ y_i * x_i / (1 + exp(y_i * <x_i, w>)) ]
def grad_cross_entropy_loss(weights, X, y):
    denom = np.exp(np.multiply(X @ weights, y)) + 1.0
    # Row i of the summand is y_i * x_i / denom_i.
    summand = X * (y / denom)[:, np.newaxis]
    return -summand.mean(0)
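
# Hypothetical check (not in the original paste): compare the analytic
# gradient against central finite differences of the loss; _numeric_grad
# is an illustrative helper name.
def _numeric_grad(weights, X, y, h=1e-6):
    g = np.zeros_like(weights)
    for j in range(weights.shape[0]):
        step = np.zeros_like(weights)
        step[j] = h
        g[j] = (cross_entropy_loss(weights + step, X, y)
                - cross_entropy_loss(weights - step, X, y)) / (2.0 * h)
    return g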

# Object through which the optimizer queries function and gradient values.
class Oracle:
    def __init__(self, m, y):
        self.m = m
        self.y = y

    def get_grad(self, t):
        return grad_cross_entropy_loss(t, self.m, self.y)

    def get_func(self, t):
        return cross_entropy_loss(t, self.m, self.y)

# Entry point: fit weights by running the optimizer from zero initialization.
def fit_logistic_regression(X, y):
    oracle = Oracle(X, y)
    start = np.zeros(X.shape[1], float)
    return FastGradientOptimizer(oracle, start).optimize(2000, 1e-6)
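
# Hypothetical end-to-end usage (not part of the original paste). Labels are
# assumed to lie in {-1, +1}, consistent with the loss above; the synthetic
# data below is made up for illustration.
if __name__ == "__main__":
    rng = np.random.RandomState(0)
    n, d = 200, 3
    X = rng.randn(n, d)
    true_w = np.array([1.5, -2.0, 0.5])
    y = np.where(X @ true_w > 0, 1.0, -1.0)
    y[rng.rand(n) < 0.05] *= -1  # flip a few labels so the data is not separable
    w = fit_logistic_regression(X, y)
    # sigmoid(score) > 0.5 exactly when the score is positive
    preds = np.where(X @ w > 0, 1.0, -1.0)
    print("train accuracy:", np.mean(preds == y))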