# Logistic regression with a single feature, fit by batch gradient descent.
import numpy as np

# this function takes a value x and two thetas that determine which side of
# the sigmoid curve x falls on; it returns a probability between 0 and 1
def sigmoid(x, theta0, theta1):

    output = 1 / (1 + np.exp(-(theta0 + theta1 * x)))

    return output
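
# A quick worked example of the function above (illustrative inputs, not from
# the original paste): sigmoid(0, 0, 1) is 0.5, exactly on the decision
# boundary; when theta0 + theta1*x is large and positive the output is near 1,
# and when it is large and negative the output is near 0.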

# thresholds a predicted probability into a 0/1 class label
# (named threshold rather than round to avoid shadowing the built-in)
def threshold(num):

    if num > .5:
        return 1
    else:
        return 0

# this sums the gradient of the cost over all of X for both thetas:
# d/dtheta0 = sum(sigmoid(x) - y), d/dtheta1 = sum((sigmoid(x) - y) * x)
def sumofSigmoid(X, Y, theta0, theta1):

    sum0 = 0
    sum1 = 0

    for i in range(0, len(X)):

        error = sigmoid(X[i], theta0, theta1) - Y[i]
        sum0 += error
        sum1 += error * X[i]

    return np.squeeze(sum0), np.squeeze(sum1)

# minimises theta0 and theta1 using one step of the gradient descent method,
# sounds like descending grades :/
def gradient(X, Y, theta0, theta1, learning_rate):

    sum0, sum1 = sumofSigmoid(X, Y, theta0, theta1)
    temp0 = theta0 - (learning_rate * sum0)
    temp1 = theta1 - (learning_rate * sum1)

    return temp0, temp1

# this function repeats the gradient update for the given number of iterations
def gradeDescent(X, Y, iterations, learning_rate):

    # initial guesses for theta0 and theta1
    tempa = -4
    tempb = 1

    # normalize X to zero mean and unit variance
    X = (X - np.mean(X, axis=0)) / np.std(X, axis=0)

    for i in range(0, iterations):

        tempa, tempb = gradient(X, Y, tempa, tempb, learning_rate)
        print(cost(X, Y, tempa, tempb))

    return tempa, tempb

# returns the total cost of X and Y based on theta, i.e. the cross-entropy
# cost -sum(y * log(h) + (1 - y) * log(1 - h)) where h = sigmoid(x)
def cost(X, Y, theta0, theta1):

    step1 = 0
    step2 = 0

    # for each X and Y in the dataset find the cost for the 1 and 0 cases
    for i in range(0, len(X)):

        temp = sigmoid(X[i], theta0, theta1)
        step1 += Y[i] * np.log(temp)
        step2 += (1 - Y[i]) * np.log(1 - temp)

    # completes the algorithm for finding the cost and returns
    total = -step1 - step2
    return np.mean(total)
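
# --- Usage sketch (not part of the original paste) ---
# A minimal, hypothetical demo of how these functions fit together. The
# synthetic clusters, the 100-iteration count, and the 0.01 learning rate are
# illustrative assumptions, not values from the paste. gradeDescent()
# normalizes X internally, so predictions are made on normalized inputs too.
if __name__ == "__main__":
    np.random.seed(0)

    # two synthetic clusters: class 0 centred near 2.0, class 1 near 8.0
    X = np.concatenate([np.random.normal(2.0, 1.0, 50),
                        np.random.normal(8.0, 1.0, 50)])
    Y = np.concatenate([np.zeros(50), np.ones(50)])

    # fit the two parameters; this prints the cost once per iteration
    theta0, theta1 = gradeDescent(X, Y, 100, 0.01)

    # score the training set on the same normalized features
    Xn = (X - np.mean(X, axis=0)) / np.std(X, axis=0)
    predictions = [threshold(sigmoid(x, theta0, theta1)) for x in Xn]
    print("training accuracy:", np.mean(np.array(predictions) == Y))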