import numpy as np
import matplotlib.pyplot as plt

# Hypothesis: sigmoid of w * x
hypo = lambda _w, _x: 1 / (1 + np.exp(-_w * _x))
# Cross-entropy cost for a single sample
cost = lambda _hypo, _y: -(1 - _y) * np.log(1 - _hypo) - _y * np.log(_hypo)

# Gradient of the total cost with respect to w,
# estimated numerically with a central difference
def cost_gradient(w, X, Y):
    # h is the step size of the finite difference in w
    h = 1e-2

    # Total cost at w + h
    W = [w + h for i in range(len(X))]
    _hypo = list(map(hypo, W, X))
    fxh1 = sum(map(cost, _hypo, Y))

    # Total cost at w - h
    W = [w - h for i in range(len(X))]
    _hypo = list(map(hypo, W, X))
    fxh2 = sum(map(cost, _hypo, Y))

    # Central difference: (f(w + h) - f(w - h)) / (2h)
    grad = (fxh1 - fxh2) / (2 * h)

    return grad

# Input
X = [i * 0.01 for i in range(-200, 200)]
# Answer: label is 1 (True) where the input is positive, one label per input
Y = [x > 0 for x in X]
# Weight
w = 0.6
# Learning rate
lr = 0.001

costs = []
trials = [i for i in range(10)]

for t in trials:
    W = [w for i in range(len(X))]
    _hypo = list(map(hypo, W, X))
    _cost = sum(map(cost, _hypo, Y)) / len(_hypo)
    costs.append(_cost)

    grad = cost_gradient(w, X, Y)

    w = w - lr * grad
    plt.plot(t, _cost, "o", label="w = {0:4.3f}".format(w))

plt.plot(trials, costs)
plt.xlabel("trials")
plt.ylabel("cost")
plt.grid()
plt.legend(numpoints=1, loc="upper right", ncol=2)
plt.show()
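
# --- Sanity check on cost_gradient (not part of the original paste; a minimal
# sketch assuming the standard logistic-regression derivative) ---
# For this cost, d/dw [-y*log(h) - (1-y)*log(1-h)] = (h - y) * x with
# h = sigmoid(w * x), so the gradient summed over the data set is
# sum((h_i - y_i) * x_i). The central-difference estimate should agree closely.
def analytic_gradient(_w, _X, _Y):
    return sum((hypo(_w, x) - y) * x for x, y in zip(_X, _Y))

print("numeric gradient :", cost_gradient(0.6, X, Y))
print("analytic gradient:", analytic_gradient(0.6, X, Y))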