Guest User

Untitled

a guest
Jan 16th, 2018
125
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 1.71 KB | None | 0 0
import numpy as np

# Setting the random seed, feel free to change it and see different solutions.
# NOTE: seeding here makes the random weight/bias initialization below
# reproducible across runs.
np.random.seed(42)
  4.  
  5. def stepFunction(t):
  6. if t >= 0:
  7. return 1
  8. return 0
  9.  
  10. def prediction(X, W, b):
  11. return stepFunction((np.matmul(X,W)+b)[0])
  12.  
  13. # TODO: Fill in the code below to implement the perceptron trick.
  14. # The function should receive as inputs the data X, the labels y,
  15. # the weights W (as an array), and the bias b,
  16. # update the weights and bias W, b, according to the perceptron algorithm,
  17. # and return W and b.
  18. def perceptronStep(X, y, W, b, learn_rate = 0.01):
  19. for i in range(len(X)):
  20. y_hat = prediction(X[i],W,b)
  21. if y[i]-y_hat == 1:
  22. W[0] += X[i][0]*learn_rate
  23. W[1] += X[i][1]*learn_rate
  24. b += learn_rate
  25. elif y[i]-y_hat == -1:
  26. W[0] -= X[i][0]*learn_rate
  27. W[1] -= X[i][1]*learn_rate
  28. b -= learn_rate
  29. return W, b
  30.  
  31. # This function runs the perceptron algorithm repeatedly on the dataset,
  32. # and returns a few of the boundary lines obtained in the iterations,
  33. # for plotting purposes.
  34. # Feel free to play with the learning rate and the num_epochs,
  35. # and see your results plotted below.
  36. def trainPerceptronAlgorithm(X, y, learn_rate = 0.01, num_epochs = 25):
  37. x_min, x_max = min(X.T[0]), max(X.T[0])
  38. y_min, y_max = min(X.T[1]), max(X.T[1])
  39. W = np.array(np.random.rand(2,1))
  40. b = np.random.rand(1)[0] + x_max
  41. # These are the solution lines that get plotted below.
  42. boundary_lines = []
  43. for i in range(num_epochs):
  44. # In each epoch, we apply the perceptron step.
  45. W, b = perceptronStep(X, y, W, b, learn_rate)
  46. boundary_lines.append((-W[0]/W[1], -b/W[1]))
  47. return boundary_lines
Add Comment
Please, Sign In to add comment