import numpy as np
import matplotlib.pyplot as plt

bias = 1

# XOR function truth table, one row per data point: (x, y, bias, label)
xorFunction = [[1, 1, bias, 0],
               [1, 0, bias, 1],
               [0, 1, bias, 1],
               [0, 0, bias, 0]]

# Weight configuration
h1p = 0.65
h2p = 1.10
b = -1.00
x_x = -5.00
x_y = 1.90

# Weights (w_x, w_y, b)
h1Weights = [h1p, h1p, b]
h2Weights = [h2p, h2p, b]
xorWeights = [x_x, x_y, b]

# Dot product of a weight vector and an input vector
def netValue(weights, inputs):
    netVal = 0
    for i in range(len(weights)):
        netVal = netVal + (weights[i] * inputs[i])
    return netVal
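
# Sanity check (an addition, not in the original paste): netValue is just a
# dot product, so it should agree with numpy's np.dot on the same vectors.
assert np.isclose(netValue([1.0, 2.0, 3.0], [4.0, 5.0, 6.0]),
                  np.dot([1.0, 2.0, 3.0], [4.0, 5.0, 6.0]))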

# Activation function using tanh
def tanhActivation(x):
    return np.tanh(x)

# Activation function using the step function
def stepActivation(x):
    if x >= 0:
        return 1
    else:
        return 0
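
# Quick check (an addition, not in the original paste): tanh squashes values
# smoothly into (-1, 1), while the step function snaps straight to 0 or 1.
assert np.isclose(tanhActivation(0.5), 0.4621, atol=1e-4)
assert stepActivation(0.5) == 1 and stepActivation(-0.5) == 0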

# Perceptron update using stepActivation
# @inputs is a row of xorFunction: (x, y, bias, label)
def stepUpdate(weights, inputs, learningRate):
    dataLabel = inputs[3]
    dataPoint = inputs[:3]  # copy the features so the caller's row is not mutated
    # actVal = netValue --> activation
    actVal = stepActivation(netValue(weights, dataPoint))
    # perceptron learning rule: w_i += learningRate * x_i * (label - output)
    for i in range(len(weights)):
        weights[i] = weights[i] + learningRate * dataPoint[i] * (dataLabel - actVal)
    return weights
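
# Worked example (an addition, not in the original paste): one update on the
# row (1, 1, bias, label=0). Zero weights give net = 0, so the step output is
# 1, label - output = -1, and every weight moves by -learningRate * x_i.
exampleWeights = stepUpdate([0.0, 0.0, 0.0], [1, 1, bias, 0], 0.01)
assert exampleWeights == [-0.01, -0.01, -0.01]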

##### MAIN FUNCTION #####

# Learning: only the output-layer (xor) weights are trained;
# the hidden-layer weights stay fixed.
epochs = 10000
learningRate = 0.01
for epoch in range(epochs):
    for dataRow in xorFunction:  # each data point
        dataLabel = dataRow[3]
        dataPoint = dataRow[:3]  # features only: (x, y, bias)
        # hlVal1 = netValue --> activation (of first node of the hidden layer)
        hlVal1 = tanhActivation(netValue(h1Weights, dataPoint))
        # hlVal2 = netValue --> activation (of second node)
        hlVal2 = tanhActivation(netValue(h2Weights, dataPoint))
        # construct the input "row" for the next layer
        xorInput = [hlVal1, hlVal2, bias, dataLabel]
        # perform an update (step)
        xorWeights = stepUpdate(xorWeights, xorInput, learningRate)
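
# Evaluation (an addition, not in the original paste): run the four XOR rows
# through the trained network and print the predicted label next to the target.
for row in xorFunction:
    h1 = tanhActivation(netValue(h1Weights, row[:3]))
    h2 = tanhActivation(netValue(h2Weights, row[:3]))
    prediction = stepActivation(netValue(xorWeights, [h1, h2, bias]))
    print("input:", row[:2], "target:", row[3], "predicted:", prediction)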

# Plotting
incVal = .05  # resolution
minVal = -1
maxVal = 2.2
size = incVal * 250  # calculates reasonable size for points

print("Generating plot... ", end="", flush=True)
xValues = np.arange(minVal, maxVal, incVal)
yValues = np.arange(minVal, maxVal, incVal)
x, y = np.meshgrid(xValues, yValues)
testGrid = np.array((x.ravel(), y.ravel())).T

trueLabel, falseLabel = 'True', 'False'
for i in range(len(testGrid)):
    # generate a data "row" from testGrid
    features = [testGrid[i][0], testGrid[i][1], bias]
    # actVal1/actVal2 = netValue --> activation
    actVal1 = tanhActivation(netValue(h1Weights, features))
    actVal2 = tanhActivation(netValue(h2Weights, features))
    # construct the next layer's data "row"
    xorInput = [actVal1, actVal2, bias]
    # result = netValue --> activation
    result = stepActivation(netValue(xorWeights, xorInput))
    if result == 1:
        plt.plot(features[0], features[1], 'bs', ms=size, label=trueLabel)
        trueLabel = '_nolegend_'  # label each class only once in the legend
    else:
        plt.plot(features[0], features[1], 'rs', ms=size, label=falseLabel)
        falseLabel = '_nolegend_'

print("done")
plt.xlim((minVal, maxVal - incVal))
plt.ylim((minVal, maxVal - incVal))
plt.title('XOR function (p xor q)')
plt.xlabel('p value')
plt.ylabel('q value')
plt.legend()
plt.show()
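
# Alternative rendering (a sketch, not in the original paste): the same
# decision regions can be drawn in one vectorized pass with plt.pcolormesh,
# which is much faster than plotting each grid cell in its own plt.plot call.
# It reuses x, y, testGrid, and the trained weights from above.
h1Grid = np.tanh(testGrid @ np.array(h1Weights[:2]) + h1Weights[2] * bias)
h2Grid = np.tanh(testGrid @ np.array(h2Weights[:2]) + h2Weights[2] * bias)
outGrid = (xorWeights[0] * h1Grid + xorWeights[1] * h2Grid
           + xorWeights[2] * bias) >= 0
plt.pcolormesh(x, y, outGrid.reshape(x.shape).astype(float),
               cmap='bwr_r', shading='nearest')
plt.title('XOR function (p xor q)')
plt.show()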