# NeuralNetworkFrameworkBetaVersion3
# Source: Pastebin paste, Dec 9th, 2022 (edited). Note: the paste is
# truncated — a few original lines are missing from the backprop routine.
'''
A toy neural-network framework. During backpropagation it nudges only the
weights (the biases are never updated). It supports networks with multiple
output neurons.
'''
4.
5. import numpy as np
6. import random
7.
class Layer:
    """A fully connected layer computing ``inputs . weights + biases``."""

    def __init__(self, inputNodes, outputNodes):
        # Small random weights (scaled standard normal), zero biases.
        self.weights = np.random.standard_normal((inputNodes, outputNodes)) * 0.1
        self.biases = np.zeros((1, outputNodes))

    def forward(self, inputs):
        """Run the affine transform; the result is stored on ``self.output``."""
        self.output = np.dot(inputs, self.weights) + self.biases
class Activation_ReLU:
    """Element-wise rectified linear unit activation."""

    def forward(self, inputs):
        # Clamp every negative entry to zero; result stored on self.output.
        self.output = np.clip(inputs, 0, None)
18.
learningRate = 0.0000001

def backwards(network, input_, desired):
    """One backpropagation step over `network`, nudging Layer weights only.

    Relies on each layer's `.output` (shape (1, n)) stored by a preceding
    forward pass. Biases are never updated, matching the framework's stated
    design. `input_` is unused but kept for interface compatibility.

    NOTE(review): the pasted source was truncated (the bodies of the
    gradient-seeding loop and the gradient-propagation loop were missing);
    both loop bodies below are reconstructed from context — confirm against
    the original implementation. Also note the first layer's weights are
    never updated (the walk stops at index 1), preserved from the original.
    """
    last = len(network) - 1

    # d(error)/d(output) for squared error: 2 * (prediction - target).
    dError = 2 * (network[last].output - desired)

    # One gradient slot per neuron per layer, sized to the widest layer
    # (generalizes the original hard-coded maximum of 5 neurons).
    widest = max(layer.output.shape[1] for layer in network)
    gradients = np.zeros((len(network), widest))

    # Seed the output layer's gradients from the error derivative.
    for j in range(network[last].output.shape[1]):
        gradients[last][j] = dError[0][j]

    currentLayer = last
    while currentLayer > 0:  # Per layer, from the output backwards
        if isinstance(network[currentLayer - 1], Activation_ReLU):
            # Activation layers own no weights; nothing to nudge here.
            pass
        else:
            # Nudge the weights of the current layer.
            for j in range(network[currentLayer].output.shape[1]):
                for i in range(network[currentLayer - 1].output.shape[1]):
                    network[currentLayer].weights[i][j] -= (
                        network[currentLayer - 1].output[0][i]
                        * gradients[currentLayer][j]
                        * learningRate
                    )

            # Accumulate gradients for the previous layer's neurons so the
            # next iteration of the walk can adjust its incoming weights.
            for j in range(network[currentLayer].output.shape[1]):
                for i in range(network[currentLayer - 1].output.shape[1]):
                    gradients[currentLayer - 1][i] += (
                        network[currentLayer].weights[i][j]
                        * gradients[currentLayer][j]
                    )

        currentLayer -= 1  # Go to previous layer
    print("Error: ", (network[len(network) - 1].output - desired))
51.
# Create training data: every target is twice the input, on both outputs.
inputs = [3, 6, 2, 8, 12, 90, 45, 23, 88, 18]
desired = np.array([[6, 6], [12, 12], [4, 4], [16, 16], [24, 24], [180, 180], [90, 90], [46, 46], [176, 176], [36, 36]])

# Create neural network: 1 -> 5 -> 3 -> 4 -> 2
layer1 = Layer(1, 5)
layer2 = Layer(5, 3)
layer3 = Layer(3, 4)
layer4 = Layer(4, 2)

network = [layer1, layer2, layer3, layer4]

def _forward_pass(value):
    """Feed `value` through every layer in order, layer by layer."""
    signal = value
    for layer in network:
        layer.forward(signal)
        signal = layer.output

# Train the network
for iteration in range(5000):
    for sample, target in zip(inputs, desired):
        _forward_pass(sample)
        backwards(network, sample, target)

# Test the network
userInput = 333
_forward_pass(userInput)

print("Guess: ", layer4.output)