# NeuralNetworkFrameworkBetaVersion1

Dec 9th, 2022 (edited)
952
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
'''
This neural network framework only nudges weights when it performs backpropagation. It also only works on a single network output.
'''
4.
import numpy as np
import random
7.
class Layer:
    """A fully connected (dense) layer: output = inputs @ weights + biases.

    Weights have shape (inputNodes, outputNodes); biases have shape
    (1, outputNodes) so they broadcast over a batch of row vectors.
    """

    def __init__(self, inputNodes, outputNodes):
        # Small random init (scaled by 0.1) keeps early outputs modest so the
        # tiny fixed learning rate used by `backwards` does not diverge.
        # (Removed a commented-out constant-weight initializer left over
        # from debugging.)
        self.weights = 0.1 * np.random.randn(inputNodes, outputNodes)
        self.biases = np.zeros((1, outputNodes))

    def forward(self, inputs):
        # Affine transform. The result is stored on the instance rather than
        # returned, because the rest of the script reads `.output` after each
        # forward pass.
        self.output = np.dot(inputs, self.weights) + self.biases
16.
class Activation_ReLU:
    """Rectified linear activation: negative inputs are clamped to zero."""

    def forward(self, inputs):
        # Element-wise clamp; stored on the instance to match Layer.forward's
        # convention of exposing results via `.output`.
        clamped = np.maximum(inputs, 0)
        self.output = clamped
20.
learningRate = 0.0001


def backwards(network, input_, desired):
    """Perform one backpropagation step over `network` for a single sample.

    NOTE(review): the pasted original was missing the lines that built the
    `gradients` list and the body of the gradient-propagation loop, and it
    never updated the first layer's weights (that branch was commented out).
    This reconstruction restores a working pass while keeping the documented
    contract: only weights are nudged (biases are never updated) and only a
    single scalar network output / desired value is supported.

    Parameters
    ----------
    network : list
        Weight layers ordered input -> output (the script passes
        [layer1, layer2, layer3]).  Each element must expose `.weights`
        (2-D, inputs x outputs) and `.output` (row vector from the most
        recent forward pass).  Assumes every element is a weight layer —
        activation objects must not be in this list.
    input_ : scalar or sequence
        The raw sample fed to the first layer in the forward pass.
    desired : scalar
        Target value for the single network output.
    """
    last = len(network) - 1

    # gradients[i] = dError/d(output of network[i]).  Seed the last entry
    # with the derivative of the squared error (output - desired)^2.
    gradients = [None] * len(network)
    gradients[last] = 2 * (network[last].output[0] - desired)

    for layer in range(last, -1, -1):  # walk from output layer back to input
        # Values that fed this layer in the forward pass: the previous
        # layer's output, or the raw sample for the first layer.
        if layer > 0:
            layer_inputs = network[layer - 1].output[0]
        else:
            layer_inputs = np.atleast_1d(np.asarray(input_, dtype=float))

        grad = gradients[layer]

        # Nudge the weights (weights only — biases are deliberately left
        # untouched, as advertised by the module docstring).
        for j in range(len(network[layer].output[0])):  # neuron in this layer
            for i in range(len(layer_inputs)):  # neuron feeding it
                network[layer].weights[i][j] -= (
                    layer_inputs[i] * grad[j] * learningRate
                )

        # Propagate the gradient to the previous layer.  Computed AFTER the
        # nudge, matching the ordering of the two loops in the original.
        if layer > 0:
            prev_grad = np.zeros(len(layer_inputs))
            for i in range(len(layer_inputs)):
                for j in range(len(network[layer].output[0])):
                    prev_grad[i] += network[layer].weights[i][j] * grad[j]
            gradients[layer - 1] = prev_grad

    print("Error: ", (network[last].output[0] - desired))
73.
def main():
    """Build, train, and test a tiny 1-3-3-1 network that doubles its input."""
    # Training data: each desired value is exactly double the input.
    inputs = [4, 6, 1, 3, 9, 2, 3, 7, 10, 34]
    desired = [8, 12, 2, 6, 18, 4, 6, 14, 20, 68]

    # Create the neural network.
    # NOTE(review): the activation objects mirror the original script but are
    # never used in the forward chain below — the network is purely linear.
    # Wiring them in would change behavior, so they are only flagged here.
    layer1 = Layer(1, 3)
    activation1 = Activation_ReLU()
    layer2 = Layer(3, 3)
    activation2 = Activation_ReLU()
    layer3 = Layer(3, 1)

    # Train the network: 500 epochs, one backward pass per training sample.
    for epoch in range(500):
        for i in range(len(inputs)):  # was a hard-coded range(10)
            layer1.forward(inputs[i])
            layer2.forward(layer1.output)
            layer3.forward(layer2.output)
            backwards([layer1, layer2, layer3], inputs[i], desired[i])

    # Test the network on a value outside the training set.
    userInput = 49
    layer1.forward(userInput)
    layer2.forward(layer1.output)
    layer3.forward(layer2.output)
    print("Guess: ", layer3.output)


if __name__ == "__main__":
    main()
98.