--Neural Network to solve XOR problem
--created by Adam Dorwart

math.randomseed(os.time())
dofile("NNlib.lua")
dofile("TrainingSet.lua")

print("Enter the amount of hidden layers in Neural Network (recommended|1) : ")
hiddenLayers = tonumber(io.stdin:read())

print("Enter the amount of Neurons per hidden layer (recommended|4) : ")
neuronsPerLayer = tonumber(io.stdin:read())

print("Enter the learning rate (0-100) (recommended|50) : ")
learningRate = tonumber(io.stdin:read())/100

network = NeuralNetwork.create(TrainingSet.inputs,TrainingSet.outputs,hiddenLayers,neuronsPerLayer,learningRate)

print("Enter the number of attempts at teaching the Neural Network (try 4000): ")
attempts = tonumber(io.stdin:read())

print("Teaching...")

network:train(TrainingSet, attempts)
print(network:test(TrainingSet))

print("Saving Neural Network...")
NN = io.open("XORGate","w")
NN:write(network:save())
NN:flush()
NN:close()
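
-- A minimal round-trip sketch (not part of the original demo): restore the
-- network just written to "XORGate" and query it. Assumes TrainingSet
-- describes the two-input XOR problem, so forewardPropagate(1,0) should
-- approach 1 once training has converged.
local saved = io.open("XORGate","r")
local restored = NeuralNetwork.load(saved:read("*a"))
saved:close()
print(restored:forewardPropagate(1,0)[1])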

-- NNlib.lua -- the neural network library loaded above via dofile

ACTIVATION_RESPONSE = 1


NeuralNetwork = {
    transfer = function(x) return 1 / (1 + math.exp(-x / ACTIVATION_RESPONSE)) end --This is the Transfer function (in this case a sigmoid)
}

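-- Note: transfer above is the logistic sigmoid 1/(1+e^(-x/r)) with
-- r = ACTIVATION_RESPONSE; larger r flattens the curve, smaller r
-- sharpens it toward a step function.
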
function NeuralNetwork.create(_numInputs, _numOutputs, _numHiddenLayers, _neuronsPerLayer, _learningRate)
    _numInputs = _numInputs or 1
    _numOutputs = _numOutputs or 1
    _numHiddenLayers = _numHiddenLayers or math.ceil(_numInputs/2)
    _neuronsPerLayer = _neuronsPerLayer or math.ceil(_numInputs*.66666+_numOutputs)
    _learningRate = _learningRate or .5
    --order goes network[layer][neuron][weight]
    local network = setmetatable({
        learningRate = _learningRate
    },{ __index = NeuralNetwork});
    network[1] = {}   --Input Layer
    for i = 1,_numInputs do
        network[1][i] = {}
    end
    for i = 2,_numHiddenLayers+2 do --plus 2 represents the output layer (also need to skip input layer)
        network[i] = {}
        local neuronsInLayer = _neuronsPerLayer
        if i == _numHiddenLayers+2 then
            neuronsInLayer = _numOutputs
        end
        for j = 1,neuronsInLayer do
            network[i][j] = {bias = math.random()*2-1}
            local numNeuronInputs = table.getn(network[i-1])
            for k = 1,numNeuronInputs do
                network[i][j][k] = math.random()*2-1  --random initial weight between -1 and 1
            end
        end
    end
    return network
end
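
-- Layout sketch (illustrative): NeuralNetwork.create(2,1,1,4,0.5) builds a
-- 2-4-1 net, so network[1] holds 2 input slots, network[2] holds 4 hidden
-- neurons (each with a bias and 2 incoming weights), and network[3] holds
-- the single output neuron.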

function NeuralNetwork:forewardPropagate(...)
    local arg = {...} --capture varargs into a table (Lua 5.1 style; the implicit 'arg' table is deprecated)
    if table.getn(arg) ~= table.getn(self[1]) and type(arg[1]) ~= "table" then
        error("Neural Network received "..table.getn(arg).." input[s] (expected "..table.getn(self[1]).." input[s])",2)
    elseif type(arg[1]) == "table" and table.getn(arg[1]) ~= table.getn(self[1]) then
        error("Neural Network received "..table.getn(arg[1]).." input[s] (expected "..table.getn(self[1]).." input[s])",2)
    end
    local outputs = {}
    for i = 1,table.getn(self) do
        for j = 1,table.getn(self[i]) do
            if i == 1 then
                if type(arg[1]) == "table" then
                    self[i][j].result = arg[1][j]
                else
                    self[i][j].result = arg[j]
                end
            else
                self[i][j].result = self[i][j].bias
                for k = 1,table.getn(self[i][j]) do
                    self[i][j].result = self[i][j].result + (self[i][j][k]*self[i-1][k].result)
                end
                self[i][j].result = NeuralNetwork.transfer(self[i][j].result)
                if i == table.getn(self) then
                    table.insert(outputs,self[i][j].result)
                end
            end
        end
    end
    return outputs
end
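
-- Usage sketch: both calling styles are accepted and equivalent; each
-- returns a flat table of the output layer's results:
--   net:forewardPropagate(1,0)
--   net:forewardPropagate({1,0})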

function NeuralNetwork:backwardPropagate(inputs,desiredOutputs)
    if table.getn(inputs) ~= table.getn(self[1]) then
        error("Neural Network received "..table.getn(inputs).." input[s] (expected "..table.getn(self[1]).." input[s])",2)
    elseif table.getn(desiredOutputs) ~= table.getn(self[table.getn(self)]) then
        error("Neural Network received "..table.getn(desiredOutputs).." desired output[s] (expected "..table.getn(self[table.getn(self)]).." desired output[s])",2)
    end
    self:forewardPropagate(inputs) --update the internal inputs and outputs
    for i = table.getn(self),2,-1 do --iterate backwards (nothing to calculate for input layer)
        for j = 1,table.getn(self[i]) do
            if i == table.getn(self) then --special calculations for output layer
                self[i][j].delta = (desiredOutputs[j] - self[i][j].result) * self[i][j].result * (1 - self[i][j].result)
            else
                local weightDelta = 0
                for k = 1,table.getn(self[i+1]) do
                    weightDelta = weightDelta + self[i+1][k][j]*self[i+1][k].delta
                end
                self[i][j].delta = self[i][j].result * (1 - self[i][j].result) * weightDelta
            end
        end
    end
    for i = 2,table.getn(self) do
        for j = 1,table.getn(self[i]) do
            self[i][j].bias = self[i][j].bias + self[i][j].delta * self.learningRate --update the bias incrementally, like the weights below
            for k = 1,table.getn(self[i][j]) do
                self[i][j][k] = self[i][j][k] + self[i][j].delta * self.learningRate * self[i-1][k].result
            end
        end
    end
end
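
-- The deltas above follow the standard backprop rule for sigmoid units:
--   output layer:  delta_j = (target_j - out_j) * out_j * (1 - out_j)
--   hidden layer:  delta_j = out_j * (1 - out_j) * sum_k( w_kj * delta_k )
-- Every bias then moves by learningRate * delta, and every weight by
-- learningRate * delta * (the upstream neuron's result).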

function NeuralNetwork:train(trainingSet, attempts)
    while attempts > 0 do
        for i = 1,table.getn(trainingSet) do
            self:backwardPropagate(trainingSet[i].input,trainingSet[i].output)
        end
        attempts = attempts - 1
    end
end
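
-- Note: train() makes `attempts` full passes (epochs) over the training set,
-- updating the weights after every individual example (online learning).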

function NeuralNetwork:test(trainingSet)
    local testResults = "Training Test For Neural Network:\n"
    local totalErrorMargin = 0
    for i = 1,table.getn(trainingSet) do
        local results = self:forewardPropagate(trainingSet[i].input)
        local errorMargin = 0
        testResults = testResults.."\tSet #"..i.."\n\t\tInput:\n"
        for j = 1,trainingSet.inputs do
            testResults = testResults.."\t\t\t"..trainingSet[i].input[j].."\n"
        end
        testResults = testResults.."\t\tDesired Output:\n"
        for j = 1,trainingSet.outputs do
            testResults = testResults.."\t\t\t"..trainingSet[i].output[j].."\n"
        end
        testResults = testResults.."\t\tActual Output:\n"
        for j = 1,table.getn(results) do
            testResults = testResults.."\t\t\t"..results[j].."\n"
            errorMargin = errorMargin + math.abs(results[j] - trainingSet[i].output[j])
        end
        errorMargin = errorMargin / table.getn(results)
        totalErrorMargin = totalErrorMargin + errorMargin
        testResults = testResults.."\t\tAverage Output Error Margin: "..errorMargin.."\n\t\tAverage Percentage of Accuracy: "..string.format("%.3f",100-errorMargin*100).."%\n"
    end
    totalErrorMargin = totalErrorMargin / table.getn(trainingSet)
    testResults = testResults.."    Overall Average Error Margin of Trained Sets: "..totalErrorMargin.."\n  Overall Average Percentage of Accuracy: "..string.format("%.3f",100-totalErrorMargin*100).."%\n"
    return testResults
end

function NeuralNetwork:save()
    --[[
    File specs:
        |INFO| - should be FF BP NN
        |I| - number of inputs
        |O| - number of outputs
        |HL| - number of hidden layers
        |NHL| - number of neurons per hidden layer
        |LR| - learning rate
        |BW| - bias and weight values
    ]]--
    local data = "|INFO|FF BP NN|I|"..tostring(table.getn(self[1])).."|O|"..tostring(table.getn(self[table.getn(self)])).."|HL|"..tostring(table.getn(self)-2).."|NHL|"..tostring(table.getn(self[2])).."|LR|"..tostring(self.learningRate).."|BW|"
    for i = 2,table.getn(self) do -- nothing to save for input layer
        for j = 1,table.getn(self[i]) do
            local neuronData = tostring(self[i][j].bias).."{"
            for k = 1,table.getn(self[i][j]) do
                neuronData = neuronData..tostring(self[i][j][k])
                neuronData = neuronData..","
            end
            data = data..neuronData.."}"
        end
    end
    data = data.."|END|"
    return data
end
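
-- Serialized form of a tiny 1-1-1 net (bias/weight values illustrative):
--   |INFO|FF BP NN|I|1|O|1|HL|1|NHL|1|LR|0.5|BW|0.12{-0.3,}0.7{0.4,}|END|
-- Each neuron is written as bias{w1,w2,...,} with a trailing comma.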

function NeuralNetwork.load(data)
    local dataPos = string.find(data,"|")+1
    local currentChunk = string.sub( data, dataPos, string.find(data,"|",dataPos)-1)
    dataPos = string.find(data,"|",dataPos)+1
    local _inputs, _outputs, _hiddenLayers, _neuronsPerLayer, _learningRate
    local biasWeights = {}
    local errorExit = false
    while currentChunk ~= "END" and not errorExit do
        if currentChunk == "INFO" then
            currentChunk = string.sub( data, dataPos, string.find(data,"|",dataPos)-1)
            dataPos = string.find(data,"|",dataPos)+1
            if currentChunk ~= "FF BP NN" then
                errorExit = true
            end
        elseif currentChunk == "I" then
            currentChunk = string.sub( data, dataPos, string.find(data,"|",dataPos)-1)
            dataPos = string.find(data,"|",dataPos)+1
            _inputs = tonumber(currentChunk)
        elseif currentChunk == "O" then
            currentChunk = string.sub( data, dataPos, string.find(data,"|",dataPos)-1)
            dataPos = string.find(data,"|",dataPos)+1
            _outputs = tonumber(currentChunk)
        elseif currentChunk == "HL" then
            currentChunk = string.sub( data, dataPos, string.find(data,"|",dataPos)-1)
            dataPos = string.find(data,"|",dataPos)+1
            _hiddenLayers = tonumber(currentChunk)
        elseif currentChunk == "NHL" then
            currentChunk = string.sub( data, dataPos, string.find(data,"|",dataPos)-1)
            dataPos = string.find(data,"|",dataPos)+1
            _neuronsPerLayer = tonumber(currentChunk)
        elseif currentChunk == "LR" then
            currentChunk = string.sub( data, dataPos, string.find(data,"|",dataPos)-1)
            dataPos = string.find(data,"|",dataPos)+1
            _learningRate = tonumber(currentChunk)
        elseif currentChunk == "BW" then
            currentChunk = string.sub( data, dataPos, string.find(data,"|",dataPos)-1)
            dataPos = string.find(data,"|",dataPos)+1
            local subPos = 1
            local subChunk
            for i = 1,_hiddenLayers+1 do
                biasWeights[i] = {}
                local neuronsInLayer = _neuronsPerLayer
                if i == _hiddenLayers+1 then
                    neuronsInLayer = _outputs
                end
                for j = 1,neuronsInLayer do
                    biasWeights[i][j] = {}
                    biasWeights[i][j].bias = tonumber(string.sub(currentChunk,subPos,string.find(currentChunk,"{",subPos)-1))
                    subPos = string.find(currentChunk,"{",subPos)+1
                    subChunk = string.sub( currentChunk, subPos, string.find(currentChunk,",",subPos)-1)
                    local maxPos = string.find(currentChunk,"}",subPos)
                    while subPos < maxPos do
                        table.insert(biasWeights[i][j],tonumber(subChunk))
                        subPos = string.find(currentChunk,",",subPos)+1
                        if string.find(currentChunk,",",subPos) ~= nil then
                            subChunk = string.sub( currentChunk, subPos, string.find(currentChunk,",",subPos)-1)
                        end
                    end
                    subPos = maxPos+1
                end
            end
        end
        currentChunk = string.sub( data, dataPos, string.find(data,"|",dataPos)-1)
        dataPos = string.find(data,"|",dataPos)+1
    end
    if errorExit then
        error("Failed to load Neural Network: "..currentChunk,2)
    end
    local network = setmetatable({
        learningRate = _learningRate
    },{ __index = NeuralNetwork});
    network[1] = {}   --Input Layer
    for i = 1,_inputs do
        network[1][i] = {}
    end
    for i = 2,_hiddenLayers+2 do --plus 2 represents the output layer (also need to skip input layer)
        network[i] = {}
        local neuronsInLayer = _neuronsPerLayer
        if i == _hiddenLayers+2 then
            neuronsInLayer = _outputs
        end
        for j = 1,neuronsInLayer do
            network[i][j] = {bias = biasWeights[i-1][j].bias}
            local numNeuronInputs = table.getn(network[i-1])
            for k = 1,numNeuronInputs do
                network[i][j][k] = biasWeights[i-1][j][k]
            end
        end
    end
    return network
end