programcreator

NeuralNet

Mar 7th, 2015
--[[
  A new attempt at coding a neural network
  Author: Wassil Janssen a.k.a. Creator
]]--
--[[Class Neuron]]
local function Neuron(numOutputs,myIndex)
  --[[Private]]--
  local m_outputVal = 0
  local m_outputWeights = {}
  local m_myIndex = myIndex
  local m_gradient = 0
  local eta = 0.15  -- learning rate
  local alpha = 0.5 -- momentum
  local function transferFunction(x)
    -- steepened sigmoid (temperature 0.5); math.tanh is a drop-in alternative
    return 1 / (1 + math.exp(-x / 0.5))
    --return math.tanh(x)
  end
  local function transferFunctionDerivative(x)
    -- expressed in terms of the neuron's output value; for the sigmoid above
    -- this is x*(1-x)/0.5 (use 1-x*x when pairing with tanh)
    return x * (1 - x) / 0.5
  end
  local function sumDOW(nextLayer)
    -- sum of (outgoing weight * downstream gradient), skipping the bias at index 0
    local sum = 0
    for n=1,#nextLayer do
      sum = sum + m_outputWeights[n].weight * nextLayer[n].getGradient()
    end
    return sum
  end

  --[[Public]]--
  local self = {}
  function self.setInfo(tabl)
    m_gradient = tabl.gra
    m_outputWeights = tabl.wei
    m_outputVal = tabl.val
  end
  function self.getInfo()
    return {
      gra = m_gradient,
      wei = m_outputWeights,
      val = m_outputVal,
    }
  end
  function self.getGradient()
    return m_gradient
  end
  function self.setOutputVal(val)
    m_outputVal = val
  end
  function self.getOutputVal()
    return m_outputVal
  end
  function self.getOutputWeights()
    return m_outputWeights
  end
  function self.feedForward(prevLayer)
    local sum = 0
    -- include the previous layer's bias neuron at index 0
    for n=0,#prevLayer do
      sum = sum + prevLayer[n].getOutputVal()*(prevLayer[n].getOutputWeights())[m_myIndex].weight
    end
    m_outputVal = transferFunction(sum)
  end
  function self.calcOutputGradients(targetVal)
    local delta = targetVal - m_outputVal
    m_gradient = delta * transferFunctionDerivative(m_outputVal)
  end
  function self.calcHiddenGradients(nextLayer)
    local dow = sumDOW(nextLayer)
    m_gradient = dow * transferFunctionDerivative(m_outputVal)
  end
  function self.changeConn(index,deltaWeight,weight)
    m_outputWeights[index] = {weight = weight, deltaWeight = deltaWeight}
  end
  function self.updateInputWeights(prevLayer)
    for n=0,#prevLayer do
      local neuron = prevLayer[n]
      local oldDeltaWeight = (neuron.getOutputWeights())[m_myIndex].deltaWeight
      -- gradient descent with momentum: eta scales the fresh step,
      -- alpha carries over a fraction of the previous step
      local newDeltaWeight = eta * neuron.getOutputVal() * m_gradient + alpha * oldDeltaWeight
      local newWeight = (neuron.getOutputWeights())[m_myIndex].weight + newDeltaWeight
      neuron.changeConn(m_myIndex,newDeltaWeight,newWeight)
    end
  end

  --[[Constructor]]--
  for c=1,numOutputs do
    m_outputWeights[c] = {
      weight = math.random(), -- random initial weight in [0,1)
      deltaWeight = 0,
    }
  end
  return self
end
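
--[[
  A minimal, hypothetical sketch of the wiring convention above: each Neuron
  stores its OUTGOING weights, so during feedForward a neuron reads entry
  [m_myIndex] of every previous-layer neuron's weight table, and index 0 of a
  layer always holds the bias neuron.

  local a, b = Neuron(1,1), Neuron(1,2) -- two inputs, one outgoing weight each
  local bias = Neuron(1,0)              -- the bias neuron sits at index 0
  bias.setOutputVal(1)
  a.setOutputVal(0.5)
  b.setOutputVal(0.25)
  local out = Neuron(0,1)               -- output neuron, no outgoing weights
  out.feedForward({[0]=bias, a, b})     -- weighted sum pushed through the sigmoid
  print(out.getOutputVal())
]]--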

--[[Class Net]]
function Net(topology)
  --[[Private]]--
  local m_layers = {} --m_layers[layerNum][neuronNum]; index 0 of each layer is the bias neuron

  --[[Public]]--
  local self = {}
  function self.feedForward(inputVals)
    assert(#inputVals == #m_layers[1],
      "Net.feedForward: expected "..#m_layers[1].." input values, got "..#inputVals)
    for i=1,#inputVals do
      m_layers[1][i].setOutputVal(inputVals[i])
    end
    -- propagate layer by layer, starting after the input layer
    for i=2,#m_layers do
      for n=1,#m_layers[i] do
        m_layers[i][n].feedForward(m_layers[i-1])
      end
    end
  end
  function self.backProp(targetVals)
    --Calculate overall net error (RMS)
    local outputLayer = m_layers[#m_layers]
    local rmsError = 0
    for n=1,#outputLayer do
      local delta = targetVals[n] - outputLayer[n].getOutputVal()
      rmsError = rmsError + delta*delta
    end
    rmsError = math.sqrt(rmsError/#outputLayer)

    --Calculate output layer gradients
    for n=1,#outputLayer do
      outputLayer[n].calcOutputGradients(targetVals[n])
    end

    --Calculate gradients on hidden layers, output side first
    for layerNum = #m_layers-1,2,-1 do
      local hiddenLayer = m_layers[layerNum]
      local nextLayer = m_layers[layerNum+1]
      for n=0,#hiddenLayer do -- include the bias neuron at index 0
        hiddenLayer[n].calcHiddenGradients(nextLayer)
      end
    end

    --Update connection weights, working back from the output layer
    for layerNum = #m_layers,2,-1 do
      local layer = m_layers[layerNum]
      local prevLayer = m_layers[layerNum - 1]
      for n=1,#layer do
        layer[n].updateInputWeights(prevLayer)
      end
    end

    return rmsError
  end
  function self.getResults()
    local resultVals = {}
    for n=1,#m_layers[#m_layers] do
      resultVals[#resultVals + 1] = m_layers[#m_layers][n].getOutputVal()
    end
    return resultVals
  end
  --[[Constructor]]--
  local numLayers = #topology
  for i=1,numLayers do
    -- neurons in the last layer have no outgoing connections
    local numOutputs = i == numLayers and 0 or topology[i+1]
    m_layers[i] = {}
    for n=1,topology[i] do
      m_layers[i][n] = Neuron(numOutputs,n)
    end
    -- every layer gets a bias neuron at index 0 with a constant output of 1
    m_layers[i][0] = Neuron(numOutputs,0)
    m_layers[i][0].setOutputVal(1)
  end
  function self.serialize()
    local output = {}
    for i=1,#m_layers do
      output[i] = {}
      for m=0,#m_layers[i] do -- index 0 captures the bias neuron's weights too
        output[i][m] = m_layers[i][m].getInfo()
      end
    end
    return output
  end
  function self.unserialize(tabl)
    for i=1,#tabl do
      for m=0,#tabl[i] do
        m_layers[i][m].setInfo(tabl[i][m])
      end
    end
  end

  return self
end

--[[Rebuild a Net from a table produced by Net.serialize]]--
function unserialize(input)
  local topology = {}
  for i=1,#input do
    topology[i] = #input[i] -- the length operator skips the bias entry at index 0
  end
  local tempNet = Net(topology)
  tempNet.unserialize(input)
  return tempNet
end
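
--[[
  A minimal, hypothetical usage sketch: train a 2-4-1 net on XOR, then
  round-trip it through serialize/unserialize. Topology and epoch count are
  illustrative, not tuned.

  local net = Net({2,4,1})
  local data = {
    {inputs = {0,0}, target = {0}},
    {inputs = {0,1}, target = {1}},
    {inputs = {1,0}, target = {1}},
    {inputs = {1,1}, target = {0}},
  }
  for epoch = 1, 2000 do
    for _, sample in ipairs(data) do
      net.feedForward(sample.inputs)
      net.backProp(sample.target)
    end
  end
  net.feedForward({1,0})
  print(net.getResults()[1]) -- should approach 1

  local saved = net.serialize()
  local copy = unserialize(saved)
  copy.feedForward({1,0})
  print(copy.getResults()[1]) -- matches the trained net's output
]]--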