function [network terr] = backprop(tset, tslb, networkInit, lr)
% One batch iteration of backpropagation for a multilayer perceptron.
% tset - training set (each row is one sample)
% tslb - column vector of 1-based class labels
% networkInit - cell array of initial layer weight matrices
%               (bias weights stored in the last row of each matrix)
% lr - learning rate
%
% network - cell array of updated layer weight matrices
% terr - halved mean squared error of the ANN over the training set
%
% Relies on the helpers actf (activation function) and actdf (its
% derivative as a function of the activation) defined after this function.

    % 1. Initialize outputs and dimensions
    network = networkInit;
    numLayers = numel(network) + 1;            % layers = weight matrices + 1
    numLabels = columns(network{numLayers-1}); % units in the output layer
    M = rows(tset);                            % number of training samples

    % 2. Propagate the input forward through the ANN
    act{1} = tset;
    for i = 2:numLayers
        % append a bias column of ones before multiplying by the weights
        response{i} = [act{i-1} ones(M, 1)] * network{i-1};
        act{i} = actf(response{i});
    endfor
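    % In matrix form the step above is act{i} = actf([act{i-1} 1] * W{i-1}),
    % where W{i-1} = network{i-1} has one row per unit of layer i-1 plus a
    % final row of bias weights, and one column per unit of layer i.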

    % 3. Preallocate the weight gradients
    for i = 2:numLayers
        networkGrad{i-1} = zeros(size(network{i-1}));
    endfor

    % 4. Set the desired output of the ANN (one-hot encoding of the labels)
    desiredOut = zeros(M, numLabels);
    for i = 1:M
        desiredOut(i, tslb(i)) = 1;
    endfor

    % 5. Compute gradients by backpropagating the output error
    d{numLayers} = desiredOut - act{numLayers};
    for i = numLayers-1:-1:1
        % propagate the delta backwards through the weights; the appended
        % ones column lines up with the bias weights
        d{i} = (d{i+1} * network{i}') .* [actdf(act{i}) ones(M, 1)];
        d{i} = d{i}(:, 1:end-1);   % drop the delta of the bias pseudo-unit
        D{i} = d{i+1}' * [act{i} ones(M, 1)];
        networkGrad{i} = lr * D{i}';
    endfor
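    % The loop above is the standard backprop recursion in matrix form:
    %   d{i} = (d{i+1} * W{i}') .* actdf(act{i})  (hidden-layer delta)
    %   D{i} = d{i+1}' * [act{i} 1]               (negative error gradient w.r.t. W{i}')
    % Two things worth noting: d{1} is computed on the final pass but never
    % used, and the output delta desiredOut - act{numLayers} omits actdf,
    % which matches a cross-entropy loss with sigmoid outputs rather than
    % the squared error reported in terr below.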

    % 6. Compute the total error (reported only, not used for training)
    terr = 0.5 * sum((act{numLayers} - desiredOut)(:).^2) / M;

    % 7. Apply the gradients (d holds desired - actual, so adding
    %    lr * D{i}' is a gradient-descent step on the error)
    for i = 2:numLayers
        network{i-1} = network{i-1} + networkGrad{i-1};
    endfor
endfunction
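
backprop relies on two helpers that the paste does not define. A minimal
sketch of both, assuming a logistic sigmoid activation; since actdf is
applied to the activations act{i} rather than the raw responses, the
derivative is written in terms of a = actf(z):

function a = actf(z)
    % element-wise logistic sigmoid
    a = 1 ./ (1 + exp(-z));
endfunction

function da = actdf(a)
    % derivative of the sigmoid in terms of the activation a = actf(z):
    % sigma'(z) = a .* (1 - a)
    da = a .* (1 - a);
endfunction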
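
For completeness, one way the function might be driven. The toy XOR data,
network shape, learning rate, and epoch count below are illustrative
assumptions, not part of the original paste:

% train a 2-4-2 network on XOR-style labels
tset = [0 0; 0 1; 1 0; 1 1];                    % one sample per row
tslb = [1; 2; 2; 1];                            % 1-based class labels
net  = {0.5 * randn(3, 4), 0.5 * randn(5, 2)};  % (inputs+1) x outputs, bias in last row
lr   = 0.5;
for epoch = 1:2000
    [net terr] = backprop(tset, tslb, net, lr);
endfor
printf("final error: %f\n", terr);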