function [J, grad] = GXNNCostF(Tin, L1neurons, L2neurons, nLabs, X, y, lambda)

% Reshape the unrolled parameter vector Tin back into the weight
% matrices Theta1 and Theta2 of a 2-layer neural network.

Theta1 = reshape(Tin(1:L2neurons * (L1neurons + 1)), L2neurons, (L1neurons + 1));
Theta2 = reshape(Tin((1 + (L2neurons * (L1neurons + 1))):end), nLabs, (L2neurons + 1));

m = size(X, 1); % number of training examples


%% Part 1: FEEDFORWARD NEURAL NETWORK

% PREP y FOR COST FUNCTION EVAL

yd = eye(nLabs); % nLabs x nLabs identity matrix, one row per label
y = yd(y,:);     % expand y into an m x nLabs one-hot matrix; each row contains a single 1

% MAP FROM LAYER 1 TO LAYER 2

a1 = [ones(m, 1) X]; % input layer: prepend a bias column of ones (a1: m x (L1neurons + 1))
z2 = a1 * Theta1';   % weighted sums feeding the hidden layer (z2: m x L2neurons)
a2 = SIG(z2);        % sigmoid maps each entry to a value in (0, 1) (a2: m x L2neurons)

% MAP FROM LAYER 2 TO LAYER 3

a2 = [ones(m, 1) a2]; % prepend a bias column to the hidden activations (a2: m x (L2neurons + 1))
z3 = a2 * Theta2';    % weighted sums feeding the output layer (z3: m x nLabs)
a3 = SIG(z3);         % sigmoid output activations, one score per label (a3: m x nLabs)
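% Usage note (hypothetical, outside this function): with a3 in hand, class
% predictions can be read off row-wise as
%   [~, p] = max(a3, [], 2);
% where p(i) is the index of the label with the largest output activation.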

% EVALUATE UNREGULARIZED LOGISTIC COST FUNCTION

logisf = y .* log(a3) + (1 - y) .* log(1 - a3); % element-wise (.*) because y is now a matrix
Jun = (-1/m) * sum(sum(logisf));                % cost summed over all examples and labels


% REGULARIZE COST FUNCTION

T1 = Theta1(:, 2:end); % Theta1 without its bias column (L2neurons x L1neurons)
T2 = Theta2(:, 2:end); % Theta2 without its bias column (nLabs x L2neurons)

J = Jun + (lambda/(2*m)) * (sum(sum(T1.^2)) + sum(sum(T2.^2))); % bias weights are not penalized
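% For reference, the quantity computed above is the regularized cross-entropy
% cost (bias columns excluded from the penalty):
%
%   J = -(1/m) * sum_i sum_k [ y(i,k) * log(a3(i,k)) + (1 - y(i,k)) * log(1 - a3(i,k)) ]
%       + (lambda / (2*m)) * ( sum(T1(:).^2) + sum(T2(:).^2) )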

%% Part 2: BACKPROPAGATION

D3 = a3 - y;                % output-layer error (D3: m x nLabs)
D2 = D3 * T2 .* SIGRAD(z2); % hidden-layer error, propagated back through Theta2 (D2: m x L2neurons)

Delta2 = (1/m) * (D3' * a2); % same size as Theta2 (nLabs x (L2neurons + 1))
Delta1 = (1/m) * (D2' * a1); % same size as Theta1 (L2neurons x (L1neurons + 1))

% Regularize the gradients so they match the regularized cost J above;
% as with J, the bias columns are left unpenalized.
Delta2(:, 2:end) = Delta2(:, 2:end) + (lambda/m) * T2;
Delta1(:, 2:end) = Delta1(:, 2:end) + (lambda/m) * T1;

grad = [Delta1(:) ; Delta2(:)]; % unroll gradients into a single vector

end
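
% The helpers SIG and SIGRAD are not included in this paste. Below is a minimal
% sketch of what they are assumed to be (the logistic sigmoid and its
% derivative), written as subfunctions so the file runs standalone:

function g = SIG(z)
  % Logistic sigmoid, applied element-wise
  g = 1 ./ (1 + exp(-z));
end

function g = SIGRAD(z)
  % Sigmoid gradient: sig(z) .* (1 - sig(z)), applied element-wise
  s = 1 ./ (1 + exp(-z));
  g = s .* (1 - s);
end

% Usage sketch (hypothetical sizes and random data, for illustration only):
%
%   L1neurons = 400; L2neurons = 25; nLabs = 10;
%   nParams = L2neurons * (L1neurons + 1) + nLabs * (L2neurons + 1);
%   Tin = 0.1 * randn(nParams, 1);         % random initial weights
%   X = randn(5000, L1neurons);            % 5000 examples
%   y = randi(nLabs, 5000, 1);             % labels in 1..nLabs
%   [J, grad] = GXNNCostF(Tin, L1neurons, L2neurons, nLabs, X, y, 1);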